Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-21 00:04:07 +00:00)

Compare commits: cert-mgmt...patch-ldap (198 commits)
SHA1 | Author | Date | |
---|---|---|---|
91634fbe76 | |||
4962a63888 | |||
9e9de9f527 | |||
6af4a06c02 | |||
fe6dc248b6 | |||
7d380f9b43 | |||
76c8410081 | |||
6df90fa825 | |||
c042bafba3 | |||
8067df821e | |||
1906896e56 | |||
a8ccfd9c92 | |||
32609b95a0 | |||
08d3436217 | |||
2ae45dc1cc | |||
44a898fb15 | |||
4d194052b5 | |||
1d622bb121 | |||
5c149c6ac6 | |||
c19f8839ff | |||
c6c71a04e8 | |||
d47c586a52 | |||
88156c8cd8 | |||
27d5d90d02 | |||
07ca1ed424 | |||
18c5dd3cbd | |||
467e3aab56 | |||
577b432861 | |||
dda6b1d233 | |||
e83f31249a | |||
3142d36ea1 | |||
9506b60d02 | |||
ed25b82113 | |||
83bd97fc70 | |||
1d5115972b | |||
d26521be0b | |||
473f8137fd | |||
719d0ea30f | |||
aaef339e21 | |||
e3beeb68eb | |||
d0c76ae4b4 | |||
a5cf6f40c7 | |||
f121f8e828 | |||
54c8da8ab6 | |||
6e0dfc72e4 | |||
b226fdac9d | |||
3c36d5dbd2 | |||
a5f895ad91 | |||
9f66b9bb4d | |||
80e55a9341 | |||
5142d6f3c1 | |||
c8677ac548 | |||
992cc03eca | |||
f0e7c459e2 | |||
29d0694a16 | |||
f13930bc6b | |||
0d5514834d | |||
b495156444 | |||
65a2b0116b | |||
8ef2501407 | |||
21c6160c84 | |||
8a2268956a | |||
df3c58bc2a | |||
2675aa6969 | |||
6bad13738f | |||
dbae6968c9 | |||
e019f3811b | |||
db726128f1 | |||
24935f4e07 | |||
1835777832 | |||
cb237831c7 | |||
49d2ea6f2e | |||
3b2a2d1a73 | |||
f490fb6616 | |||
c4f9a3b31e | |||
afcf15df55 | |||
bf8aee25fe | |||
ebdfe31c17 | |||
e65ce932dd | |||
ae177343d5 | |||
0342ba0890 | |||
c119f506fd | |||
93638baba7 | |||
bad97774c4 | |||
68f5be2ff1 | |||
0b54099789 | |||
9b2a2eda0c | |||
a332019c25 | |||
8039b3f21e | |||
c9f7f6481f | |||
39df6ce086 | |||
de3e23ecfa | |||
17a79fb621 | |||
0ee792e84b | |||
116e940050 | |||
5d45237ea5 | |||
44928a2e3c | |||
ff912fc3b0 | |||
bde40e53e3 | |||
5211eb1ed6 | |||
96fffd3c03 | |||
56506b5a47 | |||
400b412196 | |||
2780414fcb | |||
b82524d65d | |||
c493f1d0f6 | |||
fb1b816be6 | |||
2645d4d158 | |||
61d60498a9 | |||
93f3395bde | |||
d6060781e4 | |||
345edb3f15 | |||
d4ef92787d | |||
b7326bf4c6 | |||
3dd024c90a | |||
dd6fb4232e | |||
3411185d60 | |||
ccef9646c6 | |||
458639e93d | |||
35998e98cf | |||
e19b67f9a2 | |||
f41ec46a35 | |||
33aa9ea1a7 | |||
2d8a2a6a3a | |||
5eeea767a3 | |||
2b4f5962e2 | |||
bf14bbfeee | |||
fa77dc01df | |||
ed5044a102 | |||
ec7fe013fd | |||
a26ad6cfb0 | |||
dd0399d12e | |||
04456fe996 | |||
2605987289 | |||
7edcf5ff90 | |||
3947e3dabf | |||
fe6e5e09ac | |||
068eb9246d | |||
3472be480a | |||
df71ecffa0 | |||
68818beb38 | |||
e600b68684 | |||
b52aebfd92 | |||
c9e56e4e9f | |||
08a77f6ddb | |||
bc3f21809e | |||
46b48cea63 | |||
44956c6a37 | |||
4de63b6140 | |||
5cee228f5f | |||
20fea1e25f | |||
d0ffb94bc7 | |||
d5658d374a | |||
810a58c836 | |||
9e24050f17 | |||
7057d399bc | |||
c63d57f086 | |||
a9ce3789b0 | |||
023a0d99ab | |||
5aadc41a4a | |||
4f38352765 | |||
cf5e367aba | |||
a70043b80d | |||
b94db5d674 | |||
bd6a89fa9a | |||
9977329741 | |||
21bd468307 | |||
e95109c446 | |||
93d5180dfc | |||
a9bec84d27 | |||
e3f87382a3 | |||
736f067178 | |||
f3ea7b3dfd | |||
777dfd5f58 | |||
e6ed1231cd | |||
9197530b43 | |||
1eae7d0c30 | |||
cc8119766a | |||
928d5a5240 | |||
32dd478894 | |||
c3f7c1d46b | |||
89644703a0 | |||
d20b897f28 | |||
70e022826e | |||
b7f5fa2cec | |||
7b444e91a8 | |||
abd4b411fa | |||
9e4b248794 | |||
f6e44463c4 | |||
1a6b710138 | |||
43a3731b62 | |||
24b8b64d3b | |||
263d321d75 | |||
a6e71c98a6 | |||
0e86d5573a | |||
6c0ab43c97 | |||
d743537284 | |||
5df53a25fc |
.github/workflows
README.md
backend
package-lock.json
package.json
src
@types
db
migrations
20240609133400_private-key-handoff.ts
20240614010847_custom-rate-limits-for-self-hosting.ts
20240614115952_tag-machine-identity.ts
20240614154212_certificate-mgmt.ts
20240614184133_make-secret-sharing-public.ts
schemas
ee
routes/v1
services
ldap-config
license
rate-limit
saml-config
secret-snapshot
lib
server
services
auth
auth-fns.ts
auth-login-service.ts
auth-login-type.ts
auth-password-service.ts
auth-password-type.ts
auth-signup-service.ts
auth-signup-type.ts
identity-kubernetes-auth
integration-auth
project-membership
secret-sharing
secret-tag
secret
super-admin
user
cli
company/documentation/getting-started
docs
api-reference/endpoints/secret-tags
changelog
cli/commands
documentation
images/platform/secret-sharing
integrations/platforms
mint.json
sdks/languages
frontend
package-lock.json
const.ts
src
components
signup
utilities
v2/LeaveProjectModal
hooks/api
pages
views
Login/components
Project/AuditLogsPage/components
Settings/ProjectSettingsPage/components/DeleteProjectSection
ShareSecretPage/components
ShareSecretPublicPage
Signup/components/UserInfoSSOStep
admin
helm-charts/secrets-operator
k8-operator
@@ -35,7 +35,7 @@ jobs:
           echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
           echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
           echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
-          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
+          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
         env:
           REDIS_URL: redis://172.17.0.1:6379
           DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
31 README.md
@ -48,25 +48,26 @@
|
||||
|
||||
## Introduction
|
||||
|
||||
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations.
|
||||
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.
|
||||
|
||||
We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
|
||||
We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
|
||||
|
||||
## Features
|
||||
|
||||
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
|
||||
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
|
||||
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
|
||||
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
|
||||
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
|
||||
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
|
||||
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
|
||||
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
|
||||
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
|
||||
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
|
||||
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to managed secrets in k8s, automatically reload deployments, and more.
|
||||
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
|
||||
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
|
||||
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
|
||||
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
|
||||
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
|
||||
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities.
|
||||
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
|
||||
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
|
||||
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities.
|
||||
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
|
||||
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
|
||||
- **[Internal PKI](https://infisical.com/docs/documentation/platform/pki/private-ca)** to create Private CA hierarchies and start issuing and managing X.509 digital certificates.
|
||||
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
|
||||
|
||||
And much more.
|
||||
|
||||
@ -74,9 +75,9 @@ And much more.
|
||||
|
||||
Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)
|
||||
|
||||
| Use Infisical Cloud | Deploy Infisical on premise |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
|
||||
| Use Infisical Cloud | Deploy Infisical on premise |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
|
||||
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
|
||||
|
||||
### Run Infisical locally
|
||||
|
||||
|
29 backend/package-lock.json (generated)
@ -38,6 +38,7 @@
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^5.4.2",
|
||||
"cassandra-driver": "^4.7.2",
|
||||
"cron": "^3.1.7",
|
||||
"dotenv": "^16.4.1",
|
||||
"fastify": "^4.26.0",
|
||||
"fastify-plugin": "^4.5.1",
|
||||
@ -4951,6 +4952,11 @@
|
||||
"long": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/luxon": {
|
||||
"version": "3.4.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.4.2.tgz",
|
||||
"integrity": "sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA=="
|
||||
},
|
||||
"node_modules/@types/mime": {
|
||||
"version": "1.3.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
|
||||
@ -6453,12 +6459,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/braces": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
|
||||
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
|
||||
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"fill-range": "^7.0.1"
|
||||
"fill-range": "^7.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
@ -6847,6 +6853,15 @@
|
||||
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/cron": {
|
||||
"version": "3.1.7",
|
||||
"resolved": "https://registry.npmjs.org/cron/-/cron-3.1.7.tgz",
|
||||
"integrity": "sha512-tlBg7ARsAMQLzgwqVxy8AZl/qlTc5nibqYwtNGoCrd+cV+ugI+tvZC1oT/8dFH8W455YrywGykx/KMmAqOr7Jw==",
|
||||
"dependencies": {
|
||||
"@types/luxon": "~3.4.0",
|
||||
"luxon": "~3.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/cron-parser": {
|
||||
"version": "4.9.0",
|
||||
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
|
||||
@ -8100,9 +8115,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/fill-range": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
|
||||
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
||||
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"to-regex-range": "^5.0.1"
|
||||
|
@ -99,6 +99,7 @@
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^5.4.2",
|
||||
"cassandra-driver": "^4.7.2",
|
||||
"cron": "^3.1.7",
|
||||
"dotenv": "^16.4.1",
|
||||
"fastify": "^4.26.0",
|
||||
"fastify-plugin": "^4.5.1",
|
||||
|
2 backend/src/@types/fastify.d.ts (vendored)
@ -15,6 +15,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
|
||||
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
@ -153,6 +154,7 @@ declare module "fastify" {
|
||||
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
|
||||
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
|
||||
secretSharing: TSecretSharingServiceFactory;
|
||||
rateLimit: TRateLimitServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
4 backend/src/@types/knex.d.ts (vendored)
@ -170,6 +170,9 @@ import {
|
||||
TProjectUserMembershipRoles,
|
||||
TProjectUserMembershipRolesInsert,
|
||||
TProjectUserMembershipRolesUpdate,
|
||||
TRateLimit,
|
||||
TRateLimitInsert,
|
||||
TRateLimitUpdate,
|
||||
TSamlConfigs,
|
||||
TSamlConfigsInsert,
|
||||
TSamlConfigsUpdate,
|
||||
@ -395,6 +398,7 @@ declare module "knex/types/tables" {
|
||||
TSecretFolderVersionsUpdate
|
||||
>;
|
||||
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
|
||||
[TableName.RateLimit]: Knex.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
|
||||
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
|
||||
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
|
||||
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
|
||||
|
backend/src/db/migrations/20240609133400_private-key-handoff.ts (new file)

@@ -0,0 +1,61 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
  const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKey"
  );
  const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyIV"
  );
  const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyTag"
  );
  const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyEncoding"
  );
  if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
    await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
      if (!doesPasswordFieldExist) t.string("hashedPassword");
      if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey");
      if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV");
      if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag");
      if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
  const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKey"
  );
  const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyIV"
  );
  const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyTag"
  );
  const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
    TableName.UserEncryptionKey,
    "serverEncryptedPrivateKeyEncoding"
  );
  if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
    await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
      if (doesPasswordFieldExist) t.dropColumn("hashedPassword");
      if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey");
      if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV");
      if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag");
      if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding");
    });
  }
}
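Both `up` and `down` guard every change behind `hasTable`/`hasColumn` checks, which keeps the migration safe to re-run against a database that is only partially migrated. A minimal sketch of that same guard extracted into a reusable helper (the helper name and the literal table name are illustrative, not part of this changeset):

```ts
import { Knex } from "knex";

// Illustrative helper only: mirrors the hasTable/hasColumn guards used above,
// so an "add column" step becomes a no-op once it has already been applied.
export async function addColumnIfMissing(
  knex: Knex,
  table: string,
  column: string,
  build: (t: Knex.AlterTableBuilder) => void
): Promise<void> {
  if (!(await knex.schema.hasTable(table))) return;
  if (await knex.schema.hasColumn(table, column)) return;
  await knex.schema.alterTable(table, build);
}

// Usage sketch, assuming TableName.UserEncryptionKey resolves to "user_encryption_keys":
// await addColumnIfMissing(knex, "user_encryption_keys", "hashedPassword", (t) => t.string("hashedPassword"));
```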
backend/src/db/migrations/20240614010847_custom-rate-limits-for-self-hosting.ts (new file)

@@ -0,0 +1,31 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.RateLimit))) {
    await knex.schema.createTable(TableName.RateLimit, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.integer("readRateLimit").defaultTo(600).notNullable();
      t.integer("writeRateLimit").defaultTo(200).notNullable();
      t.integer("secretsRateLimit").defaultTo(60).notNullable();
      t.integer("authRateLimit").defaultTo(60).notNullable();
      t.integer("inviteUserRateLimit").defaultTo(30).notNullable();
      t.integer("mfaRateLimit").defaultTo(20).notNullable();
      t.integer("creationLimit").defaultTo(30).notNullable();
      t.integer("publicEndpointLimit").defaultTo(30).notNullable();
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.RateLimit);

    // create init rate limit entry with defaults
    await knex(TableName.RateLimit).insert({});
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.RateLimit);
  await dropOnUpdateTrigger(knex, TableName.RateLimit);
}
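The trailing `insert({})` seeds exactly one configuration row whose columns all fall back to the table defaults, so a fresh instance always has a row to read and update. A rough sketch of what that seeded row looks like when queried directly (the connection setup and the literal "rate_limit" table name are assumptions for illustration):

```ts
import knex from "knex";

// Assumed connection config, for illustration only.
const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// The single seeded row: every column took its DEFAULT from the migration above.
const seeded = await db("rate_limit").first();
// => { id: <uuid>, readRateLimit: 600, writeRateLimit: 200, secretsRateLimit: 60,
//      authRateLimit: 60, inviteUserRateLimit: 30, mfaRateLimit: 20,
//      creationLimit: 30, publicEndpointLimit: 30, createdAt, updatedAt }

await db.destroy();
```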
backend/src/db/migrations/20240614115952_tag-machine-identity.ts (new file)

@@ -0,0 +1,25 @@
import { Knex } from "knex";

import { ActorType } from "@app/services/auth/auth-type";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
  await knex.schema.alterTable(TableName.SecretTag, (tb) => {
    if (!hasCreatedByActorType) {
      tb.string("createdByActorType").notNullable().defaultTo(ActorType.USER);
      tb.dropForeign("createdBy");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
  await knex.schema.alterTable(TableName.SecretTag, (tb) => {
    if (hasCreatedByActorType) {
      tb.dropColumn("createdByActorType");
      tb.foreign("createdBy").references("id").inTable(TableName.Users).onDelete("SET NULL");
    }
  });
}
backend/src/db/migrations/20240614184133_make-secret-sharing-public.ts (new file)

@@ -0,0 +1,27 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
  const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");

  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      if (hasOrgIdColumn) t.uuid("orgId").nullable().alter();
      if (hasUserIdColumn) t.uuid("userId").nullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
  const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");

  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      if (hasOrgIdColumn) t.uuid("orgId").notNullable().alter();
      if (hasUserIdColumn) t.uuid("userId").notNullable().alter();
    });
  }
}
@@ -55,6 +55,7 @@ export * from "./project-roles";
 export * from "./project-user-additional-privilege";
 export * from "./project-user-membership-roles";
 export * from "./projects";
+export * from "./rate-limit";
 export * from "./saml-configs";
 export * from "./scim-tokens";
 export * from "./secret-approval-policies";
@@ -25,6 +25,7 @@ export enum TableName {
   IncidentContact = "incident_contacts",
   UserAction = "user_actions",
   SuperAdmin = "super_admin",
+  RateLimit = "rate_limit",
   ApiKey = "api_keys",
   Project = "projects",
   ProjectBot = "project_bots",
26 backend/src/db/schemas/rate-limit.ts (new file)

@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const RateLimitSchema = z.object({
  id: z.string().uuid(),
  readRateLimit: z.number().default(600),
  writeRateLimit: z.number().default(200),
  secretsRateLimit: z.number().default(60),
  authRateLimit: z.number().default(60),
  inviteUserRateLimit: z.number().default(30),
  mfaRateLimit: z.number().default(20),
  creationLimit: z.number().default(30),
  publicEndpointLimit: z.number().default(30),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TRateLimit = z.infer<typeof RateLimitSchema>;
export type TRateLimitInsert = Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>;
export type TRateLimitUpdate = Partial<Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>>;
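The generated schema doubles as a runtime validator (the new rate-limit router below returns rows through it) and as the source of the derived `TRateLimit*` types: `TRateLimitInsert` drops the immutable keys, and `TRateLimitUpdate` additionally makes everything optional so partial updates type-check. A small sketch of both uses (the sample UUID is arbitrary; in practice the service pins the single config row to an all-zeros id):

```ts
import { RateLimitSchema, TRateLimitUpdate } from "@app/db/schemas";

// Validating a row read from the database at runtime.
const row = RateLimitSchema.parse({
  id: "7f0a3e9b-2c41-4a8d-9b7e-3d5f1c2a6e90", // arbitrary example id
  readRateLimit: 600,
  writeRateLimit: 200,
  secretsRateLimit: 60,
  authRateLimit: 60,
  inviteUserRateLimit: 30,
  mfaRateLimit: 20,
  creationLimit: 30,
  publicEndpointLimit: 30,
  createdAt: new Date(),
  updatedAt: new Date()
});

// Partial updates compile because TRateLimitUpdate is Partial<Omit<..., TImmutableDBKeys>>.
const patch: TRateLimitUpdate = { readRateLimit: 1200 };
```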
@@ -14,8 +14,8 @@ export const SecretSharingSchema = z.object({
   tag: z.string(),
   hashedHex: z.string(),
   expiresAt: z.date(),
-  userId: z.string().uuid(),
-  orgId: z.string().uuid(),
+  userId: z.string().uuid().nullable().optional(),
+  orgId: z.string().uuid().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
   expiresAfterViews: z.number().nullable().optional()
@@ -15,7 +15,8 @@ export const SecretTagsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   createdBy: z.string().uuid().nullable().optional(),
-  projectId: z.string()
+  projectId: z.string(),
+  createdByActorType: z.string().default("user")
 });

 export type TSecretTags = z.infer<typeof SecretTagsSchema>;
@@ -21,7 +21,12 @@ export const UserEncryptionKeysSchema = z.object({
   tag: z.string(),
   salt: z.string(),
   verifier: z.string(),
-  userId: z.string().uuid()
+  userId: z.string().uuid(),
+  hashedPassword: z.string().nullable().optional(),
+  serverEncryptedPrivateKey: z.string().nullable().optional(),
+  serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
+  serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
+  serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
 });

 export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;
@ -11,6 +11,7 @@ import { registerLicenseRouter } from "./license-router";
|
||||
import { registerOrgRoleRouter } from "./org-role-router";
|
||||
import { registerProjectRoleRouter } from "./project-role-router";
|
||||
import { registerProjectRouter } from "./project-router";
|
||||
import { registerRateLimitRouter } from "./rate-limit-router";
|
||||
import { registerSamlRouter } from "./saml-router";
|
||||
import { registerScimRouter } from "./scim-router";
|
||||
import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router";
|
||||
@ -46,6 +47,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
|
||||
await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" });
|
||||
await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" });
|
||||
await server.register(registerRateLimitRouter, { prefix: "/rate-limit" });
|
||||
|
||||
await server.register(
|
||||
async (dynamicSecretRouter) => {
|
||||
|
@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
// eslint-disable-next-line
|
||||
async (req: IncomingMessage, user, cb) => {
|
||||
try {
|
||||
if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
|
||||
if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." });
|
||||
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
|
||||
|
||||
let groups: { dn: string; cn: string }[] | undefined;
|
||||
|
@ -143,7 +143,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
projectId: req.params.workspaceId,
|
||||
...req.query,
|
||||
startDate: req.query.endDate || getLastMidnightDateISO(),
|
||||
endDate: req.query.endDate,
|
||||
startDate: req.query.startDate || getLastMidnightDateISO(),
|
||||
auditLogActor: req.query.actor,
|
||||
actor: req.permission.type
|
||||
});
|
||||
|
75 backend/src/ee/routes/v1/rate-limit-router.ts (new file)
@ -0,0 +1,75 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { RateLimitSchema } from "@app/db/schemas";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
rateLimit: RateLimitSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async () => {
|
||||
const rateLimit = await server.services.rateLimit.getRateLimits();
|
||||
if (!rateLimit) {
|
||||
throw new BadRequestError({
|
||||
name: "Get Rate Limit Error",
|
||||
message: "Rate limit configuration does not exist."
|
||||
});
|
||||
}
|
||||
return { rateLimit };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PUT",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
|
||||
schema: {
|
||||
body: z.object({
|
||||
readRateLimit: z.number(),
|
||||
writeRateLimit: z.number(),
|
||||
secretsRateLimit: z.number(),
|
||||
authRateLimit: z.number(),
|
||||
inviteUserRateLimit: z.number(),
|
||||
mfaRateLimit: z.number(),
|
||||
creationLimit: z.number(),
|
||||
publicEndpointLimit: z.number()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
rateLimit: RateLimitSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const rateLimit = await server.services.rateLimit.updateRateLimit(req.body);
|
||||
return { rateLimit };
|
||||
}
|
||||
});
|
||||
};
|
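Both routes run `verifyAuth([AuthMode.JWT])` followed by `verifySuperAdmin`, and the PUT body must carry every limit field from the schema. A hedged sketch of exercising the endpoints with `fetch` (the `/api/v1/rate-limit` path assumes the EE v1 router keeps its usual `/api/v1` mount with the `/rate-limit` prefix registered above; `INFISICAL_URL` and `ADMIN_JWT` are placeholder environment variables):

```ts
type TLimits = {
  writeRateLimit: number;
  secretsRateLimit: number;
  authRateLimit: number;
  inviteUserRateLimit: number;
  mfaRateLimit: number;
  creationLimit: number;
  publicEndpointLimit: number;
};

const BASE_URL = process.env.INFISICAL_URL ?? "http://localhost:8080"; // assumed default
const headers = {
  Authorization: `Bearer ${process.env.ADMIN_JWT}`, // JWT of a server-admin user
  "Content-Type": "application/json"
};

export const raiseReadLimit = async () => {
  // GET returns { rateLimit: { ...all configured limits } }.
  const res = await fetch(`${BASE_URL}/api/v1/rate-limit`, { headers });
  const { rateLimit } = (await res.json()) as { rateLimit: TLimits };

  // PUT expects the full set of limit fields defined in the body schema above.
  await fetch(`${BASE_URL}/api/v1/rate-limit`, {
    method: "PUT",
    headers,
    body: JSON.stringify({
      readRateLimit: 1200, // raise reads from the seeded default of 600
      writeRateLimit: rateLimit.writeRateLimit,
      secretsRateLimit: rateLimit.secretsRateLimit,
      authRateLimit: rateLimit.authRateLimit,
      inviteUserRateLimit: rateLimit.inviteUserRateLimit,
      mfaRateLimit: rateLimit.mfaRateLimit,
      creationLimit: rateLimit.creationLimit,
      publicEndpointLimit: rateLimit.publicEndpointLimit
    })
  });
};
```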
@ -23,6 +23,8 @@ import {
|
||||
} from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TokenType } from "@app/services/auth-token/auth-token-types";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
@ -30,6 +32,7 @@ import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membe
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { normalizeUsername } from "@app/services/user/user-fns";
|
||||
@ -73,11 +76,19 @@ type TLdapConfigServiceFactoryDep = {
|
||||
>;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
|
||||
| "create"
|
||||
| "findOne"
|
||||
| "transaction"
|
||||
| "updateById"
|
||||
| "findUserEncKeyByUserIdsBatch"
|
||||
| "find"
|
||||
| "findUserEncKeyByUserId"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
|
||||
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
};
|
||||
|
||||
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
|
||||
@ -97,7 +108,9 @@ export const ldapConfigServiceFactory = ({
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
permissionService,
|
||||
licenseService
|
||||
licenseService,
|
||||
tokenService,
|
||||
smtpService
|
||||
}: TLdapConfigServiceFactoryDep) => {
|
||||
const createLdapCfg = async ({
|
||||
actor,
|
||||
@ -488,7 +501,7 @@ export const ldapConfigServiceFactory = ({
|
||||
if (!orgMembership) {
|
||||
await orgMembershipDAL.create(
|
||||
{
|
||||
userId: userAlias.userId,
|
||||
userId: newUser.id,
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
role: OrgMembershipRole.Member,
|
||||
@ -592,12 +605,14 @@ export const ldapConfigServiceFactory = ({
|
||||
});
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
|
||||
firstName,
|
||||
lastName,
|
||||
@ -619,6 +634,22 @@ export const ldapConfigServiceFactory = ({
|
||||
}
|
||||
);
|
||||
|
||||
if (user.email && !user.isEmailVerified) {
|
||||
const token = await tokenService.createTokenForUser({
|
||||
type: TokenType.TOKEN_EMAIL_VERIFICATION,
|
||||
userId: user.id
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.EmailVerification,
|
||||
subjectLine: "Infisical confirmation code",
|
||||
recipients: [user.email],
|
||||
substitutions: {
|
||||
code: token
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return { isUserCompleted, providerAuthToken };
|
||||
};
|
||||
|
||||
|
@ -575,6 +575,9 @@ export const licenseServiceFactory = ({
|
||||
getInstanceType() {
|
||||
return instanceType;
|
||||
},
|
||||
get onPremFeatures() {
|
||||
return onPremFeatures;
|
||||
},
|
||||
getPlan,
|
||||
updateSubscriptionOrgMemberCount,
|
||||
refreshPlan,
|
||||
|
7 backend/src/ee/services/rate-limit/rate-limit-dal.ts (new file)

@@ -0,0 +1,7 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TRateLimitDALFactory = ReturnType<typeof rateLimitDALFactory>;

export const rateLimitDALFactory = (db: TDbClient) => ormify(db, TableName.RateLimit, {});
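`ormify` wraps the `rate_limit` table in a generic DAL; the service below relies only on its `findOne`, `create`, and `updateById` methods. A hedged usage sketch (the surrounding function is illustrative, and only the ormify methods the service itself calls are assumed to exist):

```ts
import { TDbClient } from "@app/db";
import { rateLimitDALFactory } from "@app/ee/services/rate-limit/rate-limit-dal";

// Fixed id used by the service so the instance keeps exactly one config row.
const DEFAULT_RATE_LIMIT_CONFIG_ID = "00000000-0000-0000-0000-000000000000";

export const bumpSecretsLimit = async (db: TDbClient) => {
  const rateLimitDAL = rateLimitDALFactory(db);

  // Read the singleton row, then patch one column on it.
  const existing = await rateLimitDAL.findOne({ id: DEFAULT_RATE_LIMIT_CONFIG_ID });
  if (!existing) return undefined;

  return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, { secretsRateLimit: 120 });
};
```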
106 backend/src/ee/services/rate-limit/rate-limit-service.ts (new file)

@@ -0,0 +1,106 @@
import { CronJob } from "cron";

import { logger } from "@app/lib/logger";

import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";

let rateLimitMaxConfiguration = {
  readLimit: 60,
  publicEndpointLimit: 30,
  writeLimit: 200,
  secretsLimit: 60,
  authRateLimit: 60,
  inviteUserRateLimit: 30,
  mfaRateLimit: 20,
  creationLimit: 30
};

Object.freeze(rateLimitMaxConfiguration);

export const getRateLimiterConfig = () => {
  return rateLimitMaxConfiguration;
};

type TRateLimitServiceFactoryDep = {
  rateLimitDAL: TRateLimitDALFactory;
  licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
};

export type TRateLimitServiceFactory = ReturnType<typeof rateLimitServiceFactory>;

export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateLimitServiceFactoryDep) => {
  const DEFAULT_RATE_LIMIT_CONFIG_ID = "00000000-0000-0000-0000-000000000000";

  const getRateLimits = async (): Promise<TRateLimit | undefined> => {
    let rateLimit: TRateLimit;

    try {
      rateLimit = await rateLimitDAL.findOne({ id: DEFAULT_RATE_LIMIT_CONFIG_ID });
      if (!rateLimit) {
        // rate limit might not exist
        rateLimit = await rateLimitDAL.create({
          // @ts-expect-error id is kept as fixed because there should only be one rate limit config per instance
          id: DEFAULT_RATE_LIMIT_CONFIG_ID
        });
      }
      return rateLimit;
    } catch (err) {
      logger.error("Error fetching rate limits %o", err);
      return undefined;
    }
  };

  const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> => {
    return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates);
  };

  const syncRateLimitConfiguration = async () => {
    try {
      const rateLimit = await getRateLimits();
      if (rateLimit) {
        const newRateLimitMaxConfiguration: typeof rateLimitMaxConfiguration = {
          readLimit: rateLimit.readRateLimit,
          publicEndpointLimit: rateLimit.publicEndpointLimit,
          writeLimit: rateLimit.writeRateLimit,
          secretsLimit: rateLimit.secretsRateLimit,
          authRateLimit: rateLimit.authRateLimit,
          inviteUserRateLimit: rateLimit.inviteUserRateLimit,
          mfaRateLimit: rateLimit.mfaRateLimit,
          creationLimit: rateLimit.creationLimit
        };

        logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
        Object.freeze(newRateLimitMaxConfiguration);
        rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
      }
    } catch (error) {
      logger.error(`Error syncing rate limit configurations: %o`, error);
    }
  };

  const initializeBackgroundSync = async () => {
    if (!licenseService.onPremFeatures.customRateLimits) {
      logger.info("Current license does not support custom rate limit configuration");
      return;
    }

    logger.info("Setting up background sync process for rate limits");
    // initial sync upon startup
    await syncRateLimitConfiguration();

    // sync rate limits configuration every 10 minutes
    const job = new CronJob("*/10 * * * *", syncRateLimitConfiguration);
    job.start();

    return job;
  };

  return {
    getRateLimits,
    updateRateLimit,
    initializeBackgroundSync,
    syncRateLimitConfiguration
  };
};
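The factory keeps a module-level `rateLimitMaxConfiguration` snapshot that `getRateLimiterConfig()` exposes to the limiter callbacks, and `initializeBackgroundSync()` refreshes that snapshot from the database every ten minutes, but only when the license reports `customRateLimits`. A sketch of the wiring, loosely following what `registerRoutes` does further down (the `db` and `licenseService` parameters are assumed to come from the caller):

```ts
import { TDbClient } from "@app/db";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { rateLimitDALFactory } from "@app/ee/services/rate-limit/rate-limit-dal";
import { getRateLimiterConfig, rateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";

export const setupRateLimits = async (db: TDbClient, licenseService: TLicenseServiceFactory) => {
  const rateLimitService = rateLimitServiceFactory({
    rateLimitDAL: rateLimitDALFactory(db),
    licenseService
  });

  // Initial sync plus a "*/10 * * * *" cron; returns undefined when the
  // license does not include customRateLimits.
  const job = await rateLimitService.initializeBackgroundSync();

  // Limiters read the snapshot lazily, so values synced from the DB apply
  // without a server restart.
  const currentReadMax = getRateLimiterConfig().readLimit;

  // Callers are expected to stop the cron job on shutdown, e.g. job?.stop().
  return { rateLimitService, job, currentReadMax };
};
```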
16 backend/src/ee/services/rate-limit/rate-limit-types.ts (new file)

@@ -0,0 +1,16 @@
export type TRateLimitUpdateDTO = {
  readRateLimit: number;
  writeRateLimit: number;
  secretsRateLimit: number;
  authRateLimit: number;
  inviteUserRateLimit: number;
  mfaRateLimit: number;
  creationLimit: number;
  publicEndpointLimit: number;
};

export type TRateLimit = {
  id: string;
  createdAt: Date;
  updatedAt: Date;
} & TRateLimitUpdateDTO;
@ -41,7 +41,10 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f
|
||||
|
||||
type TSamlConfigServiceFactoryDep = {
|
||||
samlConfigDAL: Pick<TSamlConfigDALFactory, "create" | "findOne" | "update" | "findById">;
|
||||
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById" | "findById">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
@ -452,6 +455,7 @@ export const samlConfigServiceFactory = ({
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
authTokenType: AuthTokenType.PROVIDER_TOKEN,
|
||||
@ -464,6 +468,7 @@ export const samlConfigServiceFactory = ({
|
||||
organizationId: organization.id,
|
||||
organizationSlug: organization.slug,
|
||||
authMethod: authProvider,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
authType: UserAliasType.SAML,
|
||||
isUserCompleted,
|
||||
...(relayState
|
||||
|
@ -331,8 +331,8 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
|
||||
*
|
||||
* This function operates in three main steps:
|
||||
* 1. Pruning snapshots from root/non-versioned folders.
|
||||
* 2. Pruning snapshots from versioned folders.
|
||||
* 1. Pruning snapshots from current folders.
|
||||
* 2. Pruning snapshots from non-current folders (versioned ones).
|
||||
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
|
||||
*
|
||||
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
|
||||
@ -350,7 +350,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
|
||||
try {
|
||||
let uuidOffset = "00000000-0000-0000-0000-000000000000";
|
||||
// cleanup snapshots from root/non-versioned folders
|
||||
// cleanup snapshots from current folders
|
||||
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
|
||||
while (true) {
|
||||
const folderBatch = await db(TableName.SecretFolder)
|
||||
@ -382,12 +382,11 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
|
||||
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
|
||||
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
|
||||
.whereNull(`${TableName.SecretFolder}.parentId`)
|
||||
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
|
||||
.delete();
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`Failed to prune snapshots from root/non-versioned folders in range ${batchEntries[0]}:${
|
||||
`Failed to prune snapshots from current folders in range ${batchEntries[0]}:${
|
||||
batchEntries[batchEntries.length - 1]
|
||||
}`
|
||||
);
|
||||
@ -399,7 +398,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
}
|
||||
|
||||
// cleanup snapshots from versioned folders
|
||||
// cleanup snapshots from non-current folders
|
||||
uuidOffset = "00000000-0000-0000-0000-000000000000";
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
@ -440,7 +439,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
|
||||
.delete();
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`Failed to prune snapshots from versioned folders in range ${batchEntries[0]}:${
|
||||
`Failed to prune snapshots from non-current folders in range ${batchEntries[0]}:${
|
||||
batchEntries[batchEntries.length - 1]
|
||||
}`
|
||||
);
|
||||
|
@ -343,7 +343,8 @@ export const RAW_SECRETS = {
|
||||
secretValue: "The value of the secret to create.",
|
||||
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
|
||||
type: "The type of the secret to create.",
|
||||
workspaceId: "The ID of the project to create the secret in."
|
||||
workspaceId: "The ID of the project to create the secret in.",
|
||||
tagIds: "The ID of the tags to be attached to the created secret."
|
||||
},
|
||||
GET: {
|
||||
secretName: "The name of the secret to get.",
|
||||
@ -364,7 +365,8 @@ export const RAW_SECRETS = {
|
||||
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
|
||||
type: "The type of the secret to update.",
|
||||
projectSlug: "The slug of the project to update the secret in.",
|
||||
workspaceId: "The ID of the project to update the secret in."
|
||||
workspaceId: "The ID of the project to update the secret in.",
|
||||
tagIds: "The ID of the tags to be attached to the updated secret."
|
||||
},
|
||||
DELETE: {
|
||||
secretName: "The name of the secret to delete.",
|
||||
@ -506,12 +508,27 @@ export const SECRET_TAGS = {
|
||||
LIST: {
|
||||
projectId: "The ID of the project to list tags from."
|
||||
},
|
||||
GET_TAG_BY_ID: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagId: "The ID of the tag to get details"
|
||||
},
|
||||
GET_TAG_BY_SLUG: {
|
||||
projectId: "The ID of the project to get tags from.",
|
||||
tagSlug: "The slug of the tag to get details"
|
||||
},
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the tag in.",
|
||||
name: "The name of the tag to create.",
|
||||
slug: "The slug of the tag to create.",
|
||||
color: "The color of the tag to create."
|
||||
},
|
||||
UPDATE: {
|
||||
projectId: "The ID of the project to update the tag in.",
|
||||
tagId: "The ID of the tag to get details",
|
||||
name: "The name of the tag to update.",
|
||||
slug: "The slug of the tag to update.",
|
||||
color: "The color of the tag to update."
|
||||
},
|
||||
DELETE: {
|
||||
tagId: "The ID of the tag to delete.",
|
||||
projectId: "The ID of the project to delete the tag from."
|
||||
|
@ -29,7 +29,7 @@ const envSchema = z
|
||||
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
|
||||
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
|
||||
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
|
||||
|
||||
BCRYPT_SALT_ROUND: z.number().default(12),
|
||||
NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
|
||||
SALT_ROUNDS: z.coerce.number().default(10),
|
||||
INITIAL_ORGANIZATION_NAME: zpStr(z.string().optional()),
|
||||
|
@ -6,7 +6,7 @@ import tweetnacl from "tweetnacl-util";
|
||||
|
||||
import { TUserEncryptionKeys } from "@app/db/schemas";
|
||||
|
||||
import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
|
||||
import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";
|
||||
|
||||
export const generateSrpServerKey = async (salt: string, verifier: string) => {
|
||||
// eslint-disable-next-line new-cap
|
||||
@ -97,7 +97,13 @@ export const generateUserSrpKeys = async (email: string, password: string) => {
|
||||
};
|
||||
};
|
||||
|
||||
export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
|
||||
export const getUserPrivateKey = async (
|
||||
password: string,
|
||||
user: Pick<
|
||||
TUserEncryptionKeys,
|
||||
"protectedKeyTag" | "protectedKey" | "protectedKeyIV" | "encryptedPrivateKey" | "iv" | "salt" | "tag"
|
||||
>
|
||||
) => {
|
||||
const derivedKey = await argon2.hash(password, {
|
||||
salt: Buffer.from(user.salt),
|
||||
memoryCost: 65536,
|
||||
@ -108,17 +114,18 @@ export const getUserPrivateKey = async (password: string, user: TUserEncryptionK
|
||||
raw: true
|
||||
});
|
||||
if (!derivedKey) throw new Error("Failed to derive key from password");
|
||||
const key = decryptSymmetric({
|
||||
ciphertext: user.protectedKey!,
|
||||
iv: user.protectedKeyIV!,
|
||||
tag: user.protectedKeyTag!,
|
||||
key: derivedKey.toString("base64")
|
||||
const key = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: user.protectedKey as string,
|
||||
iv: user.protectedKeyIV as string,
|
||||
tag: user.protectedKeyTag as string,
|
||||
key: derivedKey
|
||||
});
|
||||
const privateKey = decryptSymmetric({
|
||||
|
||||
const privateKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: user.encryptedPrivateKey,
|
||||
iv: user.iv,
|
||||
tag: user.tag,
|
||||
key
|
||||
key: Buffer.from(key, "hex")
|
||||
});
|
||||
return privateKey;
|
||||
};
|
||||
|
@ -59,6 +59,18 @@ export class BadRequestError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
export class NotFoundError extends Error {
|
||||
name: string;
|
||||
|
||||
error: unknown;
|
||||
|
||||
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
|
||||
super(message ?? "The requested entity is not found");
|
||||
this.name = name || "NotFound";
|
||||
this.error = error;
|
||||
}
|
||||
}
|
||||
|
||||
export class DisableRotationErrors extends Error {
|
||||
name: string;
|
||||
|
||||
|
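With the new `NotFoundError` class, services can signal a missing entity explicitly instead of overloading `BadRequestError`; the global error handler (see the error-handler hunk below) maps it to a 404 response of the shape `{ statusCode: 404, message, error: name }`. A hedged sketch of how a service might use it (the DAL shape is illustrative, not taken from the changeset):

```ts
import { NotFoundError } from "@app/lib/errors";

// Illustrative DAL shape only, so the sketch stays self-contained.
type TProjectDAL = {
  findById: (id: string) => Promise<{ id: string; name: string } | undefined>;
};

export const getProjectOrThrow = async (projectDAL: TProjectDAL, projectId: string) => {
  const project = await projectDAL.findById(projectId);
  if (!project) {
    // The fastify error handler turns this into an HTTP 404.
    throw new NotFoundError({ message: `Project with ID '${projectId}' not found` });
  }
  return project;
};
```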
@ -71,6 +71,7 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
|
||||
if (appCfg.isProductionMode) {
|
||||
await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());
|
||||
}
|
||||
|
||||
await server.register(helmet, { contentSecurityPolicy: false });
|
||||
|
||||
await server.register(maintenanceMode);
|
||||
|
@ -1,6 +1,7 @@
|
||||
import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-limit";
|
||||
import { Redis } from "ioredis";
|
||||
|
||||
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
|
||||
export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
|
||||
@ -21,14 +22,14 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
|
||||
// GET endpoints
|
||||
export const readLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: 600,
|
||||
max: () => getRateLimiterConfig().readLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
// POST, PATCH, PUT, DELETE endpoints
|
||||
export const writeLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: 200, // (too low, FA having issues so increasing it - maidul)
|
||||
max: () => getRateLimiterConfig().writeLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
@ -36,25 +37,25 @@ export const writeLimit: RateLimitOptions = {
|
||||
export const secretsLimit: RateLimitOptions = {
|
||||
// secrets, folders, secret imports
|
||||
timeWindow: 60 * 1000,
|
||||
max: 60,
|
||||
max: () => getRateLimiterConfig().secretsLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const authRateLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: 60,
|
||||
max: () => getRateLimiterConfig().authRateLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const inviteUserRateLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: 30,
|
||||
max: () => getRateLimiterConfig().inviteUserRateLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const mfaRateLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: 20,
|
||||
max: () => getRateLimiterConfig().mfaRateLimit,
|
||||
keyGenerator: (req) => {
|
||||
return req.headers.authorization?.split(" ")[1] || req.realIp;
|
||||
}
|
||||
@ -63,14 +64,21 @@ export const mfaRateLimit: RateLimitOptions = {
|
||||
export const creationLimit: RateLimitOptions = {
|
||||
// identity, project, org
|
||||
timeWindow: 60 * 1000,
|
||||
max: 30,
|
||||
max: () => getRateLimiterConfig().creationLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
// Public endpoints to avoid brute force attacks
|
||||
export const publicEndpointLimit: RateLimitOptions = {
|
||||
// Shared Secrets
|
||||
// Read Shared Secrets
|
||||
timeWindow: 60 * 1000,
|
||||
max: 30,
|
||||
max: () => getRateLimiterConfig().publicEndpointLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const publicSecretShareCreationLimit: RateLimitOptions = {
|
||||
// Create Shared Secrets
|
||||
timeWindow: 60 * 1000,
|
||||
max: 5,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
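The key change in this file is that every limiter's `max` became a function, so each request re-reads `getRateLimiterConfig()` and picks up values synced from the `rate_limit` table without a restart. A sketch of how these options are consumed, mirroring what main.ts and the new rate-limit router already do (the route, the `FastifyZodProvider` typing, and the `FastifyRateLimitOptions` import source are assumptions):

```ts
import ratelimiter, { FastifyRateLimitOptions } from "@fastify/rate-limit";

import { globalRateLimiterCfg, readLimit } from "@app/server/config/rateLimiter";

export const registerExampleRoute = async (server: FastifyZodProvider) => {
  // Global registration, as main.ts now does only in production mode.
  await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());

  server.route({
    method: "GET",
    url: "/example",
    // Per-route policy: `max` is resolved per request via getRateLimiterConfig().
    config: { rateLimit: readLimit },
    handler: async () => ({ ok: true })
  });
};
```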
@ -6,6 +6,7 @@ import {
|
||||
BadRequestError,
|
||||
DatabaseError,
|
||||
InternalServerError,
|
||||
NotFoundError,
|
||||
ScimRequestError,
|
||||
UnauthorizedError
|
||||
} from "@app/lib/errors";
|
||||
@ -15,6 +16,8 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
|
||||
req.log.error(error);
|
||||
if (error instanceof BadRequestError) {
|
||||
void res.status(400).send({ statusCode: 400, message: error.message, error: error.name });
|
||||
} else if (error instanceof NotFoundError) {
|
||||
void res.status(404).send({ statusCode: 404, message: error.message, error: error.name });
|
||||
} else if (error instanceof UnauthorizedError) {
|
||||
void res.status(403).send({ statusCode: 403, message: error.message, error: error.name });
|
||||
} else if (error instanceof DatabaseError || error instanceof InternalServerError) {
|
||||
|
@@ -1,3 +1,4 @@
import { CronJob } from "cron";
import { Knex } from "knex";
import { z } from "zod";

@@ -35,6 +36,8 @@ import { permissionDALFactory } from "@app/ee/services/permission/permission-dal
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
import { projectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { rateLimitDALFactory } from "@app/ee/services/rate-limit/rate-limit-dal";
import { rateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { samlConfigDALFactory } from "@app/ee/services/saml-config/saml-config-dal";
import { samlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { scimDALFactory } from "@app/ee/services/scim/scim-dal";
@@ -195,6 +198,7 @@ export const registerRoutes = async (
const incidentContactDAL = incidentContactDALFactory(db);
const orgRoleDAL = orgRoleDALFactory(db);
const superAdminDAL = superAdminDALFactory(db);
const rateLimitDAL = rateLimitDALFactory(db);
const apiKeyDAL = apiKeyDALFactory(db);

const projectDAL = projectDALFactory(db);
@@ -388,7 +392,9 @@ export const registerRoutes = async (
userDAL,
userAliasDAL,
permissionService,
licenseService
licenseService,
tokenService,
smtpService
});

const telemetryService = telemetryServiceFactory({
@@ -454,6 +460,10 @@ export const registerRoutes = async (
orgService,
keyStore
});
const rateLimitService = rateLimitServiceFactory({
rateLimitDAL,
licenseService
});
const apiKeyService = apiKeyServiceFactory({ apiKeyDAL, userDAL });

const secretScanningQueue = secretScanningQueueFactory({
@@ -926,6 +936,7 @@ export const registerRoutes = async (
secret: secretService,
secretReplication: secretReplicationService,
secretTag: secretTagService,
rateLimit: rateLimitService,
folder: folderService,
secretImport: secretImportService,
projectBot: projectBotService,
@@ -967,6 +978,14 @@ export const registerRoutes = async (
secretSharing: secretSharingService
});

const cronJobs: CronJob[] = [];
if (appCfg.isProductionMode) {
const rateLimitSyncJob = await rateLimitService.initializeBackgroundSync();
if (rateLimitSyncJob) {
cronJobs.push(rateLimitSyncJob);
}
}

server.decorate<FastifyZodProvider["store"]>("store", {
user: userDAL
});
@@ -1021,6 +1040,7 @@ export const registerRoutes = async (
await server.register(registerV3Routes, { prefix: "/api/v3" });

server.addHook("onClose", async () => {
cronJobs.forEach((job) => job.stop());
await telemetryService.flushAll();
});
};

@@ -79,6 +79,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
email: z.string().email().trim(),
password: z.string().trim(),
firstName: z.string().trim(),
lastName: z.string().trim().optional(),
protectedKey: z.string().trim(),

@@ -198,7 +198,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
}),
response: {
200: z.object({
identityKubernetesAuth: IdentityKubernetesAuthsSchema
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
})
}
},

@@ -51,7 +51,8 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
encryptedPrivateKeyIV: z.string().trim(),
encryptedPrivateKeyTag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim()
verifier: z.string().trim(),
password: z.string().trim()
}),
response: {
200: z.object({

@@ -309,4 +309,32 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
return { membership };
}
});

server.route({
method: "DELETE",
url: "/:workspaceId/leave",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
workspaceId: z.string().trim()
}),
response: {
200: z.object({
membership: ProjectMembershipsSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const membership = await server.services.projectMembership.leaveProject({
actorId: req.permission.id,
actor: req.permission.type,
projectId: req.params.workspaceId
});
return { membership };
}
});
};

@@ -1,7 +1,12 @@
import { z } from "zod";

import { SecretSharingSchema } from "@app/db/schemas";
import { publicEndpointLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import {
publicEndpointLimit,
publicSecretShareCreationLimit,
readLimit,
writeLimit
} from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

@@ -72,7 +77,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>

server.route({
method: "POST",
url: "/",
url: "/public",
config: {
rateLimit: writeLimit
},
@@ -82,9 +87,42 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z
.string()
.refine((date) => date === undefined || new Date(date) > new Date(), "Expires at should be a future date"),
expiresAt: z.string(),
expiresAfterViews: z.number()
}),
response: {
200: z.object({
id: z.string().uuid()
})
}
},
handler: async (req) => {
const { encryptedValue, iv, tag, hashedHex, expiresAt, expiresAfterViews } = req.body;
const sharedSecret = await req.server.services.secretSharing.createPublicSharedSecret({
encryptedValue,
iv,
tag,
hashedHex,
expiresAt: new Date(expiresAt),
expiresAfterViews
});
return { id: sharedSecret.id };
}
});

server.route({
method: "POST",
url: "/",
config: {
rateLimit: publicSecretShareCreationLimit
},
schema: {
body: z.object({
encryptedValue: z.string(),
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z.string(),
expiresAfterViews: z.number()
}),
response: {

@ -23,7 +23,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTags = await server.services.secretTag.getProjectTags({
|
||||
actor: req.permission.type,
|
||||
@ -36,6 +36,67 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/tags/:tagId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_ID.projectId),
|
||||
tagId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_ID.tagId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.getTagById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.tagId
|
||||
});
|
||||
return { workspaceTag };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/tags/slug/:tagSlug",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_SLUG.projectId),
|
||||
tagSlug: z.string().trim().describe(SECRET_TAGS.GET_TAG_BY_SLUG.tagSlug)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.getTagBySlug({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
slug: req.params.tagSlug,
|
||||
projectId: req.params.projectId
|
||||
});
|
||||
return { workspaceTag };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:projectId/tags",
|
||||
@ -57,7 +118,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.createTag({
|
||||
actor: req.permission.type,
|
||||
@ -71,6 +132,42 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:projectId/tags/:tagId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim().describe(SECRET_TAGS.UPDATE.projectId),
|
||||
tagId: z.string().trim().describe(SECRET_TAGS.UPDATE.tagId)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().describe(SECRET_TAGS.UPDATE.name),
|
||||
slug: z.string().trim().describe(SECRET_TAGS.UPDATE.slug),
|
||||
color: z.string().trim().describe(SECRET_TAGS.UPDATE.color)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.updateTag({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
id: req.params.tagId
|
||||
});
|
||||
return { workspaceTag };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:projectId/tags/:tagId",
|
||||
@ -88,7 +185,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const workspaceTag = await server.services.secretTag.deleteTag({
|
||||
actor: req.permission.type,
|
||||
|
@ -259,4 +259,50 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/token-exchange",
|
||||
method: "POST",
|
||||
schema: {
|
||||
body: z.object({
|
||||
providerAuthToken: z.string(),
|
||||
email: z.string()
|
||||
})
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
const userAgent = req.headers["user-agent"];
|
||||
if (!userAgent) throw new Error("user agent header is required");
|
||||
|
||||
const data = await server.services.login.oauth2TokenExchange({
|
||||
email: req.body.email,
|
||||
ip: req.realIp,
|
||||
userAgent,
|
||||
providerAuthToken: req.body.providerAuthToken
|
||||
});
|
||||
|
||||
if (data.isMfaEnabled) {
|
||||
return { mfaEnabled: true, token: data.token } as const; // for discriminated union
|
||||
}
|
||||
|
||||
void res.setCookie("jid", data.token.refresh, {
|
||||
httpOnly: true,
|
||||
path: "/",
|
||||
sameSite: "strict",
|
||||
secure: appCfg.HTTPS_ENABLED
|
||||
});
|
||||
|
||||
return {
|
||||
mfaEnabled: false,
|
||||
encryptionVersion: data.user.encryptionVersion,
|
||||
token: data.token.access,
|
||||
publicKey: data.user.publicKey,
|
||||
encryptedPrivateKey: data.user.encryptedPrivateKey,
|
||||
iv: data.user.iv,
|
||||
tag: data.user.tag,
|
||||
protectedKey: data.user.protectedKey || null,
|
||||
protectedKeyIV: data.user.protectedKeyIV || null,
|
||||
protectedKeyTag: data.user.protectedKeyTag || null
|
||||
} as const;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -19,7 +19,23 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true }))
|
||||
user: UsersSchema.merge(
|
||||
UserEncryptionKeysSchema.pick({
|
||||
clientPublicKey: true,
|
||||
serverPrivateKey: true,
|
||||
encryptionVersion: true,
|
||||
protectedKey: true,
|
||||
protectedKeyIV: true,
|
||||
protectedKeyTag: true,
|
||||
publicKey: true,
|
||||
encryptedPrivateKey: true,
|
||||
iv: true,
|
||||
tag: true,
|
||||
salt: true,
|
||||
verifier: true,
|
||||
userId: true
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -30,6 +46,26 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/private-key",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
privateKey: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
|
||||
handler: async (req) => {
|
||||
const privateKey = await server.services.user.getUserPrivateKey(req.permission.id);
|
||||
return { privateKey };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:userId/unlock",
|
||||
|
@ -255,7 +255,23 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
|
||||
description: "Retrieve the current user on the request",
|
||||
response: {
|
||||
200: z.object({
|
||||
user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true }))
|
||||
user: UsersSchema.merge(
|
||||
UserEncryptionKeysSchema.pick({
|
||||
clientPublicKey: true,
|
||||
serverPrivateKey: true,
|
||||
encryptionVersion: true,
|
||||
protectedKey: true,
|
||||
protectedKeyIV: true,
|
||||
protectedKeyTag: true,
|
||||
publicKey: true,
|
||||
encryptedPrivateKey: true,
|
||||
iv: true,
|
||||
tag: true,
|
||||
salt: true,
|
||||
verifier: true,
|
||||
userId: true
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@@ -81,7 +81,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
email: z.string().trim(),
providerAuthToken: z.string().trim().optional(),
clientProof: z.string().trim(),
captchaToken: z.string().trim().optional()
captchaToken: z.string().trim().optional(),
password: z.string().optional()
}),
response: {
200: z.discriminatedUnion("mfaEnabled", [
@@ -112,7 +113,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
ip: req.realIp,
userAgent,
providerAuthToken: req.body.providerAuthToken,
clientProof: req.body.clientProof
clientProof: req.body.clientProof,
password: req.body.password
});

if (data.isMfaEnabled) {

@ -8,7 +8,7 @@ import {
|
||||
SecretType,
|
||||
ServiceTokenScopes
|
||||
} from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
@ -259,18 +259,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
}
|
||||
return { secrets, imports };
|
||||
}
|
||||
});
|
||||
@ -306,7 +308,16 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secret: secretRawSchema
|
||||
secret: secretRawSchema.extend({
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
name: true,
|
||||
color: true
|
||||
})
|
||||
.array()
|
||||
.optional()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -358,18 +369,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: secret.workspace,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: secret.workspace,
|
||||
environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
}
|
||||
return { secret };
|
||||
}
|
||||
});
|
||||
@ -404,6 +417,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
.transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
|
||||
.describe(RAW_SECRETS.CREATE.secretValue),
|
||||
secretComment: z.string().trim().optional().default("").describe(RAW_SECRETS.CREATE.secretComment),
|
||||
tagIds: z.string().array().optional().describe(RAW_SECRETS.CREATE.tagIds),
|
||||
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.CREATE.skipMultilineEncoding),
|
||||
type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.CREATE.type)
|
||||
}),
|
||||
@ -427,7 +441,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
type: req.body.type,
|
||||
secretValue: req.body.secretValue,
|
||||
skipMultilineEncoding: req.body.skipMultilineEncoding,
|
||||
secretComment: req.body.secretComment
|
||||
secretComment: req.body.secretComment,
|
||||
tagIds: req.body.tagIds
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
@ -492,7 +507,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(RAW_SECRETS.UPDATE.secretPath),
|
||||
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding),
|
||||
type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.UPDATE.type)
|
||||
type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.UPDATE.type),
|
||||
tagIds: z.string().array().optional().describe(RAW_SECRETS.UPDATE.tagIds)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -513,7 +529,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
secretName: req.params.secretName,
|
||||
type: req.body.type,
|
||||
secretValue: req.body.secretValue,
|
||||
skipMultilineEncoding: req.body.skipMultilineEncoding
|
||||
skipMultilineEncoding: req.body.skipMultilineEncoding,
|
||||
tagIds: req.body.tagIds
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
@ -710,24 +727,22 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
// TODO: Move to telemetry plugin
|
||||
let shouldRecordK8Event = false;
|
||||
if (req.headers["user-agent"] === "k8-operatoer") {
|
||||
const randomNumber = Math.random();
|
||||
if (randomNumber > 0.95) {
|
||||
shouldRecordK8Event = true;
|
||||
}
|
||||
}
|
||||
// let shouldRecordK8Event = false;
|
||||
// if (req.headers["user-agent"] === "k8-operatoer") {
|
||||
// const randomNumber = Math.random();
|
||||
// if (randomNumber > 0.95) {
|
||||
// shouldRecordK8Event = true;
|
||||
// }
|
||||
// }
|
||||
|
||||
const shouldCapture =
|
||||
req.query.workspaceId !== "650e71fbae3e6c8572f436d4" &&
|
||||
(req.headers["user-agent"] !== "k8-operator" || shouldRecordK8Event);
|
||||
const approximateNumberTotalSecrets = secrets.length * 20;
|
||||
req.query.workspaceId !== "650e71fbae3e6c8572f436d4" && req.headers["user-agent"] !== "k8-operator";
|
||||
if (shouldCapture) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: shouldRecordK8Event ? approximateNumberTotalSecrets : secrets.length,
|
||||
numberOfSecrets: secrets.length,
|
||||
workspaceId: req.query.workspaceId,
|
||||
environment: req.query.environment,
|
||||
secretPath: req.query.secretPath,
|
||||
@ -804,18 +819,20 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: req.query.workspaceId,
|
||||
environment: req.query.environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
|
||||
await server.services.telemetry.sendPostHogEvents({
|
||||
event: PostHogEventTypes.SecretPulled,
|
||||
distinctId: getTelemetryDistinctId(req),
|
||||
properties: {
|
||||
numberOfSecrets: 1,
|
||||
workspaceId: req.query.workspaceId,
|
||||
environment: req.query.environment,
|
||||
secretPath: req.query.secretPath,
|
||||
channel: getUserAgentType(req.headers["user-agent"]),
|
||||
...req.auditLogInfo
|
||||
}
|
||||
});
|
||||
}
|
||||
return { secret };
|
||||
}
|
||||
});
|
||||
|
@@ -102,7 +102,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
verifier: z.string().trim(),
organizationName: z.string().trim().min(1),
providerAuthToken: z.string().trim().optional().nullish(),
attributionSource: z.string().trim().optional()
attributionSource: z.string().trim().optional(),
password: z.string()
}),
response: {
200: z.object({
@@ -167,6 +168,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
email: z.string().email().trim(),
password: z.string(),
firstName: z.string().trim(),
lastName: z.string().trim().optional(),
protectedKey: z.string().trim(),

@@ -15,10 +15,10 @@ export const validateProviderAuthToken = (providerToken: string, username?: stri
if (decodedToken.username !== username) throw new Error("Invalid auth credentials");

if (decodedToken.organizationId) {
return { orgId: decodedToken.organizationId, authMethod: decodedToken.authMethod };
return { orgId: decodedToken.organizationId, authMethod: decodedToken.authMethod, userName: decodedToken.username };
}

return { authMethod: decodedToken.authMethod, orgId: null };
return { authMethod: decodedToken.authMethod, orgId: null, userName: decodedToken.username };
};

export const validateSignUpAuthorization = (token: string, userId: string, validate = true) => {

@ -1,3 +1,4 @@
|
||||
import bcrypt from "bcrypt";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { TUsers, UserDeviceSchema } from "@app/db/schemas";
|
||||
@ -5,6 +6,8 @@ import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
|
||||
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { getUserPrivateKey } from "@app/lib/crypto/srp";
|
||||
import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
|
||||
@ -19,6 +22,7 @@ import {
|
||||
TLoginClientProofDTO,
|
||||
TLoginGenServerPublicKeyDTO,
|
||||
TOauthLoginDTO,
|
||||
TOauthTokenExchangeDTO,
|
||||
TVerifyMfaTokenDTO
|
||||
} from "./auth-login-type";
|
||||
import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType } from "./auth-type";
|
||||
@ -101,7 +105,7 @@ export const authLoginServiceFactory = ({
|
||||
user: TUsers;
|
||||
ip: string;
|
||||
userAgent: string;
|
||||
organizationId: string | undefined;
|
||||
organizationId?: string;
|
||||
authMethod: AuthMethod;
|
||||
}) => {
|
||||
const cfg = getConfig();
|
||||
@ -178,7 +182,8 @@ export const authLoginServiceFactory = ({
|
||||
ip,
|
||||
userAgent,
|
||||
providerAuthToken,
|
||||
captchaToken
|
||||
captchaToken,
|
||||
password
|
||||
}: TLoginClientProofDTO) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
@ -248,14 +253,29 @@ export const authLoginServiceFactory = ({
|
||||
throw new Error("Failed to authenticate. Try again?");
|
||||
}
|
||||
|
||||
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null
|
||||
});
|
||||
|
||||
await userDAL.updateById(userEnc.userId, {
|
||||
consecutiveFailedPasswordAttempts: 0
|
||||
});
|
||||
// from password decrypt the private key
|
||||
if (password) {
|
||||
const privateKey = await getUserPrivateKey(password, userEnc);
|
||||
const hashedPassword = await bcrypt.hash(password, cfg.BCRYPT_SALT_ROUND);
|
||||
const { iv, tag, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey);
|
||||
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null,
|
||||
hashedPassword,
|
||||
serverEncryptedPrivateKey: ciphertext,
|
||||
serverEncryptedPrivateKeyIV: iv,
|
||||
serverEncryptedPrivateKeyTag: tag,
|
||||
serverEncryptedPrivateKeyEncoding: encoding
|
||||
});
|
||||
} else {
|
||||
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null
|
||||
});
|
||||
}
|
||||
|
||||
// send multi factor auth token if they have it enabled
|
||||
if (userEnc.isMfaEnabled && userEnc.email) {
|
||||
@ -499,8 +519,14 @@ export const authLoginServiceFactory = ({
|
||||
authMethods: [authMethod],
|
||||
isGhost: false
|
||||
});
|
||||
} else {
|
||||
const isLinkingRequired = !user?.authMethods?.includes(authMethod);
|
||||
if (isLinkingRequired) {
|
||||
user = await userDAL.updateById(user.id, { authMethods: [...(user.authMethods || []), authMethod] });
|
||||
}
|
||||
}
|
||||
const isLinkingRequired = !user?.authMethods?.includes(authMethod);
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
const isUserCompleted = user.isAccepted;
|
||||
const providerAuthToken = jwt.sign(
|
||||
{
|
||||
@ -511,9 +537,9 @@ export const authLoginServiceFactory = ({
|
||||
isEmailVerified: user.isEmailVerified,
|
||||
firstName: user.firstName,
|
||||
lastName: user.lastName,
|
||||
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
|
||||
authMethod,
|
||||
isUserCompleted,
|
||||
isLinkingRequired,
|
||||
...(callbackPort
|
||||
? {
|
||||
callbackPort
|
||||
@ -525,10 +551,71 @@ export const authLoginServiceFactory = ({
|
||||
expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
|
||||
}
|
||||
);
|
||||
|
||||
return { isUserCompleted, providerAuthToken };
|
||||
};
|
||||
|
||||
/**
|
||||
* Handles OAuth2 token exchange for user login with private key handoff.
|
||||
*
|
||||
* The process involves exchanging a provider's authorization token for an Infisical access token.
|
||||
* The provider token is returned to the client, who then sends it back to obtain the Infisical access token.
|
||||
*
|
||||
* This approach is used instead of directly sending the access token for the following reasons:
|
||||
* 1. To facilitate easier logic changes from SRP OAuth to simple OAuth.
|
||||
* 2. To avoid attaching the access token to the URL, which could be logged. The provider token has a very short lifespan, reducing security risks.
|
||||
*/
|
||||
const oauth2TokenExchange = async ({ userAgent, ip, providerAuthToken, email }: TOauthTokenExchangeDTO) => {
|
||||
const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email);
|
||||
|
||||
const appCfg = getConfig();
|
||||
const { authMethod, userName } = decodedProviderToken;
|
||||
if (!userName) throw new BadRequestError({ message: "Missing user name" });
|
||||
const organizationId =
|
||||
(isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && decodedProviderToken.orgId
|
||||
? decodedProviderToken.orgId
|
||||
: undefined;
|
||||
|
||||
const userEnc = await userDAL.findUserEncKeyByUsername({
|
||||
username: email
|
||||
});
|
||||
if (!userEnc) throw new BadRequestError({ message: "Invalid token" });
|
||||
if (!userEnc.serverEncryptedPrivateKey)
|
||||
throw new BadRequestError({ message: "Key handoff incomplete. Please try logging in again." });
|
||||
// send multi factor auth token if they have it enabled
|
||||
if (userEnc.isMfaEnabled && userEnc.email) {
|
||||
enforceUserLockStatus(Boolean(userEnc.isLocked), userEnc.temporaryLockDateEnd);
|
||||
|
||||
const mfaToken = jwt.sign(
|
||||
{
|
||||
authMethod,
|
||||
authTokenType: AuthTokenType.MFA_TOKEN,
|
||||
userId: userEnc.userId
|
||||
},
|
||||
appCfg.AUTH_SECRET,
|
||||
{
|
||||
expiresIn: appCfg.JWT_MFA_LIFETIME
|
||||
}
|
||||
);
|
||||
|
||||
await sendUserMfaCode({
|
||||
userId: userEnc.userId,
|
||||
email: userEnc.email
|
||||
});
|
||||
|
||||
return { isMfaEnabled: true, token: mfaToken } as const;
|
||||
}
|
||||
|
||||
const token = await generateUserTokens({
|
||||
user: { ...userEnc, id: userEnc.userId },
|
||||
ip,
|
||||
userAgent,
|
||||
authMethod,
|
||||
organizationId
|
||||
});
|
||||
|
||||
return { token, isMfaEnabled: false, user: userEnc } as const;
|
||||
};
|
||||
|
||||
/*
|
||||
* logout user by incrementing the version by 1 meaning any old session will become invalid
|
||||
* as their number is behind
|
||||
@ -542,6 +629,7 @@ export const authLoginServiceFactory = ({
|
||||
loginExchangeClientProof,
|
||||
logout,
|
||||
oauth2Login,
|
||||
oauth2TokenExchange,
|
||||
resendMfaToken,
|
||||
verifyMfaToken,
|
||||
selectOrganization,
|
||||
|
@@ -13,6 +13,7 @@ export type TLoginClientProofDTO = {
ip: string;
userAgent: string;
captchaToken?: string;
password?: string;
};

export type TVerifyMfaTokenDTO = {
@@ -31,3 +32,10 @@ export type TOauthLoginDTO = {
authMethod: AuthMethod;
callbackPort?: string;
};

export type TOauthTokenExchangeDTO = {
providerAuthToken: string;
ip: string;
userAgent: string;
email: string;
};

@ -1,3 +1,4 @@
|
||||
import bcrypt from "bcrypt";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
|
||||
@ -57,7 +58,8 @@ export const authPaswordServiceFactory = ({
|
||||
encryptedPrivateKeyTag,
|
||||
salt,
|
||||
verifier,
|
||||
tokenVersionId
|
||||
tokenVersionId,
|
||||
password
|
||||
}: TChangePasswordDTO) => {
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(userId);
|
||||
if (!userEnc) throw new Error("Failed to find user");
|
||||
@ -76,6 +78,8 @@ export const authPaswordServiceFactory = ({
|
||||
);
|
||||
if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?");
|
||||
|
||||
const appCfg = getConfig();
|
||||
const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
|
||||
await userDAL.updateUserEncryptionByUserId(userId, {
|
||||
encryptionVersion: 2,
|
||||
protectedKey,
|
||||
@ -87,7 +91,8 @@ export const authPaswordServiceFactory = ({
|
||||
salt,
|
||||
verifier,
|
||||
serverPrivateKey: null,
|
||||
clientPublicKey: null
|
||||
clientPublicKey: null,
|
||||
hashedPassword
|
||||
});
|
||||
|
||||
if (tokenVersionId) {
|
||||
|
@ -10,6 +10,7 @@ export type TChangePasswordDTO = {
|
||||
salt: string;
|
||||
verifier: string;
|
||||
tokenVersionId?: string;
|
||||
password: string;
|
||||
};
|
||||
|
||||
export type TResetPasswordViaBackupKeyDTO = {
|
||||
|
@ -1,3 +1,4 @@
|
||||
import bcrypt from "bcrypt";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipStatus, TableName } from "@app/db/schemas";
|
||||
@ -6,6 +7,8 @@ import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-grou
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { getUserPrivateKey } from "@app/lib/crypto/srp";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { isDisposableEmail } from "@app/lib/validator";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
@ -119,6 +122,7 @@ export const authSignupServiceFactory = ({
|
||||
|
||||
const completeEmailAccountSignup = async ({
|
||||
email,
|
||||
password,
|
||||
firstName,
|
||||
lastName,
|
||||
providerAuthToken,
|
||||
@ -137,6 +141,7 @@ export const authSignupServiceFactory = ({
|
||||
userAgent,
|
||||
authorization
|
||||
}: TCompleteAccountSignupDTO) => {
|
||||
const appCfg = getConfig();
|
||||
const user = await userDAL.findOne({ username: email });
|
||||
if (!user || (user && user.isAccepted)) {
|
||||
throw new Error("Failed to complete account for complete user");
|
||||
@ -152,6 +157,17 @@ export const authSignupServiceFactory = ({
|
||||
validateSignUpAuthorization(authorization, user.id);
|
||||
}
|
||||
|
||||
const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
|
||||
const privateKey = await getUserPrivateKey(password, {
|
||||
salt,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag
|
||||
});
|
||||
const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(privateKey);
|
||||
const updateduser = await authDAL.transaction(async (tx) => {
|
||||
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
|
||||
if (!us) throw new Error("User not found");
|
||||
@ -166,7 +182,12 @@ export const authSignupServiceFactory = ({
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag
|
||||
tag: encryptedPrivateKeyTag,
|
||||
hashedPassword,
|
||||
serverEncryptedPrivateKeyEncoding: encoding,
|
||||
serverEncryptedPrivateKeyTag: tag,
|
||||
serverEncryptedPrivateKeyIV: iv,
|
||||
serverEncryptedPrivateKey: ciphertext
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -227,7 +248,6 @@ export const authSignupServiceFactory = ({
|
||||
userId: updateduser.info.id
|
||||
});
|
||||
if (!tokenSession) throw new Error("Failed to create token");
|
||||
const appCfg = getConfig();
|
||||
|
||||
const accessToken = jwt.sign(
|
||||
{
|
||||
@ -265,6 +285,7 @@ export const authSignupServiceFactory = ({
|
||||
ip,
|
||||
salt,
|
||||
email,
|
||||
password,
|
||||
verifier,
|
||||
firstName,
|
||||
publicKey,
|
||||
@ -295,6 +316,18 @@ export const authSignupServiceFactory = ({
|
||||
name: "complete account invite"
|
||||
});
|
||||
|
||||
const appCfg = getConfig();
|
||||
const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
|
||||
const privateKey = await getUserPrivateKey(password, {
|
||||
salt,
|
||||
protectedKey,
|
||||
protectedKeyIV,
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag
|
||||
});
|
||||
const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(privateKey);
|
||||
const updateduser = await authDAL.transaction(async (tx) => {
|
||||
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
|
||||
if (!us) throw new Error("User not found");
|
||||
@ -310,7 +343,12 @@ export const authSignupServiceFactory = ({
|
||||
protectedKeyTag,
|
||||
encryptedPrivateKey,
|
||||
iv: encryptedPrivateKeyIV,
|
||||
tag: encryptedPrivateKeyTag
|
||||
tag: encryptedPrivateKeyTag,
|
||||
hashedPassword,
|
||||
serverEncryptedPrivateKeyEncoding: encoding,
|
||||
serverEncryptedPrivateKeyTag: tag,
|
||||
serverEncryptedPrivateKeyIV: iv,
|
||||
serverEncryptedPrivateKey: ciphertext
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -343,7 +381,6 @@ export const authSignupServiceFactory = ({
|
||||
userId: updateduser.info.id
|
||||
});
|
||||
if (!tokenSession) throw new Error("Failed to create token");
|
||||
const appCfg = getConfig();
|
||||
|
||||
const accessToken = jwt.sign(
|
||||
{
|
||||
|
@ -1,5 +1,6 @@
|
||||
export type TCompleteAccountSignupDTO = {
|
||||
email: string;
|
||||
password: string;
|
||||
firstName: string;
|
||||
lastName?: string;
|
||||
protectedKey: string;
|
||||
@ -21,6 +22,7 @@ export type TCompleteAccountSignupDTO = {
|
||||
|
||||
export type TCompleteAccountInviteDTO = {
|
||||
email: string;
|
||||
password: string;
|
||||
firstName: string;
|
||||
lastName?: string;
|
||||
protectedKey: string;
|
||||
|
@ -442,7 +442,34 @@ export const identityKubernetesAuthServiceFactory = ({
|
||||
|
||||
const updatedKubernetesAuth = await identityKubernetesAuthDAL.updateById(identityKubernetesAuth.id, updateQuery);
|
||||
|
||||
return { ...updatedKubernetesAuth, orgId: identityMembershipOrg.orgId };
|
||||
const updatedCACert =
|
||||
updatedKubernetesAuth.encryptedCaCert && updatedKubernetesAuth.caCertIV && updatedKubernetesAuth.caCertTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedKubernetesAuth.encryptedCaCert,
|
||||
iv: updatedKubernetesAuth.caCertIV,
|
||||
tag: updatedKubernetesAuth.caCertTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
|
||||
const updatedTokenReviewerJwt =
|
||||
updatedKubernetesAuth.encryptedTokenReviewerJwt &&
|
||||
updatedKubernetesAuth.tokenReviewerJwtIV &&
|
||||
updatedKubernetesAuth.tokenReviewerJwtTag
|
||||
? decryptSymmetric({
|
||||
ciphertext: updatedKubernetesAuth.encryptedTokenReviewerJwt,
|
||||
iv: updatedKubernetesAuth.tokenReviewerJwtIV,
|
||||
tag: updatedKubernetesAuth.tokenReviewerJwtTag,
|
||||
key
|
||||
})
|
||||
: "";
|
||||
|
||||
return {
|
||||
...updatedKubernetesAuth,
|
||||
orgId: identityMembershipOrg.orgId,
|
||||
caCert: updatedCACert,
|
||||
tokenReviewerJwt: updatedTokenReviewerJwt
|
||||
};
|
||||
};
|
||||
|
||||
const getKubernetesAuth = async ({
|
||||
|
@ -18,7 +18,7 @@ import {
|
||||
UpdateSecretCommand
|
||||
} from "@aws-sdk/client-secrets-manager";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import AWS from "aws-sdk";
|
||||
import AWS, { AWSError } from "aws-sdk";
|
||||
import { AxiosError } from "axios";
|
||||
import sodium from "libsodium-wrappers";
|
||||
import isEqual from "lodash.isequal";
|
||||
@ -452,7 +452,11 @@ const syncSecretsAWSParameterStore = async ({
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
if (!accessId) return;
|
||||
let response: { isSynced: boolean; syncMessage: string } | null = null;
|
||||
|
||||
if (!accessId) {
|
||||
throw new Error("AWS access ID is required");
|
||||
}
|
||||
|
||||
const config = new AWS.Config({
|
||||
region: integration.region as string,
|
||||
@ -557,6 +561,11 @@ const syncSecretsAWSParameterStore = async ({
|
||||
`AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)`
|
||||
);
|
||||
}
|
||||
|
||||
response = {
|
||||
isSynced: false,
|
||||
syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store"
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -585,6 +594,8 @@ const syncSecretsAWSParameterStore = async ({
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
/**
|
||||
@ -603,7 +614,9 @@ const syncSecretsAWSSecretManager = async ({
|
||||
}) => {
|
||||
const metadata = z.record(z.any()).parse(integration.metadata || {});
|
||||
|
||||
if (!accessId) return;
|
||||
if (!accessId) {
|
||||
throw new Error("AWS access ID is required");
|
||||
}
|
||||
|
||||
const secretsManager = new SecretsManagerClient({
|
||||
region: integration.region as string,
|
||||
@ -722,7 +735,7 @@ const syncSecretsAWSSecretManager = async ({
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// case when AWS manager can't find the specified secret
|
||||
// case 1: when AWS manager can't find the specified secret
|
||||
if (err instanceof ResourceNotFoundException && secretsManager) {
|
||||
await secretsManager.send(
|
||||
new CreateSecretCommand({
|
||||
@ -734,6 +747,9 @@ const syncSecretsAWSSecretManager = async ({
|
||||
: []
|
||||
})
|
||||
);
|
||||
// case 2: something unexpected went wrong, so we'll throw the error to reflect the error in the integration sync status
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -753,14 +769,12 @@ const syncSecretsAWSSecretManager = async ({
|
||||
const syncSecretsHeroku = async ({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
}: {
|
||||
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
|
||||
integrationDAL: Pick<TIntegrationDALFactory, "updateById">;
|
||||
integration: TIntegrations & {
|
||||
projectId: string;
|
||||
environment: {
|
||||
@ -862,10 +876,6 @@ const syncSecretsHeroku = async ({
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
await integrationDAL.updateById(integration.id, {
|
||||
lastUsed: new Date()
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
@ -2656,7 +2666,9 @@ const syncSecretsHashiCorpVault = async ({
|
||||
accessId: string | null;
|
||||
accessToken: string;
|
||||
}) => {
|
||||
if (!accessId) return;
|
||||
if (!accessId) {
|
||||
throw new Error("Access ID is required");
|
||||
}
|
||||
|
||||
interface LoginAppRoleRes {
|
||||
auth: {
|
||||
@ -3486,6 +3498,8 @@ export const syncIntegrationSecrets = async ({
|
||||
accessToken: string;
|
||||
appendices?: { prefix: string; suffix: string };
|
||||
}) => {
|
||||
let response: { isSynced: boolean; syncMessage: string } | null = null;
|
||||
|
||||
switch (integration.integration) {
|
||||
case Integrations.GCP_SECRET_MANAGER:
|
||||
await syncSecretsGCPSecretManager({
|
||||
@ -3502,7 +3516,7 @@ export const syncIntegrationSecrets = async ({
|
||||
});
|
||||
break;
|
||||
case Integrations.AWS_PARAMETER_STORE:
|
||||
await syncSecretsAWSParameterStore({
|
||||
response = await syncSecretsAWSParameterStore({
|
||||
integration,
|
||||
secrets,
|
||||
accessId,
|
||||
@ -3521,7 +3535,6 @@ export const syncIntegrationSecrets = async ({
|
||||
await syncSecretsHeroku({
|
||||
createManySecretsRawFn,
|
||||
updateManySecretsRawFn,
|
||||
integrationDAL,
|
||||
integration,
|
||||
secrets,
|
||||
accessToken
|
||||
@ -3727,4 +3740,6 @@ export const syncIntegrationSecrets = async ({
|
||||
default:
|
||||
throw new BadRequestError({ message: "Invalid integration" });
|
||||
}
|
||||
|
||||
return response;
|
||||
};
|
||||
|
@ -36,6 +36,7 @@ import {
|
||||
TDeleteProjectMembershipsDTO,
|
||||
TGetProjectMembershipByUsernameDTO,
|
||||
TGetProjectMembershipDTO,
|
||||
TLeaveProjectDTO,
|
||||
TUpdateProjectMembershipDTO
|
||||
} from "./project-membership-types";
|
||||
import { TProjectUserMembershipRoleDALFactory } from "./project-user-membership-role-dal";
|
||||
@ -531,6 +532,53 @@ export const projectMembershipServiceFactory = ({
|
||||
return memberships;
|
||||
};
|
||||
|
||||
const leaveProject = async ({ projectId, actorId, actor }: TLeaveProjectDTO) => {
|
||||
if (actor !== ActorType.USER) {
|
||||
throw new BadRequestError({ message: "Only users can leave projects" });
|
||||
}
|
||||
|
||||
const project = await projectDAL.findById(projectId);
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
if (project.version !== ProjectVersion.V2) {
|
||||
throw new BadRequestError({
|
||||
message: "Please ask your project administrator to upgrade the project before leaving."
|
||||
});
|
||||
}
|
||||
|
||||
const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
|
||||
|
||||
if (!projectMembers?.length) {
|
||||
throw new BadRequestError({ message: "Failed to find project members" });
|
||||
}
|
||||
|
||||
if (projectMembers.length < 2) {
|
||||
throw new BadRequestError({ message: "You cannot leave the project as you are the only member" });
|
||||
}
|
||||
|
||||
const adminMembers = projectMembers.filter(
|
||||
(member) => member.roles.map((r) => r.role).includes("admin") && member.userId !== actorId
|
||||
);
|
||||
if (!adminMembers.length) {
|
||||
throw new BadRequestError({
|
||||
message: "You cannot leave the project as you are the only admin. Promote another user to admin before leaving."
|
||||
});
|
||||
}
|
||||
|
||||
const deletedMembership = (
|
||||
await projectMembershipDAL.delete({
|
||||
projectId: project.id,
|
||||
userId: actorId
|
||||
})
|
||||
)?.[0];
|
||||
|
||||
if (!deletedMembership) {
|
||||
throw new BadRequestError({ message: "Failed to leave project" });
|
||||
}
|
||||
|
||||
return deletedMembership;
|
||||
};
|
||||
|
||||
return {
|
||||
getProjectMemberships,
|
||||
getProjectMembershipByUsername,
|
||||
@ -538,6 +586,7 @@ export const projectMembershipServiceFactory = ({
|
||||
addUsersToProjectNonE2EE,
|
||||
deleteProjectMemberships,
|
||||
deleteProjectMembership, // TODO: Remove this
|
||||
addUsersToProject
|
||||
addUsersToProject,
|
||||
leaveProject
|
||||
};
|
||||
};
|
||||
|
@@ -1,6 +1,7 @@
import { TProjectPermission } from "@app/lib/types";

export type TGetProjectMembershipDTO = TProjectPermission;
export type TLeaveProjectDTO = Omit<TProjectPermission, "actorOrgId" | "actorAuthMethod">;
export enum ProjectUserMembershipTemporaryMode {
Relative = "relative"
}

@ -1,8 +1,13 @@
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { UnauthorizedError } from "@app/lib/errors";
|
||||
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
|
||||
import { TSecretSharingDALFactory } from "./secret-sharing-dal";
|
||||
import { TCreateSharedSecretDTO, TDeleteSharedSecretDTO, TSharedSecretPermission } from "./secret-sharing-types";
|
||||
import {
|
||||
TCreatePublicSharedSecretDTO,
|
||||
TCreateSharedSecretDTO,
|
||||
TDeleteSharedSecretDTO,
|
||||
TSharedSecretPermission
|
||||
} from "./secret-sharing-types";
|
||||
|
||||
type TSecretSharingServiceFactoryDep = {
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
@ -31,6 +36,24 @@ export const secretSharingServiceFactory = ({
|
||||
} = createSharedSecretInput;
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
if (!permission) throw new UnauthorizedError({ name: "User not in org" });
|
||||
|
||||
if (new Date(expiresAt) < new Date()) {
|
||||
throw new BadRequestError({ message: "Expiration date cannot be in the past" });
|
||||
}
|
||||
|
||||
// Limit Expiry Time to 1 month
|
||||
const expiryTime = new Date(expiresAt).getTime();
|
||||
const currentTime = new Date().getTime();
|
||||
const thirtyDays = 30 * 24 * 60 * 60 * 1000;
|
||||
if (expiryTime - currentTime > thirtyDays) {
|
||||
throw new BadRequestError({ message: "Expiration date cannot be more than 30 days" });
|
||||
}
|
||||
|
||||
// Limit Input ciphertext length to 13000 (equivalent to 10,000 characters of Plaintext)
|
||||
if (encryptedValue.length > 13000) {
|
||||
throw new BadRequestError({ message: "Shared secret value too long" });
|
||||
}
|
||||
|
||||
const newSharedSecret = await secretSharingDAL.create({
|
||||
encryptedValue,
|
||||
iv,
|
||||
@ -44,6 +67,36 @@ export const secretSharingServiceFactory = ({
|
||||
return { id: newSharedSecret.id };
|
||||
};
|
||||
|
||||
const createPublicSharedSecret = async (createSharedSecretInput: TCreatePublicSharedSecretDTO) => {
|
||||
const { encryptedValue, iv, tag, hashedHex, expiresAt, expiresAfterViews } = createSharedSecretInput;
|
||||
if (new Date(expiresAt) < new Date()) {
|
||||
throw new BadRequestError({ message: "Expiration date cannot be in the past" });
|
||||
}
|
||||
|
||||
// Limit Expiry Time to 1 month
|
||||
const expiryTime = new Date(expiresAt).getTime();
|
||||
const currentTime = new Date().getTime();
|
||||
const thirtyDays = 30 * 24 * 60 * 60 * 1000;
|
||||
if (expiryTime - currentTime > thirtyDays) {
|
||||
throw new BadRequestError({ message: "Expiration date cannot exceed more than 30 days" });
|
||||
}
|
||||
|
||||
// Limit Input ciphertext length to 13000 (equivalent to 10,000 characters of Plaintext)
|
||||
if (encryptedValue.length > 13000) {
|
||||
throw new BadRequestError({ message: "Shared secret value too long" });
|
||||
}
|
||||
|
||||
const newSharedSecret = await secretSharingDAL.create({
|
||||
encryptedValue,
|
||||
iv,
|
||||
tag,
|
||||
hashedHex,
|
||||
expiresAt,
|
||||
expiresAfterViews
|
||||
});
|
||||
return { id: newSharedSecret.id };
|
||||
};
|
||||
|
||||
const getSharedSecrets = async (getSharedSecretsInput: TSharedSecretPermission) => {
|
||||
const { actor, actorId, orgId, actorAuthMethod, actorOrgId } = getSharedSecretsInput;
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
@ -54,6 +107,7 @@ export const secretSharingServiceFactory = ({
|
||||
|
||||
const getActiveSharedSecretByIdAndHashedHex = async (sharedSecretId: string, hashedHex: string) => {
|
||||
const sharedSecret = await secretSharingDAL.findOne({ id: sharedSecretId, hashedHex });
|
||||
if (!sharedSecret) return;
|
||||
if (sharedSecret.expiresAt && sharedSecret.expiresAt < new Date()) {
|
||||
return;
|
||||
}
|
||||
@ -77,6 +131,7 @@ export const secretSharingServiceFactory = ({
|
||||
|
||||
return {
|
||||
createSharedSecret,
|
||||
createPublicSharedSecret,
|
||||
getSharedSecrets,
|
||||
deleteSharedSecretById,
|
||||
getActiveSharedSecretByIdAndHashedHex
|
||||
|
@@ -8,14 +8,16 @@ export type TSharedSecretPermission = {
orgId: string;
};

export type TCreateSharedSecretDTO = {
export type TCreatePublicSharedSecretDTO = {
encryptedValue: string;
iv: string;
tag: string;
hashedHex: string;
expiresAt: Date;
expiresAfterViews: number;
} & TSharedSecretPermission;
};

export type TCreateSharedSecretDTO = TSharedSecretPermission & TCreatePublicSharedSecretDTO;

export type TDeleteSharedSecretDTO = {
sharedSecretId: string;

@ -2,10 +2,17 @@ import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
|
||||
import { TSecretTagDALFactory } from "./secret-tag-dal";
|
||||
import { TCreateTagDTO, TDeleteTagDTO, TListProjectTagsDTO } from "./secret-tag-types";
|
||||
import {
|
||||
TCreateTagDTO,
|
||||
TDeleteTagDTO,
|
||||
TGetTagByIdDTO,
|
||||
TGetTagBySlugDTO,
|
||||
TListProjectTagsDTO,
|
||||
TUpdateTagDTO
|
||||
} from "./secret-tag-types";
|
||||
|
||||
type TSecretTagServiceFactoryDep = {
|
||||
secretTagDAL: TSecretTagDALFactory;
|
||||
@ -42,11 +49,34 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
|
||||
name,
|
||||
slug,
|
||||
color,
|
||||
createdBy: actorId
|
||||
createdBy: actorId,
|
||||
createdByActorType: actor
|
||||
});
|
||||
return newTag;
|
||||
};
|
||||
|
||||
const updateTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, name, color, slug }: TUpdateTagDTO) => {
|
||||
const tag = await secretTagDAL.findById(id);
|
||||
if (!tag) throw new BadRequestError({ message: "Tag doesn't exist" });
|
||||
|
||||
if (slug) {
|
||||
const existingTag = await secretTagDAL.findOne({ slug, projectId: tag.projectId });
|
||||
if (existingTag && existingTag.id !== tag.id) throw new BadRequestError({ message: "Tag already exist" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
tag.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags);
|
||||
|
||||
const updatedTag = await secretTagDAL.updateById(tag.id, { name, color, slug });
|
||||
return updatedTag;
|
||||
};
|
||||
|
||||
const deleteTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TDeleteTagDTO) => {
|
||||
const tag = await secretTagDAL.findById(id);
|
||||
if (!tag) throw new BadRequestError({ message: "Tag doesn't exist" });
|
||||
@ -64,6 +94,38 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
|
||||
return deletedTag;
|
||||
};
|
||||
|
||||
const getTagById = async ({ actorId, actor, actorOrgId, actorAuthMethod, id }: TGetTagByIdDTO) => {
|
||||
const tag = await secretTagDAL.findById(id);
|
||||
if (!tag) throw new NotFoundError({ message: "Tag doesn't exist" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
tag.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
|
||||
|
||||
return tag;
|
||||
};
|
||||
|
||||
const getTagBySlug = async ({ actorId, actor, actorOrgId, actorAuthMethod, slug, projectId }: TGetTagBySlugDTO) => {
|
||||
const tag = await secretTagDAL.findOne({ projectId, slug });
|
||||
if (!tag) throw new NotFoundError({ message: "Tag doesn't exist" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
tag.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
|
||||
|
||||
return tag;
|
||||
};
|
||||
|
||||
const getProjectTags = async ({ actor, actorId, actorOrgId, actorAuthMethod, projectId }: TListProjectTagsDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@ -78,5 +140,5 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
|
||||
return tags;
|
||||
};
|
||||
|
||||
return { createTag, deleteTag, getProjectTags };
|
||||
return { createTag, deleteTag, getProjectTags, getTagById, getTagBySlug, updateTag };
|
||||
};
|
||||
|
@@ -6,6 +6,21 @@ export type TCreateTagDTO = {
  slug: string;
} & TProjectPermission;

export type TUpdateTagDTO = {
  id: string;
  name?: string;
  slug?: string;
  color?: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetTagByIdDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetTagBySlugDTO = {
  slug: string;
} & TProjectPermission;

export type TDeleteTagDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;
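
For orientation only, a rough sketch of how the new tag operations might be driven once the factory above is constructed. The dependency objects, actor fields, module path and tag values are hypothetical placeholders and are not part of these commits.

// Illustrative sketch; not part of the diff. Run in a module context (top-level await).
import { secretTagServiceFactory } from "./secret-tag-service"; // path assumed

declare const secretTagDAL: any; // stands in for TSecretTagDALFactory
declare const permissionService: any; // stands in for the project permission service
declare const actorCtx: { actor: any; actorId: string; actorOrgId: string; actorAuthMethod: any };

const secretTagService = secretTagServiceFactory({ secretTagDAL, permissionService });

// Rename a tag; per the hunk above, a slug that already exists in the same
// project is rejected before the Edit permission check runs.
await secretTagService.updateTag({
  id: "tag-id",
  name: "backend",
  slug: "backend",
  color: "#10b981",
  ...actorCtx
});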
@@ -311,6 +311,40 @@ export const secretDALFactory = (db: TDbClient) => {
    }
  };

  const findOneWithTags = async (filter: Partial<TSecrets>, tx?: Knex) => {
    try {
      const rawDocs = await (tx || db)(TableName.Secret)
        .where(filter)
        .leftJoin(TableName.JnSecretTag, `${TableName.Secret}.id`, `${TableName.JnSecretTag}.${TableName.Secret}Id`)
        .leftJoin(TableName.SecretTag, `${TableName.JnSecretTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`)
        .select(selectAllTableCols(TableName.Secret))
        .select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
        .select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
        .select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
        .select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
      const docs = sqlNestRelationships({
        data: rawDocs,
        key: "id",
        parentMapper: (el) => ({ _id: el.id, ...SecretsSchema.parse(el) }),
        childrenMapper: [
          {
            key: "tagId",
            label: "tags" as const,
            mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
              id,
              color,
              slug,
              name
            })
          }
        ]
      });
      return docs?.[0];
    } catch (error) {
      throw new DatabaseError({ error, name: "FindOneWIthTags" });
    }
  };

  return {
    ...secretOrm,
    update,
@@ -318,6 +352,7 @@ export const secretDALFactory = (db: TDbClient) => {
    deleteMany,
    bulkUpdateNoVersionIncrement,
    getSecretTags,
    findOneWithTags,
    findByFolderId,
    findByFolderIds,
    findByBlindIndexes,
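
The findOneWithTags query above relies on sqlNestRelationships to fold the left-joined tag columns back into a single secret carrying a tags array. A simplified, self-contained illustration of that nesting step (not the helper's actual implementation, which is generic over parent/children mappers) could look like this:

// Simplified illustration only; hard-coded to the tag columns selected above.
type JoinedRow = {
  id: string;
  tagId: string | null;
  tagColor: string | null;
  tagSlug: string | null;
  tagName: string | null;
};

const nestTags = (rows: JoinedRow[]) => {
  if (!rows.length) return undefined;
  const { id } = rows[0];
  return {
    id,
    tags: rows
      .filter((r) => r.tagId !== null) // the left join yields null tag columns when a secret has no tags
      .map((r) => ({ id: r.tagId, color: r.tagColor, slug: r.tagSlug, name: r.tagName }))
  };
};

// nestTags([{ id: "s1", tagId: "t1", tagColor: null, tagSlug: "api", tagName: "api" }])
// => { id: "s1", tags: [{ id: "t1", color: null, slug: "api", name: "api" }] }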
@@ -356,7 +356,17 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
};

export const decryptSecretRaw = (
  secret: TSecrets & { workspace: string; environment: string; secretPath: string },
  secret: TSecrets & {
    workspace: string;
    environment: string;
    secretPath: string;
    tags?: {
      id: string;
      slug: string;
      color?: string | null;
      name: string;
    }[];
  },
  key: string
) => {
  const secretKey = decryptSymmetric128BitHexKeyUTF8({
@@ -396,6 +406,7 @@ export const decryptSecretRaw = (
    _id: secret.id,
    id: secret.id,
    user: secret.userId,
    tags: secret.tags,
    skipMultilineEncoding: secret.skipMultilineEncoding
  };
};
@@ -421,94 +421,88 @@ export const secretQueueFactory = ({

    const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
    if (!folder) {
      logger.error(new Error("Secret path not found"));
      return;
      throw new Error("Secret path not found");
    }

    // start syncing all linked imports also
    if (depth < MAX_SYNC_SECRET_DEPTH) {
      // find all imports made with the given environment and secret path
      const linkSourceDto = {
        projectId,
        importEnv: folder.environment.id,
        importPath: secretPath,
        isReplication: false
      };
      const imports = await secretImportDAL.find(linkSourceDto);
      // find all imports made with the given environment and secret path
      const linkSourceDto = {
        projectId,
        importEnv: folder.environment.id,
        importPath: secretPath,
        isReplication: false
      };
      const imports = await secretImportDAL.find(linkSourceDto);

      if (imports.length) {
        // keep calling sync secret for all the imports made
        const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
        const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
        const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
        logger.info(
          `getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
        );
        await Promise.all(
          imports
            .filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
            // filter out already synced ones
            .filter(
              ({ folderId }) =>
                !deDupeQueue[
                  uniqueSecretQueueKey(
                    foldersGroupedById[folderId][0]?.environmentSlug as string,
                    foldersGroupedById[folderId][0]?.path as string
                  )
                ]
            )
            .map(({ folderId }) =>
              syncSecrets({
                projectId,
                secretPath: foldersGroupedById[folderId][0]?.path as string,
                environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
                _deDupeQueue: deDupeQueue,
                _depth: depth + 1,
                excludeReplication: true
              })
            )
        );
      }

      const secretReferences = await secretDAL.findReferencedSecretReferences(
        projectId,
        folder.environment.slug,
        secretPath
      if (imports.length) {
        // keep calling sync secret for all the imports made
        const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
        const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
        const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
        logger.info(
          `getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
        );
        await Promise.all(
          imports
            .filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
            // filter out already synced ones
            .filter(
              ({ folderId }) =>
                !deDupeQueue[
                  uniqueSecretQueueKey(
                    foldersGroupedById[folderId][0]?.environmentSlug as string,
                    foldersGroupedById[folderId][0]?.path as string
                  )
                ]
            )
            .map(({ folderId }) =>
              syncSecrets({
                projectId,
                secretPath: foldersGroupedById[folderId][0]?.path as string,
                environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
                _deDupeQueue: deDupeQueue,
                _depth: depth + 1,
                excludeReplication: true
              })
            )
        );
      }

      const secretReferences = await secretDAL.findReferencedSecretReferences(
        projectId,
        folder.environment.slug,
        secretPath
      );
      if (secretReferences.length) {
        const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId);
        const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds);
        const referencedFoldersGroupedById = groupBy(referencedFolders.filter(Boolean), (i) => i?.id as string);
        logger.info(
          `getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
        );
        await Promise.all(
          secretReferences
            .filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0]?.path))
            // filter out already synced ones
            .filter(
              ({ folderId }) =>
                !deDupeQueue[
                  uniqueSecretQueueKey(
                    referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
                    referencedFoldersGroupedById[folderId][0]?.path as string
                  )
                ]
            )
            .map(({ folderId }) =>
              syncSecrets({
                projectId,
                secretPath: referencedFoldersGroupedById[folderId][0]?.path as string,
                environmentSlug: referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
                _deDupeQueue: deDupeQueue,
                _depth: depth + 1,
                excludeReplication: true
              })
            )
        );
      if (secretReferences.length) {
        const referencedFolderIds = unique(secretReferences, (i) => i.folderId).map(({ folderId }) => folderId);
        const referencedFolders = await folderDAL.findSecretPathByFolderIds(projectId, referencedFolderIds);
        const referencedFoldersGroupedById = groupBy(referencedFolders.filter(Boolean), (i) => i?.id as string);
        logger.info(
          `getIntegrationSecrets: Syncing secret due to reference change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
        );
        await Promise.all(
          secretReferences
            .filter(({ folderId }) => Boolean(referencedFoldersGroupedById[folderId][0]?.path))
            // filter out already synced ones
            .filter(
              ({ folderId }) =>
                !deDupeQueue[
                  uniqueSecretQueueKey(
                    referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
                    referencedFoldersGroupedById[folderId][0]?.path as string
                  )
                ]
            )
            .map(({ folderId }) =>
              syncSecrets({
                projectId,
                secretPath: referencedFoldersGroupedById[folderId][0]?.path as string,
                environmentSlug: referencedFoldersGroupedById[folderId][0]?.environmentSlug as string,
                _deDupeQueue: deDupeQueue,
                _depth: depth + 1,
                excludeReplication: true
              })
            )
        );
      }
    } else {
      logger.info(`getIntegrationSecrets: Secret depth exceeded for [projectId=${projectId}] [folderId=${folder.id}]`);
    }

    const integrations = await integrationDAL.findByProjectIdV2(projectId, environment); // note: returns array of integrations + integration auths in this environment
@@ -550,7 +544,7 @@ export const secretQueueFactory = ({
    }

    try {
      await syncIntegrationSecrets({
      const response = await syncIntegrationSecrets({
        createManySecretsRawFn,
        updateManySecretsRawFn,
        integrationDAL,
@@ -568,13 +562,15 @@ export const secretQueueFactory = ({
      await integrationDAL.updateById(integration.id, {
        lastSyncJobId: job.id,
        lastUsed: new Date(),
        syncMessage: "",
        isSynced: true
        syncMessage: response?.syncMessage ?? "",
        isSynced: response?.isSynced ?? true
      });
    } catch (err: unknown) {
    } catch (err) {
      logger.info("Secret integration sync error: %o", err);

      const message =
        err instanceof AxiosError ? JSON.stringify((err as AxiosError)?.response?.data) : (err as Error)?.message;
        (err instanceof AxiosError ? JSON.stringify(err?.response?.data) : (err as Error)?.message) ||
        "Unknown error occurred.";

      await integrationDAL.updateById(integration.id, {
        lastSyncJobId: job.id,
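
The reworked sync hunk above bounds the recursive import/reference fan-out with a depth check and a de-duplication map keyed by environment and path. A stripped-down model of that recursion, with the queue machinery and DAL lookups replaced by a callback and an assumed depth limit, is sketched below:

// Minimal sketch of depth-limited, de-duplicated recursive syncing; not the real syncSecrets.
const MAX_SYNC_SECRET_DEPTH = 5; // assumed value; the diff only shows the constant's name

type Target = { environmentSlug: string; path: string };
const uniqueSecretQueueKey = (env: string, path: string) => `${env}-${path}`;

async function syncSketch(
  target: Target,
  findLinkedTargets: (t: Target) => Promise<Target[]>,
  deDupeQueue: Record<string, boolean> = {},
  depth = 1
): Promise<void> {
  if (depth >= MAX_SYNC_SECRET_DEPTH) return; // mirrors the `depth < MAX_SYNC_SECRET_DEPTH` guard
  deDupeQueue[uniqueSecretQueueKey(target.environmentSlug, target.path)] = true;
  const linked = await findLinkedTargets(target);
  await Promise.all(
    linked
      .filter((t) => !deDupeQueue[uniqueSecretQueueKey(t.environmentSlug, t.path)]) // skip already-synced folders
      .map((t) => syncSketch(t, findLinkedTargets, deDupeQueue, depth + 1))
  );
}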
@@ -608,7 +608,7 @@ export const secretServiceFactory = ({
    }

    const secret = await (version === undefined
      ? secretDAL.findOne({
      ? secretDAL.findOneWithTags({
          folderId,
          type: secretType,
          userId: secretType === SecretType.Personal ? actorId : null,
@@ -1120,7 +1120,8 @@ export const secretServiceFactory = ({
    secretPath,
    secretValue,
    secretComment,
    skipMultilineEncoding
    skipMultilineEncoding,
    tagIds
  }: TCreateSecretRawDTO) => {
    const botKey = await projectBotService.getBotKey(projectId);
    if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
@@ -1148,7 +1149,8 @@ export const secretServiceFactory = ({
      secretCommentCiphertext: secretCommentEncrypted.ciphertext,
      secretCommentIV: secretCommentEncrypted.iv,
      secretCommentTag: secretCommentEncrypted.tag,
      skipMultilineEncoding
      skipMultilineEncoding,
      tags: tagIds
    });

    return decryptSecretRaw(secret, botKey);
@@ -1165,7 +1167,8 @@ export const secretServiceFactory = ({
    type,
    secretPath,
    secretValue,
    skipMultilineEncoding
    skipMultilineEncoding,
    tagIds
  }: TUpdateSecretRawDTO) => {
    const botKey = await projectBotService.getBotKey(projectId);
    if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
@@ -1185,7 +1188,8 @@ export const secretServiceFactory = ({
      secretValueCiphertext: secretValueEncrypted.ciphertext,
      secretValueIV: secretValueEncrypted.iv,
      secretValueTag: secretValueEncrypted.tag,
      skipMultilineEncoding
      skipMultilineEncoding,
      tags: tagIds
    });

    await snapshotService.performSnapshot(secret.folderId);
@@ -164,6 +164,7 @@ export type TCreateSecretRawDTO = TProjectPermission & {
  secretName: string;
  secretValue: string;
  type: SecretType;
  tagIds?: string[];
  secretComment?: string;
  skipMultilineEncoding?: boolean;
};
@@ -174,6 +175,7 @@ export type TUpdateSecretRawDTO = TProjectPermission & {
  secretName: string;
  secretValue?: string;
  type: SecretType;
  tagIds?: string[];
  skipMultilineEncoding?: boolean;
  secretReminderRepeatDays?: number | null;
  secretReminderNote?: string | null;
@@ -1,6 +1,10 @@
import bcrypt from "bcrypt";

import { TSuperAdmin, TSuperAdminUpdate } from "@app/db/schemas";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { getUserPrivateKey } from "@app/lib/crypto/srp";
import { BadRequestError } from "@app/lib/errors";

import { TAuthLoginFactory } from "../auth/auth-login-service";
@@ -77,6 +81,7 @@ export const superAdminServiceFactory = ({
    firstName,
    salt,
    email,
    password,
    verifier,
    publicKey,
    protectedKey,
@@ -92,6 +97,17 @@ export const superAdminServiceFactory = ({
    const existingUser = await userDAL.findOne({ email });
    if (existingUser) throw new BadRequestError({ name: "Admin sign up", message: "User already exist" });

    const privateKey = await getUserPrivateKey(password, {
      salt,
      protectedKey,
      protectedKeyIV,
      protectedKeyTag,
      encryptedPrivateKey,
      iv: encryptedPrivateKeyIV,
      tag: encryptedPrivateKeyTag
    });
    const hashedPassword = await bcrypt.hash(password, appCfg.BCRYPT_SALT_ROUND);
    const { iv, tag, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey);
    const userInfo = await userDAL.transaction(async (tx) => {
      const newUser = await userDAL.create(
        {
@@ -119,7 +135,12 @@ export const superAdminServiceFactory = ({
          iv: encryptedPrivateKeyIV,
          tag: encryptedPrivateKeyTag,
          verifier,
          userId: newUser.id
          userId: newUser.id,
          hashedPassword,
          serverEncryptedPrivateKey: ciphertext,
          serverEncryptedPrivateKeyIV: iv,
          serverEncryptedPrivateKeyTag: tag,
          serverEncryptedPrivateKeyEncoding: encoding
        },
        tx
      );
@@ -1,5 +1,6 @@
export type TAdminSignUpDTO = {
  email: string;
  password: string;
  publicKey: string;
  salt: string;
  lastName?: string;
@@ -1,3 +1,5 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -230,6 +232,21 @@ export const userServiceFactory = ({
    );
  };

  const getUserPrivateKey = async (userId: string) => {
    const user = await userDAL.findUserEncKeyByUserId(userId);
    if (!user?.serverEncryptedPrivateKey || !user.serverEncryptedPrivateKeyIV || !user.serverEncryptedPrivateKeyTag) {
      throw new BadRequestError({ message: "Private key not found. Please login again" });
    }
    const privateKey = infisicalSymmetricDecrypt({
      ciphertext: user.serverEncryptedPrivateKey,
      tag: user.serverEncryptedPrivateKeyTag,
      iv: user.serverEncryptedPrivateKeyIV,
      keyEncoding: user.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
    });

    return privateKey;
  };

  return {
    sendEmailVerificationCode,
    verifyEmailVerificationCode,
@@ -240,6 +257,7 @@ export const userServiceFactory = ({
    getMe,
    createUserAction,
    getUserAction,
    unlockUser
    unlockUser,
    getUserPrivateKey
  };
};
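
The new getUserPrivateKey reverses the server-side encryption that the admin sign-up hunk performs with infisicalSymmetricEncypt. Those helpers are not reproduced here; the sketch below is a generic AES-256-GCM round trip with the same ciphertext/iv/tag shape, purely to illustrate the store-then-recover flow under that assumption.

// Illustrative stand-in only; not Infisical's actual crypto helpers.
import { createCipheriv, createDecipheriv, randomBytes } from "crypto";

const rootKey = randomBytes(32); // in the service this would be a server-held symmetric root key

const encrypt = (plaintext: string) => {
  const iv = randomBytes(12);
  const cipher = createCipheriv("aes-256-gcm", rootKey, iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  return {
    ciphertext: ciphertext.toString("base64"),
    iv: iv.toString("base64"),
    tag: cipher.getAuthTag().toString("base64")
  };
};

const decrypt = ({ ciphertext, iv, tag }: { ciphertext: string; iv: string; tag: string }) => {
  const decipher = createDecipheriv("aes-256-gcm", rootKey, Buffer.from(iv, "base64"));
  decipher.setAuthTag(Buffer.from(tag, "base64"));
  return Buffer.concat([decipher.update(Buffer.from(ciphertext, "base64")), decipher.final()]).toString("utf8");
};

// Round trip mirroring how the private key is stored at sign-up and recovered in getUserPrivateKey.
const stored = encrypt("user-private-key-material");
console.log(decrypt(stored) === "user-private-key-material"); // true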
cli/go.mod

@@ -10,6 +10,7 @@ require (
    github.com/fatih/semgroup v1.2.0
    github.com/gitleaks/go-gitdiff v0.8.0
    github.com/h2non/filetype v1.1.3
    github.com/infisical/go-sdk v0.2.0
    github.com/mattn/go-isatty v0.0.14
    github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a
    github.com/muesli/mango-cobra v1.2.0
@@ -22,23 +23,48 @@ require (
    github.com/rs/zerolog v1.26.1
    github.com/spf13/cobra v1.6.1
    github.com/spf13/viper v1.8.1
    github.com/stretchr/testify v1.8.1
    golang.org/x/crypto v0.14.0
    golang.org/x/term v0.13.0
    github.com/stretchr/testify v1.9.0
    golang.org/x/crypto v0.23.0
    golang.org/x/term v0.20.0
    gopkg.in/yaml.v2 v2.4.0
)

require (
    cloud.google.com/go/auth v0.5.1 // indirect
    cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect
    cloud.google.com/go/compute/metadata v0.3.0 // indirect
    cloud.google.com/go/iam v1.1.8 // indirect
    github.com/alessio/shellescape v1.4.1 // indirect
    github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
    github.com/aws/aws-sdk-go-v2 v1.27.2 // indirect
    github.com/aws/aws-sdk-go-v2/config v1.27.18 // indirect
    github.com/aws/aws-sdk-go-v2/credentials v1.17.18 // indirect
    github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 // indirect
    github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 // indirect
    github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 // indirect
    github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
    github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect
    github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 // indirect
    github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 // indirect
    github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 // indirect
    github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 // indirect
    github.com/aws/smithy-go v1.20.2 // indirect
    github.com/chzyer/readline v1.5.1 // indirect
    github.com/danieljoos/wincred v1.2.0 // indirect
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/dvsekhvalnov/jose2go v1.5.0 // indirect
    github.com/felixge/httpsnoop v1.0.4 // indirect
    github.com/fsnotify/fsnotify v1.4.9 // indirect
    github.com/go-logr/logr v1.4.1 // indirect
    github.com/go-logr/stdr v1.2.2 // indirect
    github.com/go-openapi/errors v0.20.2 // indirect
    github.com/go-openapi/strfmt v0.21.3 // indirect
    github.com/godbus/dbus/v5 v5.1.0 // indirect
    github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
    github.com/golang/protobuf v1.5.4 // indirect
    github.com/google/s2a-go v0.1.7 // indirect
    github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
    github.com/googleapis/gax-go/v2 v2.12.4 // indirect
    github.com/hashicorp/hcl v1.0.0 // indirect
    github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
    github.com/magiconair/properties v1.8.5 // indirect
@@ -59,17 +85,30 @@ require (
    github.com/subosito/gotenv v1.2.0 // indirect
    github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
    go.mongodb.org/mongo-driver v1.10.0 // indirect
    golang.org/x/net v0.17.0 // indirect
    golang.org/x/sync v0.1.0 // indirect
    golang.org/x/sys v0.13.0 // indirect
    golang.org/x/text v0.13.0 // indirect
    go.opencensus.io v0.24.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
    go.opentelemetry.io/otel v1.24.0 // indirect
    go.opentelemetry.io/otel/metric v1.24.0 // indirect
    go.opentelemetry.io/otel/trace v1.24.0 // indirect
    golang.org/x/net v0.25.0 // indirect
    golang.org/x/oauth2 v0.21.0 // indirect
    golang.org/x/sync v0.7.0 // indirect
    golang.org/x/sys v0.20.0 // indirect
    golang.org/x/text v0.15.0 // indirect
    golang.org/x/time v0.5.0 // indirect
    google.golang.org/api v0.183.0 // indirect
    google.golang.org/genproto/googleapis/api v0.0.0-20240521202816-d264139d666e // indirect
    google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157 // indirect
    google.golang.org/grpc v1.64.0 // indirect
    google.golang.org/protobuf v1.34.1 // indirect
    gopkg.in/ini.v1 v1.62.0 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
)

require (
    github.com/fatih/color v1.13.0
    github.com/go-resty/resty/v2 v2.10.0
    github.com/go-resty/resty/v2 v2.13.1
    github.com/inconshreveable/mousetrap v1.0.1 // indirect
    github.com/jedib0t/go-pretty v4.3.0+incompatible
    github.com/manifoldco/promptui v0.9.0
cli/go.sum

[cli/go.sum: checksum entries updated to match the cli/go.mod changes above — new entries for the Google Cloud auth/compute/IAM, AWS SDK v2, OpenTelemetry, gRPC/protobuf, google.golang.org/api and infisical/go-sdk modules, and bumped entries for go-resty, testify, objx, go-cmp, uuid, opencensus and the golang.org/x/* packages, among others.]
@ -391,6 +391,7 @@ func CallCreateSecretsV3(httpClient *resty.Client, request CreateSecretV3Request
|
||||
}
|
||||
|
||||
func CallDeleteSecretsV3(httpClient *resty.Client, request DeleteSecretV3Request) error {
|
||||
|
||||
var secretsResponse GetEncryptedSecretsV3Response
|
||||
response, err := httpClient.
|
||||
R().
|
||||
@ -490,7 +491,7 @@ func CallUniversalAuthLogin(httpClient *resty.Client, request UniversalAuthLogin
|
||||
return universalAuthLoginResponse, nil
|
||||
}
|
||||
|
||||
func CallUniversalAuthRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) {
|
||||
func CallMachineIdentityRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) {
|
||||
var universalAuthRefreshResponse UniversalAuthRefreshResponse
|
||||
response, err := httpClient.
|
||||
R().
|
||||
@ -500,11 +501,11 @@ func CallUniversalAuthRefreshAccessToken(httpClient *resty.Client, request Unive
|
||||
Post(fmt.Sprintf("%v/v1/auth/token/renew", config.INFISICAL_URL))
|
||||
|
||||
if err != nil {
|
||||
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallUniversalAuthRefreshAccessToken: Unable to complete api request [err=%s]", err)
|
||||
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unable to complete api request [err=%s]", err)
|
||||
}
|
||||
|
||||
if response.IsError() {
|
||||
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallUniversalAuthRefreshAccessToken: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
return UniversalAuthRefreshResponse{}, fmt.Errorf("CallMachineIdentityRefreshAccessToken: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
|
||||
}
|
||||
|
||||
return universalAuthRefreshResponse, nil
|
||||
@ -566,3 +567,39 @@ func CallCreateDynamicSecretLeaseV1(httpClient *resty.Client, request CreateDyna
|
||||
|
||||
return createDynamicSecretLeaseResponse, nil
|
||||
}

func CallCreateRawSecretsV3(httpClient *resty.Client, request CreateRawSecretV3Request) error {
	response, err := httpClient.
		R().
		SetHeader("User-Agent", USER_AGENT).
		SetBody(request).
		Post(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))

	if err != nil {
		return fmt.Errorf("CallCreateRawSecretsV3: Unable to complete api request [err=%w]", err)
	}

	if response.IsError() {
		return fmt.Errorf("CallCreateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
	}

	return nil
}

func CallUpdateRawSecretsV3(httpClient *resty.Client, request UpdateRawSecretByNameV3Request) error {
	response, err := httpClient.
		R().
		SetHeader("User-Agent", USER_AGENT).
		SetBody(request).
		Patch(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))

	if err != nil {
		return fmt.Errorf("CallUpdateRawSecretsV3: Unable to complete api request [err=%w]", err)
	}

	if response.IsError() {
		return fmt.Errorf("CallUpdateRawSecretsV3: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
	}

	return nil
}
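
A minimal sketch of how a caller could drive the two new raw-secret endpoints above. The create-then-fall-back-to-update flow, the placeholder token and project ID, and the hard-coded "/" secret path are illustrative assumptions, not how the CLI itself wires these calls up.

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/packages/api"
	"github.com/go-resty/resty/v2"
)

// upsertRawSecret tries to create a raw secret and, if the create call is
// rejected (for example because the secret already exists), retries the same
// payload as an update against the same path.
func upsertRawSecret(accessToken, projectId, env, name, value string) error {
	httpClient := resty.New().
		SetAuthToken(accessToken).
		SetHeader("Accept", "application/json")

	createErr := api.CallCreateRawSecretsV3(httpClient, api.CreateRawSecretV3Request{
		SecretName:  name,
		WorkspaceID: projectId,
		Environment: env,
		SecretPath:  "/",
		SecretValue: value,
	})
	if createErr == nil {
		return nil
	}

	return api.CallUpdateRawSecretsV3(httpClient, api.UpdateRawSecretByNameV3Request{
		SecretName:  name,
		WorkspaceID: projectId,
		Environment: env,
		SecretPath:  "/",
		SecretValue: value,
	})
}

func main() {
	// Placeholder values for illustration only.
	if err := upsertRawSecret("<machine-identity-access-token>", "<project-id>", "dev", "DB_PASSWORD", "example-value"); err != nil {
		fmt.Println(err)
	}
}
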
@ -161,6 +161,14 @@ type Secret struct {
|
||||
PlainTextKey string `json:"plainTextKey"`
|
||||
}

type RawSecret struct {
	SecretKey     string `json:"secretKey,omitempty"`
	SecretValue   string `json:"secretValue,omitempty"`
	Type          string `json:"type,omitempty"`
	SecretComment string `json:"secretComment,omitempty"`
	ID            string `json:"id,omitempty"`
}
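
As a quick illustration of the struct tags above, a sketch of decoding one of these raw secrets from a JSON payload; the payload literal is made up for the example.

package main

import (
	"encoding/json"
	"fmt"
)

type RawSecret struct {
	SecretKey     string `json:"secretKey,omitempty"`
	SecretValue   string `json:"secretValue,omitempty"`
	Type          string `json:"type,omitempty"`
	SecretComment string `json:"secretComment,omitempty"`
	ID            string `json:"id,omitempty"`
}

func main() {
	// Hypothetical response body; field names come from the struct tags above.
	payload := []byte(`{"id":"abc123","secretKey":"DB_PASSWORD","secretValue":"example","type":"shared"}`)

	var secret RawSecret
	if err := json.Unmarshal(payload, &secret); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%+v\n", secret)
}
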
type GetEncryptedWorkspaceKeyRequest struct {
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
}
|
||||
@ -233,6 +241,7 @@ type GetLoginOneV2Response struct {
|
||||
type GetLoginTwoV2Request struct {
|
||||
Email string `json:"email"`
|
||||
ClientProof string `json:"clientProof"`
|
||||
Password string `json:"password"`
|
||||
}
|
||||
|
||||
type GetLoginTwoV2Response struct {
|
||||
@ -409,12 +418,23 @@ type CreateSecretV3Request struct {
|
||||
SecretPath string `json:"secretPath"`
|
||||
}
|
||||
|
||||
type CreateRawSecretV3Request struct {
|
||||
SecretName string `json:"-"`
|
||||
WorkspaceID string `json:"workspaceId"`
|
||||
Type string `json:"type,omitempty"`
|
||||
Environment string `json:"environment"`
|
||||
SecretPath string `json:"secretPath,omitempty"`
|
||||
SecretValue string `json:"secretValue"`
|
||||
SecretComment string `json:"secretComment,omitempty"`
|
||||
SkipMultilineEncoding bool `json:"skipMultilineEncoding,omitempty"`
|
||||
}
|
||||
|
||||
type DeleteSecretV3Request struct {
|
||||
SecretName string `json:"secretName"`
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
Environment string `json:"environment"`
|
||||
Type string `json:"type"`
|
||||
SecretPath string `json:"secretPath"`
|
||||
Type string `json:"type,omitempty"`
|
||||
SecretPath string `json:"secretPath,omitempty"`
|
||||
}
|
||||
|
||||
type UpdateSecretByNameV3Request struct {
|
||||
@ -427,6 +447,15 @@ type UpdateSecretByNameV3Request struct {
|
||||
SecretValueTag string `json:"secretValueTag"`
|
||||
}
|
||||
|
||||
type UpdateRawSecretByNameV3Request struct {
|
||||
SecretName string `json:"-"`
|
||||
WorkspaceID string `json:"workspaceId"`
|
||||
Environment string `json:"environment"`
|
||||
SecretPath string `json:"secretPath,omitempty"`
|
||||
SecretValue string `json:"secretValue"`
|
||||
Type string `json:"type,omitempty"`
|
||||
}
|
||||
|
||||
type GetSingleSecretByNameV3Request struct {
|
||||
SecretName string `json:"secretName"`
|
||||
WorkspaceId string `json:"workspaceId"`
|
||||
|
@ -20,6 +20,7 @@ import (
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
infisicalSdk "github.com/infisical/go-sdk"
|
||||
"github.com/rs/zerolog/log"
|
||||
"gopkg.in/yaml.v2"
|
||||
|
||||
@ -59,9 +60,26 @@ type UniversalAuth struct {
|
||||
RemoveClientSecretOnRead bool `yaml:"remove_client_secret_on_read"`
|
||||
}
|
||||
|
||||
type OAuthConfig struct {
|
||||
ClientID string `yaml:"client-id"`
|
||||
ClientSecret string `yaml:"client-secret"`
|
||||
type KubernetesAuth struct {
|
||||
IdentityID string `yaml:"identity-id"`
|
||||
ServiceAccountToken string `yaml:"service-account-token"`
|
||||
}
|
||||
|
||||
type AzureAuth struct {
|
||||
IdentityID string `yaml:"identity-id"`
|
||||
}
|
||||
|
||||
type GcpIdTokenAuth struct {
|
||||
IdentityID string `yaml:"identity-id"`
|
||||
}
|
||||
|
||||
type GcpIamAuth struct {
|
||||
IdentityID string `yaml:"identity-id"`
|
||||
ServiceAccountKey string `yaml:"service-account-key"`
|
||||
}
|
||||
|
||||
type AwsIamAuth struct {
|
||||
IdentityID string `yaml:"identity-id"`
|
||||
}
|
||||
|
||||
type Sink struct {
|
||||
@ -87,15 +105,6 @@ type Template struct {
|
||||
} `yaml:"config"`
|
||||
}
|
||||
|
||||
func newAgentTemplateChannels(templates []Template) map[string]chan bool {
|
||||
// we keep each destination as an identifier for various channel
|
||||
templateChannel := make(map[string]chan bool)
|
||||
for _, template := range templates {
|
||||
templateChannel[template.DestinationPath] = make(chan bool)
|
||||
}
|
||||
return templateChannel
|
||||
}
|
||||
|
||||
type DynamicSecretLease struct {
|
||||
LeaseID string
|
||||
ExpireAt time.Time
|
||||
@ -256,6 +265,14 @@ func WriteBytesToFile(data *bytes.Buffer, outputPath string) error {
|
||||
return err
|
||||
}
|
||||

func ParseAuthConfig(authConfigFile []byte, destination interface{}) error {
	if err := yaml.Unmarshal(authConfigFile, destination); err != nil {
		return err
	}

	return nil
}
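
ParseAuthConfig is a thin wrapper over yaml.Unmarshal, so each auth strategy can decode the agent's auth config block into its own struct from the types defined earlier. A self-contained sketch with the helper re-declared locally and an illustrative YAML literal:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type KubernetesAuth struct {
	IdentityID          string `yaml:"identity-id"`
	ServiceAccountToken string `yaml:"service-account-token"`
}

// parseAuthConfig mirrors ParseAuthConfig above: it simply defers to
// yaml.Unmarshal so each auth strategy can supply its own destination struct.
func parseAuthConfig(authConfigFile []byte, destination interface{}) error {
	return yaml.Unmarshal(authConfigFile, destination)
}

func main() {
	// Illustrative identity ID and token path, not real values.
	raw := []byte("identity-id: 11111111-2222-3333-4444-555555555555\nservice-account-token: /var/run/secrets/kubernetes.io/serviceaccount/token\n")

	var cfg KubernetesAuth
	if err := parseAuthConfig(raw, &cfg); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(cfg.IdentityID, cfg.ServiceAccountToken)
}
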
func ParseAgentConfig(configFile []byte) (*Config, error) {
|
||||
var rawConfig struct {
|
||||
Infisical InfisicalConfig `yaml:"infisical"`
|
||||
@ -283,36 +300,13 @@ func ParseAgentConfig(configFile []byte) (*Config, error) {
|
||||
config := &Config{
|
||||
Infisical: rawConfig.Infisical,
|
||||
Auth: AuthConfig{
|
||||
Type: rawConfig.Auth.Type,
|
||||
Type: rawConfig.Auth.Type,
|
||||
Config: rawConfig.Auth.Config,
|
||||
},
|
||||
Sinks: rawConfig.Sinks,
|
||||
Templates: rawConfig.Templates,
|
||||
}
|
||||
|
||||
// Marshal and then unmarshal the config based on the type
|
||||
configBytes, err := yaml.Marshal(rawConfig.Auth.Config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch rawConfig.Auth.Type {
|
||||
case "universal-auth":
|
||||
var tokenConfig UniversalAuth
|
||||
if err := yaml.Unmarshal(configBytes, &tokenConfig); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
config.Auth.Config = tokenConfig
|
||||
case "oauth": // aws, gcp, k8s service account, etc
|
||||
var oauthConfig OAuthConfig
|
||||
if err := yaml.Unmarshal(configBytes, &oauthConfig); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
config.Auth.Config = oauthConfig
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown auth type: %s", rawConfig.Auth.Type)
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
@ -337,7 +331,7 @@ func dynamicSecretTemplateFunction(accessToken string, dynamicSecretManager *Dyn
|
||||
return func(args ...string) (map[string]interface{}, error) {
|
||||
argLength := len(args)
|
||||
if argLength != 4 && argLength != 5 {
|
||||
return nil, fmt.Errorf("Invalid arguments found for dynamic-secret function. Check template %i", templateId)
|
||||
return nil, fmt.Errorf("invalid arguments found for dynamic-secret function. Check template %d", templateId)
|
||||
}
|
||||
|
||||
projectSlug, envSlug, secretPath, slug, ttl := args[0], args[1], args[2], args[3], ""
|
||||
@ -421,32 +415,54 @@ func ProcessBase64Template(templateId int, encodedTemplate string, data interfac
|
||||
}
|
||||
|
||||
type AgentManager struct {
|
||||
accessToken string
|
||||
accessTokenTTL time.Duration
|
||||
accessTokenMaxTTL time.Duration
|
||||
accessTokenFetchedTime time.Time
|
||||
accessTokenRefreshedTime time.Time
|
||||
mutex sync.Mutex
|
||||
filePaths []Sink // Store file paths if needed
|
||||
templates []Template
|
||||
dynamicSecretLeases *DynamicSecretLeaseManager
|
||||
clientIdPath string
|
||||
clientSecretPath string
|
||||
newAccessTokenNotificationChan chan bool
|
||||
removeClientSecretOnRead bool
|
||||
cachedClientSecret string
|
||||
exitAfterAuth bool
|
||||
accessToken string
|
||||
accessTokenTTL time.Duration
|
||||
accessTokenMaxTTL time.Duration
|
||||
accessTokenFetchedTime time.Time
|
||||
accessTokenRefreshedTime time.Time
|
||||
mutex sync.Mutex
|
||||
filePaths []Sink // Store file paths if needed
|
||||
templates []Template
|
||||
dynamicSecretLeases *DynamicSecretLeaseManager
|
||||
|
||||
authConfigBytes []byte
|
||||
authStrategy util.AuthStrategyType
|
||||
|
||||
newAccessTokenNotificationChan chan bool
|
||||
removeUniversalAuthClientSecretOnRead bool
|
||||
cachedUniversalAuthClientSecret string
|
||||
exitAfterAuth bool
|
||||
|
||||
infisicalClient infisicalSdk.InfisicalClientInterface
|
||||
}
|
||||
|
||||
func NewAgentManager(fileDeposits []Sink, templates []Template, clientIdPath string, clientSecretPath string, newAccessTokenNotificationChan chan bool, removeClientSecretOnRead bool, exitAfterAuth bool) *AgentManager {
|
||||
type NewAgentMangerOptions struct {
|
||||
FileDeposits []Sink
|
||||
Templates []Template
|
||||
|
||||
AuthConfigBytes []byte
|
||||
AuthStrategy util.AuthStrategyType
|
||||
|
||||
NewAccessTokenNotificationChan chan bool
|
||||
ExitAfterAuth bool
|
||||
}
|
||||
|
||||
func NewAgentManager(options NewAgentMangerOptions) *AgentManager {
|
||||
|
||||
return &AgentManager{
|
||||
filePaths: fileDeposits,
|
||||
templates: templates,
|
||||
clientIdPath: clientIdPath,
|
||||
clientSecretPath: clientSecretPath,
|
||||
newAccessTokenNotificationChan: newAccessTokenNotificationChan,
|
||||
removeClientSecretOnRead: removeClientSecretOnRead,
|
||||
exitAfterAuth: exitAfterAuth,
|
||||
filePaths: options.FileDeposits,
|
||||
templates: options.Templates,
|
||||
|
||||
authConfigBytes: options.AuthConfigBytes,
|
||||
authStrategy: options.AuthStrategy,
|
||||
|
||||
newAccessTokenNotificationChan: options.NewAccessTokenNotificationChan,
|
||||
exitAfterAuth: options.ExitAfterAuth,
|
||||
|
||||
infisicalClient: infisicalSdk.NewInfisicalClient(infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT, // ? Should we perhaps use a different user agent for the Agent for better analytics?
|
||||
}),
|
||||
}
|
||||
|
||||
}
|
||||
@ -469,52 +485,164 @@ func (tm *AgentManager) GetToken() string {
|
||||
return tm.accessToken
|
||||
}
|
||||
|
||||
func (tm *AgentManager) FetchUniversalAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, e error) {
|
||||
|
||||
var universalAuthConfig UniversalAuth
|
||||
if err := ParseAuthConfig(tm.authConfigBytes, &universalAuthConfig); err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
|
||||
}
|
||||
|
||||
clientID, err := util.GetEnvVarOrFileContent(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME, universalAuthConfig.ClientIDPath)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get client id: %v", err)
|
||||
}
|
||||
|
||||
clientSecret, err := util.GetEnvVarOrFileContent("INFISICAL_UNIVERSAL_CLIENT_SECRET", universalAuthConfig.ClientSecretPath)
|
||||
if err != nil {
|
||||
if len(tm.cachedUniversalAuthClientSecret) == 0 {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get client secret: %v", err)
|
||||
}
|
||||
clientSecret = tm.cachedUniversalAuthClientSecret
|
||||
}
|
||||
|
||||
tm.cachedUniversalAuthClientSecret = clientSecret
|
||||
if tm.removeUniversalAuthClientSecretOnRead {
|
||||
defer os.Remove(universalAuthConfig.ClientSecretPath)
|
||||
}
|
||||
|
||||
return tm.infisicalClient.Auth().UniversalAuthLogin(clientID, clientSecret)
|
||||
|
||||
}
|
||||
|
||||
func (tm *AgentManager) FetchKubernetesAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {
|
||||
|
||||
var kubernetesAuthConfig KubernetesAuth
|
||||
if err := ParseAuthConfig(tm.authConfigBytes, &kubernetesAuthConfig); err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
|
||||
}
|
||||
|
||||
identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, kubernetesAuthConfig.IdentityID)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
|
||||
}
|
||||
|
||||
serviceAccountTokenPath := os.Getenv(util.INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME)
|
||||
if serviceAccountTokenPath == "" {
|
||||
serviceAccountTokenPath = kubernetesAuthConfig.ServiceAccountToken
|
||||
if serviceAccountTokenPath == "" {
|
||||
serviceAccountTokenPath = "/var/run/secrets/kubernetes.io/serviceaccount/token"
|
||||
}
|
||||
}
|
||||
|
||||
return tm.infisicalClient.Auth().KubernetesAuthLogin(identityId, serviceAccountTokenPath)
|
||||
|
||||
}
|
||||
|
||||
func (tm *AgentManager) FetchAzureAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {
|
||||
|
||||
var azureAuthConfig AzureAuth
|
||||
if err := ParseAuthConfig(tm.authConfigBytes, &azureAuthConfig); err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
|
||||
}
|
||||
|
||||
identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, azureAuthConfig.IdentityID)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
|
||||
}
|
||||
|
||||
return tm.infisicalClient.Auth().AzureAuthLogin(identityId)
|
||||
|
||||
}
|
||||
|
||||
func (tm *AgentManager) FetchGcpIdTokenAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {
|
||||
|
||||
var gcpIdTokenAuthConfig GcpIdTokenAuth
|
||||
if err := ParseAuthConfig(tm.authConfigBytes, &gcpIdTokenAuthConfig); err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
|
||||
}
|
||||
|
||||
identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, gcpIdTokenAuthConfig.IdentityID)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
|
||||
}
|
||||
|
||||
return tm.infisicalClient.Auth().GcpIdTokenAuthLogin(identityId)
|
||||
|
||||
}
|
||||
|
||||
func (tm *AgentManager) FetchGcpIamAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {
|
||||
|
||||
var gcpIamAuthConfig GcpIamAuth
|
||||
if err := ParseAuthConfig(tm.authConfigBytes, &gcpIamAuthConfig); err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
|
||||
}
|
||||
|
||||
identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, gcpIamAuthConfig.IdentityID)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
|
||||
}
|
||||
|
||||
serviceAccountKeyPath := os.Getenv(util.INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME)
|
||||
if serviceAccountKeyPath == "" {
|
||||
// we don't need to read this file, because the service account key path is directly read inside the sdk
|
||||
serviceAccountKeyPath = gcpIamAuthConfig.ServiceAccountKey
|
||||
if serviceAccountKeyPath == "" {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("gcp service account key path not found")
|
||||
}
|
||||
}
|
||||
|
||||
return tm.infisicalClient.Auth().GcpIamAuthLogin(identityId, serviceAccountKeyPath)
|
||||
|
||||
}
|
||||
|
||||
func (tm *AgentManager) FetchAwsIamAuthAccessToken() (credential infisicalSdk.MachineIdentityCredential, err error) {
|
||||
|
||||
var awsIamAuthConfig AwsIamAuth
|
||||
if err := ParseAuthConfig(tm.authConfigBytes, &awsIamAuthConfig); err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to parse auth config due to error: %v", err)
|
||||
}
|
||||
|
||||
identityId, err := util.GetEnvVarOrFileContent(util.INFISICAL_MACHINE_IDENTITY_ID_NAME, awsIamAuthConfig.IdentityID)
|
||||
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, fmt.Errorf("unable to get identity id: %v", err)
|
||||
}
|
||||
|
||||
return tm.infisicalClient.Auth().AwsIamAuthLogin(identityId)
|
||||
|
||||
}
|
||||
|
||||
// Fetches a new access token using client credentials
|
||||
func (tm *AgentManager) FetchNewAccessToken() error {
|
||||
clientID := os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
|
||||
if clientID == "" {
|
||||
clientIDAsByte, err := ReadFile(tm.clientIdPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to read client id from file path '%s' due to error: %v", tm.clientIdPath, err)
|
||||
}
|
||||
clientID = string(clientIDAsByte)
|
||||
|
||||
authStrategies := map[util.AuthStrategyType]func() (credential infisicalSdk.MachineIdentityCredential, e error){
|
||||
util.AuthStrategy.UNIVERSAL_AUTH: tm.FetchUniversalAuthAccessToken,
|
||||
util.AuthStrategy.KUBERNETES_AUTH: tm.FetchKubernetesAuthAccessToken,
|
||||
util.AuthStrategy.AZURE_AUTH: tm.FetchAzureAuthAccessToken,
|
||||
util.AuthStrategy.GCP_ID_TOKEN_AUTH: tm.FetchGcpIdTokenAuthAccessToken,
|
||||
util.AuthStrategy.GCP_IAM_AUTH: tm.FetchGcpIamAuthAccessToken,
|
||||
util.AuthStrategy.AWS_IAM_AUTH: tm.FetchAwsIamAuthAccessToken,
|
||||
}
|
||||
|
||||
clientSecret := os.Getenv("INFISICAL_UNIVERSAL_CLIENT_SECRET")
|
||||
if clientSecret == "" {
|
||||
clientSecretAsByte, err := ReadFile(tm.clientSecretPath)
|
||||
if err != nil {
|
||||
if len(tm.cachedClientSecret) == 0 {
|
||||
return fmt.Errorf("unable to read client secret from file and no cached client secret found: %v", err)
|
||||
} else {
|
||||
clientSecretAsByte = []byte(tm.cachedClientSecret)
|
||||
}
|
||||
}
|
||||
clientSecret = string(clientSecretAsByte)
|
||||
if _, ok := authStrategies[tm.authStrategy]; !ok {
|
||||
return fmt.Errorf("auth strategy %s not found", tm.authStrategy)
|
||||
}
|
||||
|
||||
// remove client secret after first read
|
||||
if tm.removeClientSecretOnRead {
|
||||
os.Remove(tm.clientSecretPath)
|
||||
}
|
||||
credential, err := authStrategies[tm.authStrategy]()
|
||||
|
||||
// save as cache in memory
|
||||
tm.cachedClientSecret = clientSecret
|
||||
|
||||
loginResponse, err := util.UniversalAuthLogin(clientID, clientSecret)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
accessTokenTTL := time.Duration(loginResponse.AccessTokenTTL * int(time.Second))
|
||||
accessTokenMaxTTL := time.Duration(loginResponse.AccessTokenMaxTTL * int(time.Second))
|
||||
accessTokenTTL := time.Duration(credential.ExpiresIn * int64(time.Second))
|
||||
accessTokenMaxTTL := time.Duration(credential.AccessTokenMaxTTL * int64(time.Second))
|
||||
|
||||
if accessTokenTTL <= time.Duration(5)*time.Second {
|
||||
util.PrintErrorMessageAndExit("At this this, agent does not support refresh of tokens with 5 seconds or less ttl. Please increase access token ttl and try again")
|
||||
util.PrintErrorMessageAndExit("At this time, agent does not support refresh of tokens with 5 seconds or less ttl. Please increase access token ttl and try again")
|
||||
}
|
||||
|
||||
tm.accessTokenFetchedTime = time.Now()
|
||||
tm.SetToken(loginResponse.AccessToken, accessTokenTTL, accessTokenMaxTTL)
|
||||
tm.SetToken(credential.AccessToken, accessTokenTTL, accessTokenMaxTTL)
|
||||
|
||||
return nil
|
||||
}
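
The reworked FetchNewAccessToken boils down to a map-based dispatch over the configured auth strategy followed by a seconds-to-time.Duration conversion on the returned credential. A simplified, self-contained sketch of that pattern; the strategy names and the credential struct here are stand-ins rather than the real util/sdk types.

package main

import (
	"fmt"
	"time"
)

type credential struct {
	AccessToken       string
	ExpiresIn         int64 // seconds until the access token expires
	AccessTokenMaxTTL int64 // seconds until re-authentication is required
}

func main() {
	strategies := map[string]func() (credential, error){
		"universal-auth": func() (credential, error) {
			return credential{AccessToken: "ua-token", ExpiresIn: 3600, AccessTokenMaxTTL: 86400}, nil
		},
		"kubernetes": func() (credential, error) {
			return credential{AccessToken: "k8s-token", ExpiresIn: 600, AccessTokenMaxTTL: 3600}, nil
		},
	}

	strategy := "universal-auth"
	fetch, ok := strategies[strategy]
	if !ok {
		fmt.Printf("auth strategy %s not found\n", strategy)
		return
	}

	cred, err := fetch()
	if err != nil {
		fmt.Println(err)
		return
	}

	// Same seconds-to-Duration conversion the agent applies to the credential.
	ttl := time.Duration(cred.ExpiresIn * int64(time.Second))
	maxTTL := time.Duration(cred.AccessTokenMaxTTL * int64(time.Second))
	fmt.Println(cred.AccessToken, ttl, maxTTL)
}
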
@ -527,7 +655,7 @@ func (tm *AgentManager) RefreshAccessToken() error {
|
||||
SetRetryWaitTime(5 * time.Second)
|
||||
|
||||
accessToken := tm.GetToken()
|
||||
response, err := api.CallUniversalAuthRefreshAccessToken(httpClient, api.UniversalAuthRefreshRequest{AccessToken: accessToken})
|
||||
response, err := api.CallMachineIdentityRefreshAccessToken(httpClient, api.UniversalAuthRefreshRequest{AccessToken: accessToken})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -564,6 +692,7 @@ func (tm *AgentManager) ManageTokenLifecycle() {
|
||||
continue
|
||||
}
|
||||
} else if time.Now().After(accessTokenMaxTTLExpiresInTime) {
|
||||
// case: token has reached max ttl and we should re-authenticate entirely (cannot refresh)
|
||||
log.Info().Msgf("token has reached max ttl, attempting to re authenticate...")
|
||||
err := tm.FetchNewAccessToken()
|
||||
if err != nil {
|
||||
@ -574,6 +703,7 @@ func (tm *AgentManager) ManageTokenLifecycle() {
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
// case: token ttl has expired, but the token is still within max ttl, so we can refresh
|
||||
log.Info().Msgf("attempting to refresh existing token...")
|
||||
err := tm.RefreshAccessToken()
|
||||
if err != nil {
|
||||
@ -770,18 +900,33 @@ var agentCmd = &cobra.Command{
|
||||
return
|
||||
}
|
||||
|
||||
if agentConfig.Auth.Type != "universal-auth" {
|
||||
util.PrintErrorMessageAndExit("Only auth type of 'universal-auth' is supported at this time")
|
||||
}
|
||||
authMethodValid, authStrategy := util.IsAuthMethodValid(agentConfig.Auth.Type, false)
|
||||
|
||||
configUniversalAuthType := agentConfig.Auth.Config.(UniversalAuth)
|
||||
if !authMethodValid {
|
||||
util.PrintErrorMessageAndExit(fmt.Sprintf("The auth method '%s' is not supported.", agentConfig.Auth.Type))
|
||||
}
|
||||
|
||||
tokenRefreshNotifier := make(chan bool)
|
||||
sigChan := make(chan os.Signal, 1)
|
||||
signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
|
||||
|
||||
filePaths := agentConfig.Sinks
|
||||
tm := NewAgentManager(filePaths, agentConfig.Templates, configUniversalAuthType.ClientIDPath, configUniversalAuthType.ClientSecretPath, tokenRefreshNotifier, configUniversalAuthType.RemoveClientSecretOnRead, agentConfig.Infisical.ExitAfterAuth)
|
||||
|
||||
configBytes, err := yaml.Marshal(agentConfig.Auth.Config)
|
||||
if err != nil {
|
||||
log.Error().Msgf("unable to marshal auth config because %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
tm := NewAgentManager(NewAgentMangerOptions{
|
||||
FileDeposits: filePaths,
|
||||
Templates: agentConfig.Templates,
|
||||
AuthConfigBytes: configBytes,
|
||||
NewAccessTokenNotificationChan: tokenRefreshNotifier,
|
||||
ExitAfterAuth: agentConfig.Infisical.ExitAfterAuth,
|
||||
AuthStrategy: authStrategy,
|
||||
})
|
||||
|
||||
tm.dynamicSecretLeases = NewDynamicSecretLeaseManager(sigChan)
|
||||
|
||||
go tm.ManageTokenLifecycle()
|
||||
|
@ -55,6 +55,11 @@ var exportCmd = &cobra.Command{
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
format, err := cmd.Flags().GetString("format")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
@ -70,11 +75,6 @@ var exportCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
tagSlugs, err := cmd.Flags().GetString("tags")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@ -169,9 +169,9 @@ func init() {
|
||||
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
|
||||
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
exportCmd.Flags().Bool("include-imports", true, "Imported linked secrets")
|
||||
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
exportCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
|
||||
exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from")
|
||||
exportCmd.Flags().String("projectId", "", "manually set the projectId to export secrets from")
|
||||
exportCmd.Flags().String("path", "/", "get secrets within a folder path")
|
||||
exportCmd.Flags().String("template", "", "The path to the template file used to render secrets")
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
@ -71,10 +72,6 @@ var getCmd = &cobra.Command{
|
||||
var createCmd = &cobra.Command{
|
||||
Use: "create",
|
||||
Short: "Create a folder",
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
if !cmd.Flags().Changed("env") {
|
||||
@ -84,6 +81,16 @@ var createCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
projectId, err := cmd.Flags().GetString("projectId")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
folderPath, err := cmd.Flags().GetString("path")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@ -95,19 +102,31 @@ var createCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
if folderName == "" {
|
||||
util.HandleError(fmt.Errorf("Invalid folder name"), "Folder name cannot be empty")
|
||||
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
|
||||
}
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get workspace file")
|
||||
}
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get workspace file")
|
||||
}
|
||||
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
|
||||
params := models.CreateFolderParameters{
|
||||
FolderName: folderName,
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
FolderPath: folderPath,
|
||||
WorkspaceId: projectId,
|
||||
}
|
||||
|
||||
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
|
||||
params.InfisicalToken = token.Token
|
||||
}
|
||||
|
||||
_, err = util.CreateFolder(params)
|
||||
@ -124,10 +143,6 @@ var createCmd = &cobra.Command{
|
||||
var deleteCmd = &cobra.Command{
|
||||
Use: "delete",
|
||||
Short: "Delete a folder",
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
|
||||
environmentName, _ := cmd.Flags().GetString("env")
|
||||
@ -138,6 +153,16 @@ var deleteCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
projectId, err := cmd.Flags().GetString("projectId")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
folderPath, err := cmd.Flags().GetString("path")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@ -149,21 +174,29 @@ var deleteCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
if folderName == "" {
|
||||
util.HandleError(fmt.Errorf("Invalid folder name"), "Folder name cannot be empty")
|
||||
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
|
||||
}
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get workspace file")
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get workspace file")
|
||||
}
|
||||
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
|
||||
params := models.DeleteFolderParameters{
|
||||
FolderName: folderName,
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
WorkspaceId: projectId,
|
||||
Environment: environmentName,
|
||||
FolderPath: folderPath,
|
||||
}
|
||||
|
||||
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
|
||||
params.InfisicalToken = token.Token
|
||||
}
|
||||
|
||||
_, err = util.DeleteFolder(params)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to delete folder")
|
||||
|
@ -34,6 +34,8 @@ import (
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/crypto/argon2"
|
||||
"golang.org/x/term"
|
||||
|
||||
infisicalSdk "github.com/infisical/go-sdk"
|
||||
)
|
||||
|
||||
type params struct {
|
||||
@ -44,6 +46,86 @@ type params struct {
|
||||
keyLength uint32
|
||||
}
|
||||
|
||||
func handleUniversalAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {

	clientId, err := util.GetCmdFlagOrEnv(cmd, "client-id", util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
	if err != nil {
		return infisicalSdk.MachineIdentityCredential{}, err
	}

	clientSecret, err := util.GetCmdFlagOrEnv(cmd, "client-secret", util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
	if err != nil {
		return infisicalSdk.MachineIdentityCredential{}, err
	}

	return infisicalClient.Auth().UniversalAuthLogin(clientId, clientSecret)
}
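
util.GetCmdFlagOrEnv is used throughout these handlers but its body is not part of this diff. A hedged sketch of the flag-then-environment-variable lookup it presumably performs; the helper name and the env var literal below are guesses for illustration only.

package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// getCmdFlagOrEnv prefers the cobra flag value and falls back to an
// environment variable, erroring out when neither is set.
func getCmdFlagOrEnv(cmd *cobra.Command, flagName, envName string) (string, error) {
	value, err := cmd.Flags().GetString(flagName)
	if err != nil {
		return "", err
	}
	if value == "" {
		value = os.Getenv(envName)
	}
	if value == "" {
		return "", fmt.Errorf("please provide the --%s flag or set %s", flagName, envName)
	}
	return value, nil
}

func main() {
	cmd := &cobra.Command{Use: "demo"}
	cmd.Flags().String("client-id", "", "client id for universal auth")

	// The env var literal is an assumption; the CLI reads the name from a util constant.
	id, err := getCmdFlagOrEnv(cmd, "client-id", "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID")
	fmt.Println(id, err)
}
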
func handleKubernetesAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {
|
||||
|
||||
identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, err
|
||||
}
|
||||
|
||||
serviceAccountTokenPath, err := util.GetCmdFlagOrEnv(cmd, "service-account-token-path", util.INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, err
|
||||
}
|
||||
|
||||
return infisicalClient.Auth().KubernetesAuthLogin(identityId, serviceAccountTokenPath)
|
||||
}
|
||||
|
||||
func handleAzureAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {
|
||||
|
||||
identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, err
|
||||
}
|
||||
|
||||
return infisicalClient.Auth().AzureAuthLogin(identityId)
|
||||
}
|
||||
|
||||
func handleGcpIdTokenAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {
|
||||
|
||||
identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, err
|
||||
}
|
||||
|
||||
return infisicalClient.Auth().GcpIdTokenAuthLogin(identityId)
|
||||
}
|
||||
|
||||
func handleGcpIamAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {
|
||||
|
||||
identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, err
|
||||
}
|
||||
|
||||
serviceAccountKeyFilePath, err := util.GetCmdFlagOrEnv(cmd, "service-account-key-file-path", util.INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, err
|
||||
}
|
||||
|
||||
return infisicalClient.Auth().GcpIamAuthLogin(identityId, serviceAccountKeyFilePath)
|
||||
}
|
||||
|
||||
func handleAwsIamAuthLogin(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error) {
|
||||
|
||||
identityId, err := util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
|
||||
if err != nil {
|
||||
return infisicalSdk.MachineIdentityCredential{}, err
|
||||
}
|
||||
|
||||
return infisicalClient.Auth().AwsIamAuthLogin(identityId)
|
||||
}
|
||||
|
||||
func formatAuthMethod(authMethod string) string {
|
||||
return strings.ReplaceAll(authMethod, "-", " ")
|
||||
}
|
||||
|
||||
const ADD_USER = "Add a new account login"
|
||||
const REPLACE_USER = "Override current logged in user"
|
||||
const EXIT_USER_MENU = "Exit"
|
||||
@ -56,6 +138,11 @@ var loginCmd = &cobra.Command{
|
||||
DisableFlagsInUseLine: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
})
|
||||
|
||||
loginMethod, err := cmd.Flags().GetString("method")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
@ -65,12 +152,13 @@ var loginCmd = &cobra.Command{
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
if loginMethod != "user" && loginMethod != "universal-auth" {
|
||||
util.PrintErrorMessageAndExit("Invalid login method. Please use either 'user' or 'universal-auth'")
|
||||
authMethodValid, strategy := util.IsAuthMethodValid(loginMethod, true)
|
||||
if !authMethodValid {
|
||||
util.PrintErrorMessageAndExit(fmt.Sprintf("Invalid login method: %s", loginMethod))
|
||||
}
|
||||
|
||||
// standalone user auth
|
||||
if loginMethod == "user" {
|
||||
|
||||
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
|
||||
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
|
||||
@ -133,7 +221,7 @@ var loginCmd = &cobra.Command{
|
||||
|
||||
err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
|
||||
if err != nil {
|
||||
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
|
||||
log.Error().Msgf("Unable to store your credentials in system vault")
|
||||
log.Error().Msgf("\nTo trouble shoot further, read https://infisical.com/docs/cli/faq")
|
||||
log.Debug().Err(err)
|
||||
//return here
|
||||
@ -160,47 +248,33 @@ var loginCmd = &cobra.Command{
|
||||
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
|
||||
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
|
||||
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
|
||||
} else if loginMethod == "universal-auth" {
|
||||
} else {
|
||||
|
||||
clientId, err := cmd.Flags().GetString("client-id")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
authStrategies := map[util.AuthStrategyType]func(cmd *cobra.Command, infisicalClient infisicalSdk.InfisicalClientInterface) (credential infisicalSdk.MachineIdentityCredential, e error){
|
||||
util.AuthStrategy.UNIVERSAL_AUTH: handleUniversalAuthLogin,
|
||||
util.AuthStrategy.KUBERNETES_AUTH: handleKubernetesAuthLogin,
|
||||
util.AuthStrategy.AZURE_AUTH: handleAzureAuthLogin,
|
||||
util.AuthStrategy.GCP_ID_TOKEN_AUTH: handleGcpIdTokenAuthLogin,
|
||||
util.AuthStrategy.GCP_IAM_AUTH: handleGcpIamAuthLogin,
|
||||
util.AuthStrategy.AWS_IAM_AUTH: handleAwsIamAuthLogin,
|
||||
}
|
||||
|
||||
clientSecret, err := cmd.Flags().GetString("client-secret")
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
if clientId == "" {
|
||||
clientId = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
|
||||
if clientId == "" {
|
||||
util.PrintErrorMessageAndExit("Please provide client-id")
|
||||
}
|
||||
}
|
||||
if clientSecret == "" {
|
||||
clientSecret = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
|
||||
if clientSecret == "" {
|
||||
util.PrintErrorMessageAndExit("Please provide client-secret")
|
||||
}
|
||||
}
|
||||
|
||||
res, err := util.UniversalAuthLogin(clientId, clientSecret)
|
||||
credential, err := authStrategies[strategy](cmd, infisicalClient)
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
util.HandleError(fmt.Errorf("unable to authenticate with %s [err=%v]", formatAuthMethod(loginMethod), err))
|
||||
}
|
||||
|
||||
if plainOutput {
|
||||
fmt.Println(res.AccessToken)
|
||||
fmt.Println(credential.AccessToken)
|
||||
return
|
||||
}
|
||||
|
||||
boldGreen := color.New(color.FgGreen).Add(color.Bold)
|
||||
boldPlain := color.New(color.Bold)
|
||||
time.Sleep(time.Second * 1)
|
||||
boldGreen.Printf(">>>> Successfully authenticated with Universal Auth!\n\n")
|
||||
boldPlain.Printf("Universal Auth Access Token:\n%v", res.AccessToken)
|
||||
boldGreen.Printf(">>>> Successfully authenticated with %s!\n\n", formatAuthMethod(loginMethod))
|
||||
boldPlain.Printf("Access Token:\n%v", credential.AccessToken)
|
||||
|
||||
plainBold := color.New(color.Bold)
|
||||
plainBold.Println("\n\nYou can use this access token to authenticate through other commands in the CLI.")
|
||||
@ -376,9 +450,12 @@ func init() {
|
||||
rootCmd.AddCommand(loginCmd)
|
||||
loginCmd.Flags().BoolP("interactive", "i", false, "login via the command line")
|
||||
loginCmd.Flags().String("method", "user", "login method [user, universal-auth]")
|
||||
loginCmd.Flags().String("client-id", "", "client id for universal auth")
|
||||
loginCmd.Flags().Bool("plain", false, "only output the token without any formatting")
|
||||
loginCmd.Flags().String("client-id", "", "client id for universal auth")
|
||||
loginCmd.Flags().String("client-secret", "", "client secret for universal auth")
|
||||
loginCmd.Flags().String("machine-identity-id", "", "machine identity id for kubernetes, azure, gcp-id-token, gcp-iam, and aws-iam auth methods")
|
||||
loginCmd.Flags().String("service-account-token-path", "", "service account token path for kubernetes auth")
|
||||
loginCmd.Flags().String("service-account-key-file-path", "", "service account key file path for GCP IAM auth")
|
||||
}
|
||||
|
||||
func DomainOverridePrompt() (bool, error) {
|
||||
@ -539,6 +616,7 @@ func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2R
|
||||
loginTwoResponseResult, err := api.CallLogin2V2(httpClient, api.GetLoginTwoV2Request{
|
||||
Email: email,
|
||||
ClientProof: hex.EncodeToString(srpM1),
|
||||
Password: password,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
@ -237,8 +237,8 @@ func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(runCmd)
|
||||
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
runCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
runCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
runCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
|
||||
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
|
||||
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
runCmd.Flags().Bool("include-imports", true, "Import linked secrets ")
|
||||
|
@ -4,23 +4,17 @@ Copyright (c) 2023 Infisical Inc.
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/Infisical/infisical-merge/packages/visualize"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/posthog/posthog-go"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@ -160,14 +154,19 @@ var secretsSetCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
secretsPath, err := cmd.Flags().GetString("path")
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
projectId, err := cmd.Flags().GetString("projectId")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get your local config details")
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secretsPath, err := cmd.Flags().GetString("path")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secretType, err := cmd.Flags().GetString("type")
|
||||
@ -175,196 +174,18 @@ var secretsSetCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse secret type")
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
var secretOperations []models.SecretSetOperation
|
||||
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
|
||||
secretOperations, err = util.SetRawSecrets(args, secretType, environmentName, secretsPath, projectId, token)
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
secretOperations, err = util.SetEncryptedSecrets(args, secretType, environmentName, secretsPath)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to authenticate")
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
request := api.GetEncryptedWorkspaceKeyRequest{
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
}
|
||||
|
||||
workspaceKeyResponse, err := api.CallGetEncryptedWorkspaceKey(httpClient, request)
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to get your encrypted workspace key")
|
||||
}
|
||||
|
||||
encryptedWorkspaceKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey)
|
||||
encryptedWorkspaceKeySenderPublicKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey)
|
||||
encryptedWorkspaceKeyNonce, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce)
|
||||
currentUsersPrivateKey, _ := base64.StdEncoding.DecodeString(loggedInUserDetails.UserCredentials.PrivateKey)
|
||||
|
||||
if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 {
|
||||
log.Debug().Msgf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey)
|
||||
util.PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. Please run [infisical login] then try again")
|
||||
}
|
||||
|
||||
// decrypt workspace key
|
||||
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
|
||||
|
||||
infisicalTokenEnv := os.Getenv(util.INFISICAL_TOKEN_NAME)
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, InfisicalToken: infisicalTokenEnv}, "")
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to retrieve secrets")
|
||||
}
|
||||
|
||||
type SecretSetOperation struct {
|
||||
SecretKey string
|
||||
SecretValue string
|
||||
SecretOperation string
|
||||
}
|
||||
|
||||
secretsToCreate := []api.Secret{}
|
||||
secretsToModify := []api.Secret{}
|
||||
secretOperations := []SecretSetOperation{}
|
||||
|
||||
sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
|
||||
for _, secret := range secrets {
|
||||
if secret.Type == util.SECRET_TYPE_PERSONAL {
|
||||
personalSecretMapByName[secret.Key] = secret
|
||||
} else {
|
||||
sharedSecretMapByName[secret.Key] = secret
|
||||
}
|
||||
}
|
||||
|
||||
for _, arg := range args {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
util.PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
util.PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
key := splitKeyValueFromArg[0]
|
||||
value := splitKeyValueFromArg[1]
|
||||
|
||||
hashedKey := fmt.Sprintf("%x", sha256.Sum256([]byte(key)))
|
||||
encryptedKey, err := crypto.EncryptSymmetric([]byte(key), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to encrypt your secrets")
|
||||
}
|
||||
|
||||
hashedValue := fmt.Sprintf("%x", sha256.Sum256([]byte(value)))
|
||||
encryptedValue, err := crypto.EncryptSymmetric([]byte(value), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
util.HandleError(err, "unable to encrypt your secrets")
|
||||
}
|
||||
|
||||
var existingSecret models.SingleEnvironmentVariable
|
||||
var doesSecretExist bool
|
||||
|
||||
if secretType == util.SECRET_TYPE_SHARED {
|
||||
existingSecret, doesSecretExist = sharedSecretMapByName[key]
|
||||
} else {
|
||||
existingSecret, doesSecretExist = personalSecretMapByName[key]
|
||||
}
|
||||
|
||||
if doesSecretExist {
|
||||
// case: secret exists in project so it needs to be modified
|
||||
encryptedSecretDetails := api.Secret{
|
||||
ID: existingSecret.ID,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
PlainTextKey: key,
|
||||
Type: existingSecret.Type,
|
||||
}
|
||||
|
||||
// Only add to modifications if the value is different
|
||||
if existingSecret.Value != value {
|
||||
secretsToModify = append(secretsToModify, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE MODIFIED",
|
||||
})
|
||||
} else {
|
||||
// Current value is same as exisitng so no change
|
||||
secretOperations = append(secretOperations, SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE UNCHANGED",
|
||||
})
|
||||
}
|
||||
|
||||
} else {
|
||||
// case: secret doesn't exist in project so it needs to be created
|
||||
encryptedSecretDetails := api.Secret{
|
||||
SecretKeyCiphertext: base64.StdEncoding.EncodeToString(encryptedKey.CipherText),
|
||||
SecretKeyIV: base64.StdEncoding.EncodeToString(encryptedKey.Nonce),
|
||||
SecretKeyTag: base64.StdEncoding.EncodeToString(encryptedKey.AuthTag),
|
||||
SecretKeyHash: hashedKey,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
Type: secretType,
|
||||
PlainTextKey: key,
|
||||
}
|
||||
secretsToCreate = append(secretsToCreate, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET CREATED",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToCreate {
|
||||
createSecretRequest := api.CreateSecretV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretName: secret.PlainTextKey,
|
||||
SecretKeyCiphertext: secret.SecretKeyCiphertext,
|
||||
SecretKeyIV: secret.SecretKeyIV,
|
||||
SecretKeyTag: secret.SecretKeyTag,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallCreateSecretsV3(httpClient, createSecretRequest)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to process new secret creations")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToModify {
|
||||
updateSecretRequest := api.UpdateSecretByNameV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallUpdateSecretsV3(httpClient, updateSecretRequest, secret.PlainTextKey)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to process secret update request")
|
||||
return
|
||||
}
|
||||
util.HandleError(err, "Unable to set secrets")
|
||||
}
|
||||
|
||||
// Print secret operations
|
||||
@ -395,6 +216,16 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
token, err := util.GetInfisicalToken(cmd)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
projectId, err := cmd.Flags().GetString("projectId")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
secretsPath, err := cmd.Flags().GetString("path")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
@ -405,33 +236,44 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to authenticate")
|
||||
httpClient := resty.New().
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
}
|
||||
projectId = workspaceFile.WorkspaceId
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
|
||||
httpClient.SetAuthToken(token.Token)
|
||||
} else {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get local project details")
|
||||
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to authenticate")
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken)
|
||||
}
|
||||
|
||||
for _, secretName := range args {
|
||||
request := api.DeleteSecretV3Request{
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
WorkspaceId: projectId,
|
||||
Environment: environmentName,
|
||||
SecretName: secretName,
|
||||
Type: secretType,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
err = api.CallDeleteSecretsV3(httpClient, request)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to complete your delete request")
|
||||
@ -789,13 +631,13 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod
|
||||
}
|
||||
|
||||
func init() {
|
||||
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId when using machine identity based auth")
|
||||
secretsGenerateExampleEnvCmd.Flags().String("path", "/", "Fetch secrets from within a folder path")
|
||||
secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)
|
||||
|
||||
secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsGetCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
secretsGetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsGetCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
|
||||
secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
|
||||
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
secretsGetCmd.Flags().Bool("raw-value", false, "Returns only the value of secret, only works with one secret")
|
||||
@ -804,41 +646,37 @@ func init() {
|
||||
|
||||
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
|
||||
secretsCmd.AddCommand(secretsSetCmd)
|
||||
secretsSetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsSetCmd.Flags().String("projectId", "", "manually set the project ID to for setting secrets when using machine identity based auth")
|
||||
secretsSetCmd.Flags().String("path", "/", "set secrets within a folder path")
|
||||
secretsSetCmd.Flags().String("type", util.SECRET_TYPE_SHARED, "the type of secret to create: personal or shared")
|
||||
|
||||
// Only supports logged in users (JWT auth)
|
||||
secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
}
|
||||
|
||||
secretsDeleteCmd.Flags().String("type", "personal", "the type of secret to delete: personal or shared (default: personal)")
|
||||
secretsDeleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsDeleteCmd.Flags().String("projectId", "", "manually set the projectId to delete secrets from when using machine identity based auth")
|
||||
secretsDeleteCmd.Flags().String("path", "/", "get secrets within a folder path")
|
||||
secretsCmd.AddCommand(secretsDeleteCmd)
|
||||
|
||||
// Only supports logged in users (JWT auth)
|
||||
secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
|
||||
util.RequireLogin()
|
||||
util.RequireLocalWorkspaceFile()
|
||||
}
|
||||
|
||||
// *** Folders sub command ***
|
||||
folderCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
|
||||
|
||||
// Add getCmd, createCmd and deleteCmd flags here
|
||||
getCmd.Flags().StringP("path", "p", "/", "The path from where folders should be fetched from")
|
||||
getCmd.Flags().String("token", "", "Fetch folders using the infisical token")
|
||||
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
getCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from when using machine identity based auth")
|
||||
folderCmd.AddCommand(getCmd)
|
||||
|
||||
// Add createCmd flags here
|
||||
createCmd.Flags().StringP("path", "p", "/", "Path to where the folder should be created")
|
||||
createCmd.Flags().StringP("name", "n", "", "Name of the folder to be created in selected `--path`")
|
||||
createCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
createCmd.Flags().String("projectId", "", "manually set the project ID for creating folders in when using machine identity based auth")
|
||||
folderCmd.AddCommand(createCmd)
|
||||
|
||||
// Add deleteCmd flags here
|
||||
deleteCmd.Flags().StringP("path", "p", "/", "Path to the folder to be deleted")
|
||||
deleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
deleteCmd.Flags().String("projectId", "", "manually set the projectId to delete folders when using machine identity based auth")
|
||||
deleteCmd.Flags().StringP("name", "n", "", "Name of the folder to be deleted within selected `--path`")
|
||||
folderCmd.AddCommand(deleteCmd)
|
||||
|
||||
@ -846,8 +684,8 @@ func init() {
|
||||
|
||||
// ** End of folders sub command
|
||||
|
||||
secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
|
||||
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
|
||||
secretsCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
|
||||
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets when using machine identity based auth")
|
||||
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
|
||||
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
|
||||
secretsCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
|
||||
|
@ -39,7 +39,7 @@ var tokenRenewCmd = &cobra.Command{
|
||||
util.PrintErrorMessageAndExit("You are trying to renew a service token. You can only renew universal auth access tokens.")
|
||||
}
|
||||
|
||||
renewedAccessToken, err := util.RenewUniversalAuthAccessToken(token)
|
||||
renewedAccessToken, err := util.RenewMachineIdentityAccessToken(token)
|
||||
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to renew token")
|
||||
|
@ -134,3 +134,9 @@ type MachineIdentityCredentials struct {
|
||||
ClientId string
|
||||
ClientSecret string
|
||||
}
|
||||
|
||||
type SecretSetOperation struct {
|
||||
SecretKey string
|
||||
SecretValue string
|
||||
SecretOperation string
|
||||
}
|
||||
|
cli/packages/util/auth.go (new file, 42 lines added)
@ -0,0 +1,42 @@
|
||||
package util
|
||||
|
||||
type AuthStrategyType string
|
||||
|
||||
var AuthStrategy = struct {
|
||||
UNIVERSAL_AUTH AuthStrategyType
|
||||
KUBERNETES_AUTH AuthStrategyType
|
||||
AZURE_AUTH AuthStrategyType
|
||||
GCP_ID_TOKEN_AUTH AuthStrategyType
|
||||
GCP_IAM_AUTH AuthStrategyType
|
||||
AWS_IAM_AUTH AuthStrategyType
|
||||
}{
|
||||
UNIVERSAL_AUTH: "universal-auth",
|
||||
KUBERNETES_AUTH: "kubernetes",
|
||||
AZURE_AUTH: "azure",
|
||||
GCP_ID_TOKEN_AUTH: "gcp-id-token",
|
||||
GCP_IAM_AUTH: "gcp-iam",
|
||||
AWS_IAM_AUTH: "aws-iam",
|
||||
}
|
||||
|
||||
var AVAILABLE_AUTH_STRATEGIES = []AuthStrategyType{
|
||||
AuthStrategy.UNIVERSAL_AUTH,
|
||||
AuthStrategy.KUBERNETES_AUTH,
|
||||
AuthStrategy.AZURE_AUTH,
|
||||
AuthStrategy.GCP_ID_TOKEN_AUTH,
|
||||
AuthStrategy.GCP_IAM_AUTH,
|
||||
AuthStrategy.AWS_IAM_AUTH,
|
||||
}
|
||||
|
||||
func IsAuthMethodValid(authMethod string, allowUserAuth bool) (isValid bool, strategy AuthStrategyType) {
|
||||
|
||||
if authMethod == "user" && allowUserAuth {
|
||||
return true, ""
|
||||
}
|
||||
|
||||
for _, strategy := range AVAILABLE_AUTH_STRATEGIES {
|
||||
if string(strategy) == authMethod {
|
||||
return true, strategy
|
||||
}
|
||||
}
|
||||
return false, ""
|
||||
}
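
For context, here is a minimal, hypothetical sketch of how the new `IsAuthMethodValid` helper could be consumed by a cobra command when validating a `--method` flag. The wrapper function and the `packages/util` import path are assumptions (only the `api` and `crypto` import paths appear in this diff); the helper's signature and the `AVAILABLE_AUTH_STRATEGIES` list come from the file above.

```go
package cmd

import (
	"fmt"

	// Assumed import path, mirroring the api/crypto imports shown in this diff.
	"github.com/Infisical/infisical-merge/packages/util"
	"github.com/spf13/cobra"
)

// validateLoginMethod is a hypothetical helper showing how IsAuthMethodValid
// could gate the value of a --method flag before authentication starts.
func validateLoginMethod(cmd *cobra.Command) (util.AuthStrategyType, error) {
	method, err := cmd.Flags().GetString("method")
	if err != nil {
		return "", err
	}

	// allowUserAuth=true also permits the interactive "user" method in
	// addition to the machine identity strategies in AVAILABLE_AUTH_STRATEGIES.
	isValid, strategy := util.IsAuthMethodValid(method, true)
	if !isValid {
		return "", fmt.Errorf("invalid auth method '%s'; valid methods are user and %v", method, util.AVAILABLE_AUTH_STRATEGIES)
	}
	return strategy, nil
}
```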
|
@ -1,20 +1,32 @@
|
||||
package util
|
||||
|
||||
const (
|
||||
CONFIG_FILE_NAME = "infisical-config.json"
|
||||
CONFIG_FOLDER_NAME = ".infisical"
|
||||
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
|
||||
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
|
||||
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
|
||||
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
|
||||
CONFIG_FILE_NAME = "infisical-config.json"
|
||||
CONFIG_FOLDER_NAME = ".infisical"
|
||||
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
|
||||
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
|
||||
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
|
||||
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
|
||||
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
|
||||
|
||||
// Universal Auth
|
||||
INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID"
|
||||
INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET"
|
||||
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
|
||||
SECRET_TYPE_PERSONAL = "personal"
|
||||
SECRET_TYPE_SHARED = "shared"
|
||||
KEYRING_SERVICE_NAME = "infisical"
|
||||
PERSONAL_SECRET_TYPE_NAME = "personal"
|
||||
SHARED_SECRET_TYPE_NAME = "shared"
|
||||
|
||||
// Kubernetes auth
|
||||
INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_NAME = "INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH"
|
||||
|
||||
// GCP Auth
|
||||
INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH_NAME = "INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH"
|
||||
|
||||
// Generic env variable used for auth methods that require a machine identity ID
|
||||
INFISICAL_MACHINE_IDENTITY_ID_NAME = "INFISICAL_MACHINE_IDENTITY_ID"
|
||||
|
||||
SECRET_TYPE_PERSONAL = "personal"
|
||||
SECRET_TYPE_SHARED = "shared"
|
||||
KEYRING_SERVICE_NAME = "infisical"
|
||||
PERSONAL_SECRET_TYPE_NAME = "personal"
|
||||
SHARED_SECRET_TYPE_NAME = "shared"
|
||||
|
||||
SERVICE_TOKEN_IDENTIFIER = "service-token"
|
||||
UNIVERSAL_AUTH_TOKEN_IDENTIFIER = "universal-auth-token"
|
||||
|
@ -172,19 +172,28 @@ func GetFoldersViaMachineIdentity(accessToken string, workspaceId string, envSlu
|
||||
|
||||
// CreateFolder creates a folder in Infisical
|
||||
func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, error) {
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
return models.SingleFolder{}, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
// If no token is provided, we will try to get the token from the current logged in user
|
||||
if params.InfisicalToken == "" {
|
||||
RequireLogin()
|
||||
RequireLocalWorkspaceFile()
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
|
||||
if err != nil {
|
||||
return models.SingleFolder{}, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
params.InfisicalToken = loggedInUserDetails.UserCredentials.JTWToken
|
||||
}
|
||||
|
||||
// set up resty client
|
||||
httpClient := resty.New()
|
||||
httpClient.
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetAuthToken(params.InfisicalToken).
|
||||
SetHeader("Accept", "application/json").
|
||||
SetHeader("Content-Type", "application/json")
|
||||
|
||||
@ -209,19 +218,29 @@ func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, er
|
||||
}
|
||||
|
||||
func DeleteFolder(params models.DeleteFolderParameters) ([]models.SingleFolder, error) {
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
// If no token is provided, we will try to get the token from the current logged in user
|
||||
if params.InfisicalToken == "" {
|
||||
RequireLogin()
|
||||
RequireLocalWorkspaceFile()
|
||||
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
params.InfisicalToken = loggedInUserDetails.UserCredentials.JTWToken
|
||||
}
|
||||
|
||||
// set up resty client
|
||||
httpClient := resty.New()
|
||||
httpClient.
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetAuthToken(params.InfisicalToken).
|
||||
SetHeader("Accept", "application/json").
|
||||
SetHeader("Content-Type", "application/json")
|
||||
|
||||
|
@ -123,7 +123,7 @@ func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuth
|
||||
return tokenResponse, nil
|
||||
}
|
||||
|
||||
func RenewUniversalAuthAccessToken(accessToken string) (string, error) {
|
||||
func RenewMachineIdentityAccessToken(accessToken string) (string, error) {
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient.SetRetryCount(10000).
|
||||
@ -134,7 +134,7 @@ func RenewUniversalAuthAccessToken(accessToken string) (string, error) {
|
||||
AccessToken: accessToken,
|
||||
}
|
||||
|
||||
tokenResponse, err := api.CallUniversalAuthRefreshAccessToken(httpClient, request)
|
||||
tokenResponse, err := api.CallMachineIdentityRefreshAccessToken(httpClient, request)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@ -246,3 +246,44 @@ func AppendAPIEndpoint(address string) string {
|
||||
}
|
||||
return address + "/api"
|
||||
}
|
||||
|
||||
func ReadFileAsString(filePath string) (string, error) {
|
||||
fileBytes, err := os.ReadFile(filePath)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return string(fileBytes), nil
|
||||
|
||||
}
|
||||
|
||||
func GetEnvVarOrFileContent(envName string, filePath string) (string, error) {
|
||||
// First check if the environment variable is set
|
||||
if envVarValue := os.Getenv(envName); envVarValue != "" {
|
||||
return envVarValue, nil
|
||||
}
|
||||
|
||||
// If it's not set, try to read the file
|
||||
fileContent, err := ReadFileAsString(filePath)
|
||||
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("unable to read file content from file path '%s' [err=%v]", filePath, err)
|
||||
}
|
||||
|
||||
return fileContent, nil
|
||||
}
|
||||
|
||||
func GetCmdFlagOrEnv(cmd *cobra.Command, flag, envName string) (string, error) {
|
||||
value, flagsErr := cmd.Flags().GetString(flag)
|
||||
if flagsErr != nil {
|
||||
return "", flagsErr
|
||||
}
|
||||
if value == "" {
|
||||
value = os.Getenv(envName)
|
||||
}
|
||||
if value == "" {
|
||||
return "", fmt.Errorf("please provide %s flag", flag)
|
||||
}
|
||||
return value, nil
|
||||
}
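
A short, hypothetical usage sketch of the two helpers above. The wrapper functions, the `packages/util` import path, and the `EXAMPLE_API_TOKEN` env var and file path are illustrative assumptions; `GetCmdFlagOrEnv`, `GetEnvVarOrFileContent`, and `INFISICAL_MACHINE_IDENTITY_ID_NAME` come from this diff.

```go
package cmd

import (
	"github.com/Infisical/infisical-merge/packages/util" // assumed import path
	"github.com/spf13/cobra"
)

// resolveMachineIdentityID prefers the --machine-identity-id flag and falls
// back to the INFISICAL_MACHINE_IDENTITY_ID environment variable.
func resolveMachineIdentityID(cmd *cobra.Command) (string, error) {
	return util.GetCmdFlagOrEnv(cmd, "machine-identity-id", util.INFISICAL_MACHINE_IDENTITY_ID_NAME)
}

// loadExampleToken prefers the (made-up) EXAMPLE_API_TOKEN environment
// variable and otherwise reads the token from a file on disk.
func loadExampleToken() (string, error) {
	return util.GetEnvVarOrFileContent("EXAMPLE_API_TOKEN", "/etc/example/token")
}
```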
|
||||
|
@ -1,6 +1,7 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
@ -9,6 +10,7 @@ import (
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
@ -806,3 +808,336 @@ func GetPlainTextWorkspaceKey(authenticationToken string, receiverPrivateKey str
|
||||
|
||||
return crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey), nil
|
||||
}
|
||||
|
||||
func SetEncryptedSecrets(secretArgs []string, secretType string, environmentName string, secretsPath string) ([]models.SecretSetOperation, error) {
|
||||
|
||||
workspaceFile, err := GetWorkSpaceFromFile()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get your local config details [err=%v]", err)
|
||||
}
|
||||
|
||||
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to authenticate [err=%v]", err)
|
||||
}
|
||||
|
||||
if loggedInUserDetails.LoginExpired {
|
||||
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
request := api.GetEncryptedWorkspaceKeyRequest{
|
||||
WorkspaceId: workspaceFile.WorkspaceId,
|
||||
}
|
||||
|
||||
workspaceKeyResponse, err := api.CallGetEncryptedWorkspaceKey(httpClient, request)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get your encrypted workspace key [err=%v]", err)
|
||||
}
|
||||
|
||||
encryptedWorkspaceKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.EncryptedKey)
|
||||
encryptedWorkspaceKeySenderPublicKey, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Sender.PublicKey)
|
||||
encryptedWorkspaceKeyNonce, _ := base64.StdEncoding.DecodeString(workspaceKeyResponse.Nonce)
|
||||
currentUsersPrivateKey, _ := base64.StdEncoding.DecodeString(loggedInUserDetails.UserCredentials.PrivateKey)
|
||||
|
||||
if len(currentUsersPrivateKey) == 0 || len(encryptedWorkspaceKeySenderPublicKey) == 0 {
|
||||
log.Debug().Msgf("Missing credentials for generating plainTextEncryptionKey: [currentUsersPrivateKey=%s] [encryptedWorkspaceKeySenderPublicKey=%s]", currentUsersPrivateKey, encryptedWorkspaceKeySenderPublicKey)
|
||||
PrintErrorMessageAndExit("Some required user credentials are missing to generate your [plainTextEncryptionKey]. Please run [infisical login] then try again")
|
||||
}
|
||||
|
||||
// decrypt workspace key
|
||||
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
|
||||
|
||||
infisicalTokenEnv := os.Getenv(INFISICAL_TOKEN_NAME)
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, InfisicalToken: infisicalTokenEnv}, "")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to retrieve secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
secretsToCreate := []api.Secret{}
|
||||
secretsToModify := []api.Secret{}
|
||||
secretOperations := []models.SecretSetOperation{}
|
||||
|
||||
sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
|
||||
for _, secret := range secrets {
|
||||
if secret.Type == SECRET_TYPE_PERSONAL {
|
||||
personalSecretMapByName[secret.Key] = secret
|
||||
} else {
|
||||
sharedSecretMapByName[secret.Key] = secret
|
||||
}
|
||||
}
|
||||
|
||||
for _, arg := range secretArgs {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
key := splitKeyValueFromArg[0]
|
||||
value := splitKeyValueFromArg[1]
|
||||
|
||||
hashedKey := fmt.Sprintf("%x", sha256.Sum256([]byte(key)))
|
||||
encryptedKey, err := crypto.EncryptSymmetric([]byte(key), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to encrypt your secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
hashedValue := fmt.Sprintf("%x", sha256.Sum256([]byte(value)))
|
||||
encryptedValue, err := crypto.EncryptSymmetric([]byte(value), []byte(plainTextEncryptionKey))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to encrypt your secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
var existingSecret models.SingleEnvironmentVariable
|
||||
var doesSecretExist bool
|
||||
|
||||
if secretType == SECRET_TYPE_SHARED {
|
||||
existingSecret, doesSecretExist = sharedSecretMapByName[key]
|
||||
} else {
|
||||
existingSecret, doesSecretExist = personalSecretMapByName[key]
|
||||
}
|
||||
|
||||
if doesSecretExist {
|
||||
// case: secret exists in project so it needs to be modified
|
||||
encryptedSecretDetails := api.Secret{
|
||||
ID: existingSecret.ID,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
PlainTextKey: key,
|
||||
Type: existingSecret.Type,
|
||||
}
|
||||
|
||||
// Only add to modifications if the value is different
|
||||
if existingSecret.Value != value {
|
||||
secretsToModify = append(secretsToModify, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE MODIFIED",
|
||||
})
|
||||
} else {
|
||||
// Current value is the same as the existing one, so no change
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE UNCHANGED",
|
||||
})
|
||||
}
|
||||
|
||||
} else {
|
||||
// case: secret doesn't exist in project so it needs to be created
|
||||
encryptedSecretDetails := api.Secret{
|
||||
SecretKeyCiphertext: base64.StdEncoding.EncodeToString(encryptedKey.CipherText),
|
||||
SecretKeyIV: base64.StdEncoding.EncodeToString(encryptedKey.Nonce),
|
||||
SecretKeyTag: base64.StdEncoding.EncodeToString(encryptedKey.AuthTag),
|
||||
SecretKeyHash: hashedKey,
|
||||
SecretValueCiphertext: base64.StdEncoding.EncodeToString(encryptedValue.CipherText),
|
||||
SecretValueIV: base64.StdEncoding.EncodeToString(encryptedValue.Nonce),
|
||||
SecretValueTag: base64.StdEncoding.EncodeToString(encryptedValue.AuthTag),
|
||||
SecretValueHash: hashedValue,
|
||||
Type: secretType,
|
||||
PlainTextKey: key,
|
||||
}
|
||||
secretsToCreate = append(secretsToCreate, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET CREATED",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToCreate {
|
||||
createSecretRequest := api.CreateSecretV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretName: secret.PlainTextKey,
|
||||
SecretKeyCiphertext: secret.SecretKeyCiphertext,
|
||||
SecretKeyIV: secret.SecretKeyIV,
|
||||
SecretKeyTag: secret.SecretKeyTag,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallCreateSecretsV3(httpClient, createSecretRequest)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process new secret creations [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToModify {
|
||||
updateSecretRequest := api.UpdateSecretByNameV3Request{
|
||||
WorkspaceID: workspaceFile.WorkspaceId,
|
||||
Environment: environmentName,
|
||||
SecretValueCiphertext: secret.SecretValueCiphertext,
|
||||
SecretValueIV: secret.SecretValueIV,
|
||||
SecretValueTag: secret.SecretValueTag,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
}
|
||||
|
||||
err = api.CallUpdateSecretsV3(httpClient, updateSecretRequest, secret.PlainTextKey)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process secret update request [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
return secretOperations, nil
|
||||
|
||||
}
|
||||
|
||||
func SetRawSecrets(secretArgs []string, secretType string, environmentName string, secretsPath string, projectId string, tokenDetails *models.TokenDetails) ([]models.SecretSetOperation, error) {
|
||||
|
||||
if tokenDetails == nil {
|
||||
return nil, fmt.Errorf("unable to process set secret operations, token details are missing")
|
||||
}
|
||||
|
||||
getAllEnvironmentVariablesRequest := models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, WorkspaceId: projectId}
|
||||
if tokenDetails.Type == UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
|
||||
getAllEnvironmentVariablesRequest.UniversalAuthAccessToken = tokenDetails.Token
|
||||
} else {
|
||||
getAllEnvironmentVariablesRequest.InfisicalToken = tokenDetails.Token
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(tokenDetails.Token).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := GetAllEnvironmentVariables(getAllEnvironmentVariablesRequest, "")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to retrieve secrets [err=%v]", err)
|
||||
}
|
||||
|
||||
secretsToCreate := []api.RawSecret{}
|
||||
secretsToModify := []api.RawSecret{}
|
||||
secretOperations := []models.SecretSetOperation{}
|
||||
|
||||
sharedSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
personalSecretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
|
||||
|
||||
for _, secret := range secrets {
|
||||
if secret.Type == SECRET_TYPE_PERSONAL {
|
||||
personalSecretMapByName[secret.Key] = secret
|
||||
} else {
|
||||
sharedSecretMapByName[secret.Key] = secret
|
||||
}
|
||||
}
|
||||
|
||||
for _, arg := range secretArgs {
|
||||
splitKeyValueFromArg := strings.SplitN(arg, "=", 2)
|
||||
if splitKeyValueFromArg[0] == "" || splitKeyValueFromArg[1] == "" {
|
||||
PrintErrorMessageAndExit("ensure that each secret has a none empty key and value. Modify the input and try again")
|
||||
}
|
||||
|
||||
if unicode.IsNumber(rune(splitKeyValueFromArg[0][0])) {
|
||||
PrintErrorMessageAndExit("keys of secrets cannot start with a number. Modify the key name(s) and try again")
|
||||
}
|
||||
|
||||
// Key and value from argument
|
||||
key := splitKeyValueFromArg[0]
|
||||
value := splitKeyValueFromArg[1]
|
||||
|
||||
var existingSecret models.SingleEnvironmentVariable
|
||||
var doesSecretExist bool
|
||||
|
||||
if secretType == SECRET_TYPE_SHARED {
|
||||
existingSecret, doesSecretExist = sharedSecretMapByName[key]
|
||||
} else {
|
||||
existingSecret, doesSecretExist = personalSecretMapByName[key]
|
||||
}
|
||||
|
||||
if doesSecretExist {
|
||||
// case: secret exists in project so it needs to be modified
|
||||
encryptedSecretDetails := api.RawSecret{
|
||||
ID: existingSecret.ID,
|
||||
SecretValue: value,
|
||||
SecretKey: key,
|
||||
Type: existingSecret.Type,
|
||||
}
|
||||
|
||||
// Only add to modifications if the value is different
|
||||
if existingSecret.Value != value {
|
||||
secretsToModify = append(secretsToModify, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE MODIFIED",
|
||||
})
|
||||
} else {
|
||||
// Current value is same as existing so no change
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET VALUE UNCHANGED",
|
||||
})
|
||||
}
|
||||
|
||||
} else {
|
||||
// case: secret doesn't exist in project so it needs to be created
|
||||
encryptedSecretDetails := api.RawSecret{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
Type: secretType,
|
||||
}
|
||||
secretsToCreate = append(secretsToCreate, encryptedSecretDetails)
|
||||
secretOperations = append(secretOperations, models.SecretSetOperation{
|
||||
SecretKey: key,
|
||||
SecretValue: value,
|
||||
SecretOperation: "SECRET CREATED",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToCreate {
|
||||
createSecretRequest := api.CreateRawSecretV3Request{
|
||||
SecretName: secret.SecretKey,
|
||||
SecretValue: secret.SecretValue,
|
||||
Type: secret.Type,
|
||||
SecretPath: secretsPath,
|
||||
WorkspaceID: projectId,
|
||||
Environment: environmentName,
|
||||
}
|
||||
|
||||
err = api.CallCreateRawSecretsV3(httpClient, createSecretRequest)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process new secret creations [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, secret := range secretsToModify {
|
||||
updateSecretRequest := api.UpdateRawSecretByNameV3Request{
|
||||
SecretName: secret.SecretKey,
|
||||
SecretValue: secret.SecretValue,
|
||||
SecretPath: secretsPath,
|
||||
WorkspaceID: projectId,
|
||||
Environment: environmentName,
|
||||
Type: secret.Type,
|
||||
}
|
||||
|
||||
err = api.CallUpdateRawSecretsV3(httpClient, updateSecretRequest)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process secret update request [err=%v]", err)
|
||||
}
|
||||
}
|
||||
|
||||
return secretOperations, nil
|
||||
|
||||
}
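
For illustration, a minimal sketch of how `SetRawSecrets` might be called with a universal auth access token. The wrapper function and the `packages/models`/`packages/util` import paths are assumptions; the function signature, constants, and the `TokenDetails`/`SecretSetOperation` field names are taken from usage in this diff.

```go
package cmd

import (
	"fmt"

	"github.com/Infisical/infisical-merge/packages/models" // assumed import path
	"github.com/Infisical/infisical-merge/packages/util"   // assumed import path
)

// setExampleSecrets creates or updates two shared secrets in the dev
// environment at the root path of the given project.
func setExampleSecrets(accessToken string, projectID string) error {
	ops, err := util.SetRawSecrets(
		[]string{"DB_HOST=localhost", "DB_PORT=5432"}, // KEY=VALUE pairs
		util.SECRET_TYPE_SHARED,
		"dev", // environment slug
		"/",   // secrets path
		projectID,
		&models.TokenDetails{Type: util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER, Token: accessToken},
	)
	if err != nil {
		return err
	}

	for _, op := range ops {
		fmt.Printf("%s: %s\n", op.SecretKey, op.SecretOperation)
	}
	return nil
}
```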
|
||||
|
@ -4,59 +4,63 @@ sidebarTitle: "What is Infisical?"
|
||||
description: "An Introduction to the Infisical secret management platform."
|
||||
---
|
||||
|
||||
Infisical is an [open-source](https://github.com/infisical/infisical) secret management platform for developers.
|
||||
It provides capabilities for storing, managing, and syncing application configuration and secrets like API keys, database
|
||||
credentials, and certificates across infrastructure. In addition, Infisical prevents secrets leaks to git and enables secure
|
||||
sharing of secrets among engineers.
|
||||
**[Infisical](https://infisical.com)** is the open source secret management platform that developers use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI. In addition, developers use Infisical to prevent secrets leaks to git and securely share secrets amongst engineers.
|
||||
|
||||
Start managing secrets securely with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card
|
||||
title="Infisical Cloud"
|
||||
href="https://app.infisical.com/signup"
|
||||
icon="cloud"
|
||||
color="#000000"
|
||||
>
|
||||
Get started with Infisical Cloud in just a few minutes.
|
||||
</Card>
|
||||
<Card
|
||||
href="/self-hosting/overview"
|
||||
title="Self-hosting"
|
||||
icon="server"
|
||||
color="#000000"
|
||||
>
|
||||
Self-host Infisical on your own infrastructure.
|
||||
</Card>
|
||||
<Card
|
||||
title="Infisical Cloud"
|
||||
href="https://app.infisical.com/signup"
|
||||
icon="cloud"
|
||||
color="#000000"
|
||||
>
|
||||
Get started with Infisical Cloud in just a few minutes.
|
||||
</Card>
|
||||
<Card
|
||||
href="/self-hosting/overview"
|
||||
title="Self-hosting"
|
||||
icon="server"
|
||||
color="#000000"
|
||||
>
|
||||
Self-host Infisical on your own infrastructure.
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Why Infisical?
|
||||
## Why Infisical?
|
||||
|
||||
Infisical helps developers achieve secure centralized secret management and provides all the tools to easily manage secrets in various environments and infrastructure components. In particular, here are some of the most common points that developers mention after adopting Infisical:
|
||||
|
||||
Infisical helps developers achieve secure centralized secret management and provides all the tools to easily manage secrets in various environments and infrastructure components. In particular, here are some of the most common points that developers mention after adopting Infisical:
|
||||
- Streamlined **local development** processes (switching .env files to [Infisical CLI](/cli/commands/run) and removing secrets from developer machines).
|
||||
- **Best-in-class developer experience** with an easy-to-use [Web Dashboard](/documentation/platform/project).
|
||||
- Simple secret management inside **[CI/CD pipelines](/integrations/cicd/githubactions)** and staging environments.
|
||||
- Secure and compliant secret management practices in **[production environments](/sdks/overview)**.
|
||||
- **Best-in-class developer experience** with an easy-to-use [Web Dashboard](/documentation/platform/project).
|
||||
- Simple secret management inside **[CI/CD pipelines](/integrations/cicd/githubactions)** and staging environments.
|
||||
- Secure and compliant secret management practices in **[production environments](/sdks/overview)**.
|
||||
- **Facilitated workflows** around [secret change management](/documentation/platform/pr-workflows), [access requests](/documentation/platform/access-controls/access-requests), [temporary access provisioning](/documentation/platform/access-controls/temporary-access), and more.
|
||||
- **Improved security posture** thanks to [secret scanning](/cli/scanning-overview), [granular access control policies](/documentation/platform/access-controls/overview), [automated secret rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview), and [dynamic secrets](/documentation/platform/dynamic-secrets/overview) capabilities.
|
||||
|
||||
## How does Infisical work?
|
||||
## How does Infisical work?
|
||||
|
||||
To make secret management effortless and secure, Infisical follows a certain structure for enabling secret management workflows as defined below.
|
||||
To make secret management effortless and secure, Infisical follows a certain structure for enabling secret management workflows as defined below.
|
||||
|
||||
**Identities** in Infisical are users or machines which have a certain set of roles and permissions assigned to them. Such identities are able to manage secrets in various **Clients** throughout the entire infrastructure. To do that, identities have to verify themselves through one of the available **Authentication Methods**.
|
||||
**Identities** in Infisical are users or machines which have a certain set of roles and permissions assigned to them. Such identities are able to manage secrets in various **Clients** throughout the entire infrastructure. To do that, identities have to verify themselves through one of the available **Authentication Methods**.
|
||||
|
||||
As a result, the 3 main concepts that are important to understand are:
|
||||
- **[Identities](/documentation/platform/identities/overview)**: users or machines with a set of permissions assigned to them.
|
||||
As a result, the 3 main concepts that are important to understand are:
|
||||
|
||||
- **[Identities](/documentation/platform/identities/overview)**: users or machines with a set of permissions assigned to them.
|
||||
- **[Clients](/integrations/platforms/kubernetes)**: Infisical-developed tools for managing secrets in various infrastructure components (e.g., [Kubernetes Operator](/integrations/platforms/kubernetes), [Infisical Agent](/integrations/platforms/infisical-agent), [CLI](/cli/usage), [SDKs](/sdks/overview), [API](/api-reference/overview/introduction), [Web Dashboard](/documentation/platform/organization)).
|
||||
- **[Authentication Methods](/documentation/platform/identities/universal-auth)**: ways for Identities to authenticate inside different clients (e.g., SAML SSO for Web Dashboard, Universal Auth for Infisical Agent, etc.).
|
||||
|
||||
## How to get started with Infisical?
|
||||
## How to get started with Infisical?
|
||||
|
||||
Depending on your use case, it might be helpful to look into some of the resources and guides provided below.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card href="../../cli/overview" title="Command Line Interface (CLI)" icon="square-terminal" color="#000000">
|
||||
<Card
|
||||
href="../../cli/overview"
|
||||
title="Command Line Interface (CLI)"
|
||||
icon="square-terminal"
|
||||
color="#000000"
|
||||
>
|
||||
Inject secrets into any application process/environment.
|
||||
</Card>
|
||||
<Card
|
||||
@ -67,7 +71,12 @@ Depending on your use case, it might be helpful to look into some of the resourc
|
||||
>
|
||||
Fetch secrets with any programming language on demand.
|
||||
</Card>
|
||||
<Card href="../../integrations/platforms/docker-intro" title="Docker" icon="docker" color="#000000">
|
||||
<Card
|
||||
href="../../integrations/platforms/docker-intro"
|
||||
title="Docker"
|
||||
icon="docker"
|
||||
color="#000000"
|
||||
>
|
||||
Inject secrets into Docker containers.
|
||||
</Card>
|
||||
<Card
|
||||
|
docs/api-reference/endpoints/secret-tags/get-by-id.mdx (new file, 4 lines added)
@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get By ID"
|
||||
openapi: "GET /api/v1/workspace/{projectId}/tags/{tagId}"
|
||||
---
|
docs/api-reference/endpoints/secret-tags/get-by-slug.mdx (new file, 4 lines added)
@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Get By Slug"
|
||||
openapi: "GET /api/v1/workspace/{projectId}/tags/slug/{tagSlug}"
|
||||
---
|
docs/api-reference/endpoints/secret-tags/update.mdx (new file, 4 lines added)
@ -0,0 +1,4 @@
|
||||
---
|
||||
title: "Update"
|
||||
openapi: "PATCH /api/v1/workspace/{projectId}/tags/{tagId}"
|
||||
---
|
@ -4,6 +4,25 @@ title: "Changelog"
|
||||
|
||||
The changelog below reflects new product developments and updates on a monthly basis.
|
||||
|
||||
## May 2024
|
||||
- Released [AWS](https://infisical.com/docs/documentation/platform/identities/aws-auth), [GCP](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure](https://infisical.com/docs/documentation/platform/identities/azure-auth), and [Kubernetes](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth) Native Auth Methods.
|
||||
- Added [Secret Sharing](https://infisical.com/docs/documentation/platform/secret-sharing) functionality for sharing sensitive data through encrypted links – within and outside of an organization.
|
||||
- Updated [Secret Referencing](https://infisical.com/docs/documentation/platform/secret-reference) to be supported in all Infisical clients. Infisical UI is now able to provide automatic reference suggestions when typing.
|
||||
- Released new [Infisical Jenkins Plugin](https://infisical.com/docs/integrations/cicd/jenkins).
|
||||
- Added statuses and manual sync option to integrations in the Dashboard UI.
|
||||
- Released universal [Audit Log Streaming](https://infisical.com/docs/documentation/platform/audit-log-streams).
|
||||
- Added [Dynamic Secret template for AWS IAM](https://infisical.com/docs/documentation/platform/dynamic-secrets/aws-iam).
|
||||
- Added support for syncing tags and custom KMS keys to [AWS Secrets Manager](https://infisical.com/docs/integrations/cloud/aws-secret-manager) and [Parameter Store](https://infisical.com/docs/integrations/cloud/aws-parameter-store) Integrations.
|
||||
- Officially released Infisical on [AWS Marketplace](https://infisical.com/blog/infisical-launches-on-aws-marketplace).
|
||||
|
||||
## April 2024
|
||||
- Added [Access Requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests) as part of self-serve secrets management workflows.
|
||||
- Added [Temporary Access Provisioning](https://infisical.com/docs/documentation/platform/access-controls/temporary-access) for roles and additional privileges.
|
||||
|
||||
## March 2024
|
||||
- Released support for [Dynamic Secrets](https://infisical.com/docs/documentation/platform/dynamic-secrets/overview).
|
||||
- Released the concept of [Additional Privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges) on top of user/machine roles.
|
||||
|
||||
## Feb 2024
|
||||
- Added org-scoped authentication enforcement for SAML
|
||||
- Added support for [SCIM](https://infisical.com/docs/documentation/platform/scim/overview) along with instructions for setting it up with [Okta](https://infisical.com/docs/documentation/platform/scim/okta), [Azure](https://infisical.com/docs/documentation/platform/scim/azure), and [JumpCloud](https://infisical.com/docs/documentation/platform/scim/jumpcloud).
|
||||
|
@ -7,32 +7,38 @@ description: "Login into Infisical from the CLI"
|
||||
infisical login
|
||||
```
|
||||
|
||||
## Description
|
||||
### Description
|
||||
The CLI uses authentication to verify your identity. When you enter the correct email and password for your account, a token is generated and saved in your system Keyring to allow you to make future interactions with the CLI.
|
||||
|
||||
To change where the login credentials are stored, visit the [vaults command](./vault).
|
||||
|
||||
If you have added multiple users, you can switch between the users by using the [user command](./user).
|
||||
|
||||
<Info>
|
||||
When you authenticate with **any method other than `user`**, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it via the `--token` flag where applicable.
|
||||
|
||||
Use flag `--plain` along with `--silent` to print only the token in plain text when using a machine identity auth method.
|
||||
|
||||
</Info>
|
||||
|
||||
|
||||
### Flags
|
||||
The login command supports a number of flags that you can use for different authentication methods. Below is a list of all the flags that can be used with the login command.
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="--method">
|
||||
```bash
|
||||
infisical login --method=<auth-method> # Optional, will default to 'user'.
|
||||
```
|
||||
|
||||
#### Valid values for the `method` flag are:
|
||||
- `user`: Login using email and password.
|
||||
- `user`: Login using email and password. (default)
|
||||
- `universal-auth`: Login using a universal auth client ID and client secret.
|
||||
|
||||
<Info>
|
||||
When `method` is set to `universal-auth`, the `client-id` and `client-secret` flags are required. Optionally you can set the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` and `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variables instead of using the flags.
|
||||
|
||||
When you authenticate with universal auth, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it in the `--token` flag when applicable.
|
||||
|
||||
Use flag `--plain` along with `--silent` to print only the token in plain text when using the `universal-auth` method.
|
||||
|
||||
</Info>
|
||||
- `kubernetes`: Login using a Kubernetes native auth.
|
||||
- `azure`: Login using an Azure native auth.
|
||||
- `gcp-id-token`: Login using a GCP ID token native auth.
|
||||
- `gcp-iam`: Login using a GCP IAM.
|
||||
- `aws-iam`: Login using an AWS IAM native auth.
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="--client-id">
|
||||
@ -41,7 +47,7 @@ If you have added multiple users, you can switch between the users by using the
|
||||
```
|
||||
|
||||
#### Description
|
||||
The client ID of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.
|
||||
The client ID of the universal auth machine identity. This is required if the `--method` flag is set to `universal-auth`.
|
||||
|
||||
<Tip>
|
||||
The `client-id` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` environment variable.
|
||||
@ -52,13 +58,245 @@ If you have added multiple users, you can switch between the users by using the
|
||||
infisical login --client-secret=<client-secret> # Optional, required if --method=universal-auth.
|
||||
```
|
||||
#### Description
|
||||
The client secret of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.
|
||||
The client secret of the universal auth machine identity. This is required if the `--method` flag is set to `universal-auth`.
|
||||
|
||||
<Tip>
|
||||
The `client-secret` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variable.
|
||||
</Tip>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="--machine-identity-id">
|
||||
```bash
|
||||
infisical login --machine-identity-id=<your-machine-identity-id> # Optional, required if --method=kubernetes, azure, gcp-id-token, gcp-iam, or aws-iam.
|
||||
```
|
||||
|
||||
#### Description
|
||||
The ID of the machine identity. This is required if the `--method` flag is set to `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, or `aws-iam`.
|
||||
|
||||
<Tip>
|
||||
The `machine-identity-id` flag can be substituted with the `INFISICAL_MACHINE_IDENTITY_ID` environment variable.
|
||||
</Tip>
|
||||
</Accordion>
|
||||
<Accordion title="--service-account-token-path">
|
||||
```bash
|
||||
infisical login --service-account-token-path=<service-account-token-path> # Optional. Will default to '/var/run/secrets/kubernetes.io/serviceaccount/token'.
|
||||
```
|
||||
|
||||
#### Description
|
||||
The path to the Kubernetes service account token to use for authentication.
|
||||
This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
|
||||
<Tip>
|
||||
The `service-account-token-path` flag can be substituted with the `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH` environment variable.
|
||||
</Tip>
|
||||
</Accordion>
|
||||
<Accordion title="--service-account-key-file-path">
|
||||
```bash
|
||||
infisical login --service-account-key-file-path=<gcp-service-account-key-file-path> # Optional, but required if --method=gcp-iam.
|
||||
```
|
||||
|
||||
#### Description
|
||||
The path to your GCP service account key file. This is required if the `--method` flag is set to `gcp-iam`.
|
||||
|
||||
<Tip>
|
||||
The `service-account-key-file-path` flag can be substituted with the `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` environment variable.
|
||||
</Tip>
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
||||
|
||||
|
||||
### Authentication Methods
|
||||
|
||||
The Infisical CLI supports multiple authentication methods. Below are the available authentication methods, with their respective flags.
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="Universal Auth">
|
||||
The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret to authenticate with Infisical.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="client-id" type="string" required>
|
||||
Your machine identity client ID.
|
||||
</ParamField>
|
||||
<ParamField query="client-secret" type="string" required>
|
||||
Your machine identity client secret.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a universal auth machine identity">
|
||||
To create a universal auth machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/universal-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=universal-auth --client-id=<client-id> --client-secret=<client-secret>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="Native Kubernetes">
|
||||
The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-token-path" type="string" optional>
|
||||
Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a Kubernetes machine identity">
|
||||
To create a Kubernetes machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/kubernetes-auth).
|
||||
</Step>
|
||||
<Step title="Obtain access an token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
# --service-account-token-path is optional, and will default to '/var/run/secrets/kubernetes.io/serviceaccount/token' if not provided.
|
||||
infisical login --method=kubernetes --machine-identity-id=<machine-identity-id> --service-account-token-path=<service-account-token-path>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Native Azure">
|
||||
The Native Azure method is used to authenticate with Infisical when running in an Azure environment.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an Azure machine identity">
|
||||
To create an Azure machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/azure-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=azure --machine-identity-id=<machine-identity-id>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Native GCP ID Token">
|
||||
The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a GCP machine identity">
|
||||
To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=gcp-id-token --machine-identity-id=<machine-identity-id>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="GCP IAM">
|
||||
The GCP IAM method is used to authenticate with Infisical with a GCP service account key.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-key-file-path" type="string" required>
|
||||
Path to your GCP service account key file _(Must be in JSON format!)_
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a GCP machine identity">
|
||||
To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=gcp-iam --machine-identity-id=<machine-identity-id> --service-account-key-file-path=<service-account-key-file-path>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="Native AWS IAM">
|
||||
The AWS IAM method is used to authenticate with Infisical with an AWS IAM role while running in an AWS environment like EC2, Lambda, etc.
|
||||
|
||||
<ParamField query="Flags">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="machine-identity-id" type="string" required>
|
||||
Your machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an AWS machine identity">
|
||||
To create an AWS machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/aws-auth).
|
||||
</Step>
|
||||
<Step title="Obtain an access token">
|
||||
Run the `login` command with the following flags to obtain an access token:
|
||||
|
||||
```bash
|
||||
infisical login --method=aws-iam --machine-identity-id=<machine-identity-id>
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
||||
### Machine Identity Authentication Quick Start
|
||||
In this example, we'll use the `universal-auth` method to log in and obtain an Infisical access token, which we will then use to fetch secrets.
|
||||
|
||||
<Steps>
|
||||
<Step title="Obtain an access token">
|
||||
```bash
|
||||
export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<client-id> --client-secret=<client-secret> --silent --plain) # --silent and --plain are important to ensure only the token itself is printed, so it can easily be set as an environment variable.
|
||||
```
|
||||
|
||||
Now that we've set the `INFISICAL_TOKEN` environment variable, we can use the CLI to interact with Infisical. The CLI will automatically check for the presence of the `INFISICAL_TOKEN` environment variable and use it for authentication.
|
||||
|
||||
|
||||
Alternatively, if you would rather use the `--token` flag to pass the token directly, you can do so by running the following command:
|
||||
|
||||
```bash
|
||||
infisical [command] --token=<your-access-token> # The token output from the login command.
|
||||
```
|
||||
</Step>
|
||||
|
||||
<Step title="Fetch all secrets from an evironment">
|
||||
```bash
|
||||
infisical secrets --projectId=<your-project-id --env=dev --recursive
|
||||
```
|
||||
|
||||
This command will fetch all secrets from the `dev` environment in your project, including all secrets in subfolders.
|
||||
|
||||
<Info>
|
||||
The `--recursive`, and `--env` flag is optional and will fetch all secrets in subfolders. The default environment is `dev` if no `--env` flag is provided.
|
||||
</Info>
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
And that's it! You're now ready to use the Infisical CLI to interact with your secrets using Machine Identities.
|
||||
|
@ -4,10 +4,7 @@ sidebarTitle: "What is Infisical?"
|
||||
description: "An Introduction to the Infisical secret management platform."
|
||||
---
|
||||
|
||||
Infisical is an [open-source](https://github.com/infisical/infisical) secret management platform for developers.
|
||||
It provides capabilities for storing, managing, and syncing application configuration and secrets like API keys, database
|
||||
credentials, and certificates across infrastructure. In addition, Infisical prevents secrets leaks to git and enables secure
|
||||
sharing of secrets among engineers.
|
||||
**[Infisical](https://infisical.com)** is the open source secret management platform that developers use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI. Additionally, developers use Infisical to prevent secrets leaks to git and securely share secrets amongst engineers.
|
||||
|
||||
Start managing secrets securely with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself.
|
||||
|
||||
|
@ -13,11 +13,11 @@ There is a number of issues that arise with secret management in local developme
|
||||
|
||||
## Solution
|
||||
|
||||
One of the main benefits of Infisical is the facilitation of secret management workflows in local development use cases. In particular, Infisical heavily follows the "Security Shift Left" principle to enable developers to effotlessly follow secure practices when coding.
|
||||
One of the main benefits of Infisical is the facilitation of secret management workflows in local development use cases. In particular, Infisical heavily follows the "Security Shift Left" principle to enable developers to effortlessly follow secure practices when coding.
|
||||
|
||||
### CLI
|
||||
|
||||
[Infisical CLI](/cli/overview) is the most frequently used Infisical tool for secret management in local development environments. It makes it easy to inject secrets right into the local application environments based on the permissions given to corresponsing developers.
|
||||
[Infisical CLI](/cli/overview) is the most frequently used Infisical tool for secret management in local development environments. It makes it easy to inject secrets right into the local application environments based on the permissions given to corresponding developers.
|
||||
|
||||
### Dashboard
|
||||
|
||||
@ -31,4 +31,4 @@ By default, all the secrets in the Infisical environments are shared among proje
|
||||
|
||||
### Secret Scanning
|
||||
|
||||
In addition, Infisical also provides a set of tools to automatically prevent secret leaks to git history. This functionlality can be set up on the level of [Infisical CLI using pre-commit hooks](/cli/scanning-overview#automatically-scan-changes-before-you-commit) or through a direct integration with platforms like GitHub.
|
||||
In addition, Infisical also provides a set of tools to automatically prevent secret leaks to git history. This functionality can be set up on the level of [Infisical CLI using pre-commit hooks](/cli/scanning-overview#automatically-scan-changes-before-you-commit) or through a direct integration with platforms like GitHub.
|
Binary image file changed (not shown). Size before: 467 KiB, after: 1.2 MiB.
@ -3,7 +3,48 @@ title: "Ansible"
|
||||
description: "Learn how to use Infisical for secret management in Ansible."
|
||||
---
|
||||
|
||||
The documentation for using Infisical to manage secrets in Ansible is currently available [here](https://galaxy.ansible.com/ui/repo/published/infisical/vault/).
|
||||
You can find the Infisical Ansible collection on [Ansible Galaxy](https://galaxy.ansible.com/ui/repo/published/infisical/vault/).
|
||||
|
||||
|
||||
The Infisical Ansible collection includes a variety of Ansible content to help automate the management of Infisical services. It is maintained by the Infisical team.
|
||||
|
||||
|
||||
## Ansible version compatibility
|
||||
Tested with Ansible Core >= 2.12.0 and the current development version of Ansible. Ansible Core versions prior to 2.12.0 have not been tested.
|
||||
|
||||
## Python version compatibility
|
||||
This collection depends on the Infisical SDK for Python.
|
||||
|
||||
Requires Python 3.7 or greater.
|
||||
|
||||
## Installing this collection
|
||||
You can install the Infisical collection with the Ansible Galaxy CLI:
|
||||
|
||||
```bash
|
||||
$ ansible-galaxy collection install infisical.vault
|
||||
```
|
||||
|
||||
The Python module dependencies are not installed by ansible-galaxy. They can be installed manually using pip:
|
||||
|
||||
```bash
|
||||
$ pip install infisical-python
|
||||
```
|
||||
|
||||
## Using this collection
|
||||
|
||||
You can either call modules by their Fully Qualified Collection Name (FQCN), such as `infisical.vault.read_secrets`, or you can call modules by their short name if you list the `infisical.vault` collection in the playbook's collections keyword:
|
||||
|
||||
|
||||
```yaml
|
||||
---
|
||||
vars:
|
||||
read_all_secrets_within_scope: "{{ lookup('infisical.vault.read_secrets', universal_auth_client_id='<>', universal_auth_client_secret='<>', project_id='<>', path='/', env_slug='dev', url='https://spotify.infisical.com') }}"
|
||||
# [{ "key": "HOST", "value": "google.com" }, { "key": "SMTP", "value": "gmail.smtp.edu" }]
|
||||
|
||||
read_secret_by_name_within_scope: "{{ lookup('infisical.vault.read_secrets', universal_auth_client_id='<>', universal_auth_client_secret='<>', project_id='<>', path='/', env_slug='dev', secret_name='HOST', url='https://spotify.infisical.com') }}"
|
||||
# [{ "key": "HOST", "value": "google.com" }]
|
||||
```
|
||||
|
||||
|
||||
## Troubleshoot
|
||||
|
||||
|
@ -41,22 +41,212 @@ It then formats these secrets using the user provided templates and writes the f
|
||||
To set up the authentication method for token renewal and to define secret templates, the Infisical agent requires a YAML configuration file containing properties defined below.
|
||||
While specifying an authentication method is mandatory to start the agent, configuring sinks and secret templates is optional.
|
||||
|
||||
| Field | Description |
|
||||
| ---------------------------- | ----------- |
|
||||
| `infisical.address` | The URL of the Infisical service. Default: `"https://app.infisical.com"`. |
|
||||
| `auth.type` | The type of authentication method used. Only `"universal-auth"` type is currently available |
|
||||
| `auth.config.client-id` | The file path where the universal-auth client id is stored. |
|
||||
| `auth.config.client-secret` | The file path where the universal-auth client secret is stored. |
|
||||
| `auth.config.remove_client_secret_on_read` | This will instruct the agent to remove the client secret from disk. |
|
||||
| `sinks[].type` | The type of sink in a list of sinks. Each item specifies a sink type. Currently, only `"file"` type is available. |
|
||||
| `sinks[].config.path` | The file path where the access token should be stored for each sink in the list. |
|
||||
| `templates[].source-path` | The path to the template file that should be used to render secrets. |
|
||||
| `templates[].destination-path` | The path where the rendered secrets from the source template will be saved to. |
|
||||
| `templates[].config.polling-interval` | How frequently to check for secret changes. Default: `5 minutes` (optional) |
|
||||
| `templates[].config.execute.command` | The command to execute when secret change is detected (optional) |
|
||||
| `templates[].config.execute.timeout` | How long in seconds to wait for command to execute before timing out (optional) |
|
||||
| Field | Description |
|
||||
| ------------------------------------------------| ----------------------------- |
|
||||
| `infisical.address` | The URL of the Infisical service. Default: `"https://app.infisical.com"`. |
|
||||
| `auth.type` | The type of authentication method used. Available options: `universal-auth`, `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, `aws-iam`|
|
||||
| `auth.config.identity-id` | The file path where the machine identity id is stored<br/><br/>This field is required when using any of the following auth types: `kubernetes`, `azure`, `gcp-id-token`, `gcp-iam`, or `aws-iam`. |
|
||||
| `auth.config.service-account-token` | Path to the Kubernetes service account token to use (optional)<br/><br/>Default: `/var/run/secrets/kubernetes.io/serviceaccount/token` |
|
||||
| `auth.config.service-account-key` | Path to your GCP service account key file. This field is required when using `gcp-iam` auth type.<br/><br/>Please note that the file should be in JSON format. |
|
||||
| `auth.config.client-id` | The file path where the universal-auth client id is stored. |
|
||||
| `auth.config.client-secret` | The file path where the universal-auth client secret is stored. |
|
||||
| `auth.config.remove_client_secret_on_read` | This will instruct the agent to remove the client secret from disk. |
|
||||
| `sinks[].type` | The type of sink in a list of sinks. Each item specifies a sink type. Currently, only `"file"` type is available. |
|
||||
| `sinks[].config.path` | The file path where the access token should be stored for each sink in the list. |
|
||||
| `templates[].source-path` | The path to the template file that should be used to render secrets. |
|
||||
| `templates[].destination-path` | The path where the rendered secrets from the source template will be saved to. |
|
||||
| `templates[].config.polling-interval` | How frequently to check for secret changes. Default: `5 minutes` (optional) |
|
||||
| `templates[].config.execute.command` | The command to execute when secret change is detected (optional) |
|
||||
| `templates[].config.execute.timeout` | How long in seconds to wait for command to execute before timing out (optional) |
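
Putting these fields together, a minimal agent configuration using Universal Auth might look like the sketch below; the file paths, polling interval, and restart command are illustrative:

```yaml example-agent-config.yaml
infisical:
  address: "https://app.infisical.com"
auth:
  type: "universal-auth"
  config:
    client-id: "./client-id"
    client-secret: "./client-secret"
    remove_client_secret_on_read: false
sinks:
  - type: "file"
    config:
      path: "/tmp/infisical-access-token" # where the agent writes the access token
templates:
  - source-path: "./secrets.tpl"
    destination-path: "./.env"
    config:
      polling-interval: 60s # how often to check for secret changes
      execute:
        command: "systemctl restart my-app" # illustrative command to run when secrets change
        timeout: 30
```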
|
||||
|
||||
|
||||
## Authentication
|
||||
|
||||
The Infisical agent supports multiple authentication methods. Below are the available authentication methods, with their respective configurations.
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="Universal Auth">
|
||||
The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret to authenticate with Infisical.
|
||||
|
||||
<ParamField query="config" type="UniversalAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="client-id" type="string" required>
|
||||
Path to the file containing the universal auth client ID.
|
||||
</ParamField>
|
||||
<ParamField query="client-secret" type="string" required>
|
||||
Path to the file containing the universal auth client secret.
|
||||
</ParamField>
|
||||
<ParamField query="remove_client_secret_on_read" type="boolean" optional>
|
||||
Instructs the agent to remove the client secret from disk after reading it.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a universal auth machine identity">
|
||||
To create a universal auth machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/universal-auth).
|
||||
</Step>
|
||||
<Step title="Configure the agent">
|
||||
Update the agent configuration file with the specified auth method, client ID, and client secret. In the snippet below you can see a sample configuration of the `auth` field when using the Universal Auth method.
|
||||
|
||||
```yaml example-auth-config.yaml
|
||||
auth:
|
||||
type: "universal-auth"
|
||||
config:
|
||||
client-id: "./client-id" # Path to the file containing the client ID
|
||||
client-secret: "./client" # Path to the file containing the client secret
|
||||
remove_client_secret_on_read: false # Optional field, instructs the agent to remove the client secret from disk after reading it
|
||||
```
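
As a quick illustration, the two files referenced above can be created from the machine identity credentials (the values are placeholders; store the files with restrictive permissions):

```bash
# Write the universal auth credentials to the paths used in the sample config above
echo "<your-universal-auth-client-id>" > client-id
echo "<your-universal-auth-client-secret>" > client
chmod 600 client-id client
```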
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="Native Kubernetes">
|
||||
The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical.
|
||||
|
||||
<ParamField query="config" type="KubernetesAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="identity-id" type="string" required>
|
||||
Path to the file containing the machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-token" type="string" optional>
|
||||
Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a Kubernetes machine identity">
|
||||
To create a Kubernetes machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/kubernetes-auth).
|
||||
</Step>
|
||||
<Step title="Configure the agent">
|
||||
Update the agent configuration file with the specified auth method, identity ID, and service account token. In the snippet below you can see a sample configuration of the `auth` field when using the Kubernetes method.
|
||||
|
||||
```yaml example-auth-config.yaml
|
||||
auth:
|
||||
type: "kubernetes"
|
||||
config:
|
||||
identity-id: "./identity-id" # Path to the file containing the machine identity ID
|
||||
service-account-token: "/var/run/secrets/kubernetes.io/serviceaccount/token" # Optional field, custom path to the Kubernetes service account token to use
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Native Azure">
|
||||
The Native Azure method is used to authenticate with Infisical when running in an Azure environment.
|
||||
|
||||
<ParamField query="config" type="AzureAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="identity-id" type="string" required>
|
||||
Path to the file containing the machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an Azure machine identity">
|
||||
To create an Azure machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/azure-auth).
|
||||
</Step>
|
||||
<Step title="Configure the agent">
|
||||
Update the agent configuration file with the specified auth method and identity ID. In the snippet below you can see a sample configuration of the `auth` field when using the Azure method.
|
||||
|
||||
```yaml example-auth-config.yaml
|
||||
auth:
|
||||
type: "azure"
|
||||
config:
|
||||
identity-id: "./identity-id" # Path to the file containing the machine identity ID
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Native GCP ID Token">
|
||||
The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment.
|
||||
|
||||
<ParamField query="config" type="GCPIDTokenAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="identity-id" type="string" required>
|
||||
Path to the file containing the machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a GCP machine identity">
|
||||
To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Configure the agent">
|
||||
Update the agent configuration file with the specified auth method and identity ID. In the snippet below you can see a sample configuration of the `auth` field when using the GCP ID Token method.
|
||||
|
||||
```yaml example-auth-config.yaml
|
||||
auth:
|
||||
type: "gcp-id-token"
|
||||
config:
|
||||
identity-id: "./identity-id" # Path to the file containing the machine identity ID
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="GCP IAM">
|
||||
The GCP IAM method is used to authenticate with Infisical with a GCP service account key.
|
||||
|
||||
<ParamField query="config" type="GCPIAMAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="identity-id" type="string" required>
|
||||
Path to the file containing the machine identity ID.
|
||||
</ParamField>
|
||||
<ParamField query="service-account-key" type="string" required>
|
||||
Path to your GCP service account key file.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a GCP machine identity">
|
||||
To create a GCP machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Configure the agent">
|
||||
Update the agent configuration file with the specified auth method, identity ID, and service account key. In the snippet below you can see a sample configuration of the `auth` field when using the GCP IAM method.
|
||||
|
||||
```yaml example-auth-config.yaml
|
||||
auth:
|
||||
type: "gcp-iam"
|
||||
config:
|
||||
identity-id: "./identity-id" # Path to the file containing the machine identity ID
|
||||
service-account-key: "./service-account-key.json" # Path to your GCP service account key file
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
<Accordion title="Native AWS IAM">
|
||||
The AWS IAM method is used to authenticate with Infisical with an AWS IAM role while running in an AWS environment like EC2, Lambda, etc.
|
||||
|
||||
<ParamField query="config" type="AWSIAMAuthConfig">
|
||||
<Expandable title="properties">
|
||||
<ParamField query="identity-id" type="string" required>
|
||||
Path to the file containing the machine identity ID.
|
||||
</ParamField>
|
||||
</Expandable>
|
||||
</ParamField>
|
||||
|
||||
<Steps>
|
||||
<Step title="Create an AWS machine identity">
|
||||
To create an AWS machine identity, follow the step by step guide outlined [here](/documentation/platform/identities/aws-auth).
|
||||
</Step>
|
||||
<Step title="Configure the agent">
|
||||
Update the agent configuration file with the specified auth method and identity ID. In the snippet below you can see a sample configuration of the `auth` field when using the AWS IAM method.
|
||||
|
||||
```yaml example-auth-config.yaml
|
||||
auth:
|
||||
type: "aws-iam"
|
||||
config:
|
||||
identity-id: "./identity-id" # Path to the file containing the machine identity ID
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
||||
## Quick start Infisical Agent
|
||||
To install the Infisical agent, you must first install the [Infisical CLI](../cli/overview) in the desired environment where you'd like the agent to run. This is because the Infisical agent is a sub-command of the Infisical CLI.
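
Once the CLI is available and a configuration file has been written, the agent is started by pointing it at that file. A minimal sketch, assuming the configuration is saved as `agent-config.yaml`:

```bash
# Start the agent with the YAML configuration described above
infisical agent --config agent-config.yaml
```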
|
||||
|
||||
|
@ -24,14 +24,13 @@ The operator can be install via [Helm](https://helm.sh) or [kubectl](https://git
|
||||
|
||||
**Install the Helm chart**
|
||||
|
||||
For production deployments, it is highly recommended to set the chart version and the application version during installs and upgrades.
|
||||
This will prevent the operator from being accidentally updated to the latest version and introducing unintended breaking changes.
|
||||
|
||||
View application versions [here](https://hub.docker.com/r/infisical/kubernetes-operator/tags) and chart versions [here](https://cloudsmith.io/~infisical/repos/helm-charts/packages/detail/helm/secrets-operator/#versions)
|
||||
To select a specific version, view the application versions [here](https://hub.docker.com/r/infisical/kubernetes-operator/tags) and chart versions [here](https://cloudsmith.io/~infisical/repos/helm-charts/packages/detail/helm/secrets-operator/#versions)
|
||||
|
||||
```bash
|
||||
helm install --generate-name infisical-helm-charts/secrets-operator --version=<PLACE-CHART-VERSION-HERE> --set controllerManager.manager.image.tag=<PLACE-APP-VERSION-HERE>
|
||||
helm install --generate-name infisical-helm-charts/secrets-operator
|
||||
```
|
||||
|
||||
```bash
|
||||
# Example installing app version v0.2.0 and chart version 0.1.4
|
||||
helm install --generate-name infisical-helm-charts/secrets-operator --version=0.1.4 --set controllerManager.manager.image.tag=v0.2.0
|
||||
```
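
Upgrades can be pinned in the same way; the release name and version numbers below are placeholders:

```bash
# Example upgrading an existing release to chart version 0.1.5 and app version v0.2.1
helm upgrade <release-name> infisical-helm-charts/secrets-operator --version=0.1.5 --set controllerManager.manager.image.tag=v0.2.1
```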
|
||||
@ -58,46 +57,108 @@ Once you apply the manifest, the operator will be installed in `infisical-operat
|
||||
Once you have installed the operator to your cluster, you'll need to create a `InfisicalSecret` custom resource definition (CRD).
|
||||
|
||||
```yaml example-infisical-secret-crd.yaml
|
||||
|
||||
apiVersion: secrets.infisical.com/v1alpha1
|
||||
kind: InfisicalSecret
|
||||
metadata:
|
||||
name: infisicalsecret-sample
|
||||
labels:
|
||||
label-to-be-passed-to-managed-secret: sample-value
|
||||
annotations:
|
||||
example.com/annotation-to-be-passed-to-managed-secret: "sample-value"
|
||||
name: infisicalsecret-sample
|
||||
labels:
|
||||
label-to-be-passed-to-managed-secret: sample-value
|
||||
annotations:
|
||||
example.com/annotation-to-be-passed-to-managed-secret: "sample-value"
|
||||
spec:
|
||||
hostAPI: https://app.infisical.com/api
|
||||
resyncInterval: 10
|
||||
authentication:
|
||||
# Make sure to only have 1 authentication method defined, serviceToken/universalAuth.
|
||||
# If you have multiple authentication methods defined, it may cause issues.
|
||||
universalAuth:
|
||||
secretsScope:
|
||||
projectSlug: <project-slug>
|
||||
envSlug: <env-slug> # "dev", "staging", "prod", etc..
|
||||
secretsPath: "<secrets-path>" # Root is "/"
|
||||
recursive: true # Fetch all secrets from the specified path and all sub-directories. Default is false.
|
||||
|
||||
credentialsRef:
|
||||
secretName: universal-auth-credentials
|
||||
secretNamespace: default
|
||||
hostAPI: https://app.infisical.com/api
|
||||
resyncInterval: 10
|
||||
authentication:
|
||||
# Make sure to only have 1 authentication method defined, serviceToken/universalAuth.
|
||||
# If you have multiple authentication methods defined, it may cause issues.
|
||||
|
||||
# (Deprecated) Service Token Auth
|
||||
serviceToken:
|
||||
serviceTokenSecretReference:
|
||||
secretName: service-token
|
||||
secretNamespace: default
|
||||
secretsScope:
|
||||
envSlug: <env-slug>
|
||||
secretsPath: <secrets-path>
|
||||
recursive: true
|
||||
|
||||
# Universal Auth
|
||||
universalAuth:
|
||||
secretsScope:
|
||||
projectSlug: new-ob-em
|
||||
envSlug: dev # "dev", "staging", "prod", etc..
|
||||
secretsPath: "/" # Root is "/"
|
||||
recursive: true # Whether or not to use recursive mode (fetches all secrets in an environment from a given secret path and all folders inside the path). Defaults to false
|
||||
credentialsRef:
|
||||
secretName: universal-auth-credentials
|
||||
secretNamespace: default
|
||||
|
||||
# Native Kubernetes Auth
|
||||
kubernetesAuth:
|
||||
identityId: <machine-identity-id>
|
||||
serviceAccountRef:
|
||||
name: <service-account-name>
|
||||
namespace: <service-account-namespace>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
|
||||
# AWS IAM Auth
|
||||
awsIamAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
|
||||
# Azure Auth
|
||||
azureAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
|
||||
# GCP ID Token Auth
|
||||
gcpIdTokenAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
|
||||
# GCP IAM Auth
|
||||
gcpIamAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
|
||||
managedSecretReference:
|
||||
secretName: managed-secret
|
||||
secretNamespace: default
|
||||
creationPolicy: "Orphan" ## Owner | Orphan
|
||||
# secretType: kubernetes.io/dockerconfigjson
|
||||
|
||||
# Service tokens are deprecated and will be removed in the near future. Please use Machine Identities for authenticating with Infisical.
|
||||
serviceToken:
|
||||
serviceTokenSecretReference:
|
||||
secretName: service-token
|
||||
secretNamespace: default
|
||||
secretsScope:
|
||||
envSlug: <env-slug>
|
||||
secretsPath: <secrets-path> # Root is "/"
|
||||
recursive: true # Fetch all secrets from the specified path and all sub-directories. Default is false.
|
||||
|
||||
managedSecretReference:
|
||||
secretName: managed-secret
|
||||
secretNamespace: default
|
||||
creationPolicy: "Orphan" ## Owner | Orphan (default)
|
||||
# secretType: kubernetes.io/dockerconfigjson
|
||||
```
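
If Universal Auth is used, the `credentialsRef` above points at an ordinary Kubernetes secret holding the machine identity credentials. A minimal sketch of creating that secret and applying the CRD follows; the `clientId`/`clientSecret` key names are assumptions, so check the operator reference for the exact keys your version expects:

```bash
# Create the Kubernetes secret referenced by credentialsRef (key names assumed; verify against the operator docs)
kubectl create secret generic universal-auth-credentials -n default \
  --from-literal=clientId=<machine-identity-client-id> \
  --from-literal=clientSecret=<machine-identity-client-secret>

# Apply the InfisicalSecret resource shown above
kubectl apply -f example-infisical-secret-crd.yaml
```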
|
||||
|
||||
### InfisicalSecret CRD properties
|
||||
@ -156,7 +217,7 @@ When `hostAPI` is not defined the operator fetches secrets from Infisical Cloud.
|
||||
|
||||
</Steps>
|
||||
|
||||
{" "}
|
||||
|
||||
|
||||
<Info>
|
||||
Make sure to also populate the `secretsScope` field with the project slug
|
||||
@ -187,6 +248,339 @@ spec:
|
||||
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="authentication.kubernetesAuth">
|
||||
The Kubernetes machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within a Kubernetes environment.
|
||||
|
||||
<Steps>
|
||||
<Step title="Obtaining the token reviewer JWT for Infisical">
|
||||
1.1. Start by creating a service account in your Kubernetes cluster that will be used by Infisical to authenticate with the Kubernetes API Server.
|
||||
|
||||
```yaml infisical-service-account.yaml
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: infisical-auth
|
||||
namespace: default
|
||||
|
||||
```
|
||||
|
||||
```bash
|
||||
kubectl apply -f infisical-service-account.yaml
|
||||
```
|
||||
|
||||
1.2. Bind the service account to the `system:auth-delegator` cluster role. As described [here](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#other-component-roles), this role allows delegated authentication and authorization checks, specifically for Infisical to access the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/). You can apply the following configuration file:
|
||||
|
||||
```yaml cluster-role-binding.yaml
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
name: role-tokenreview-binding
|
||||
namespace: default
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: system:auth-delegator
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: infisical-auth
|
||||
namespace: default
|
||||
```
|
||||
|
||||
```bash
|
||||
kubectl apply -f cluster-role-binding.yaml
|
||||
```
|
||||
|
||||
1.3. Next, create a long-lived service account JWT token (i.e. the token reviewer JWT token) for the service account using this configuration file for a new `Secret` resource:
|
||||
|
||||
```yaml service-account-token.yaml
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
type: kubernetes.io/service-account-token
|
||||
metadata:
|
||||
name: infisical-auth-token
|
||||
annotations:
|
||||
kubernetes.io/service-account.name: "infisical-auth"
|
||||
```
|
||||
|
||||
|
||||
```bash
|
||||
kubectl apply -f service-account-token.yaml
|
||||
```
|
||||
|
||||
1.4. Link the secret in step 1.3 to the service account in step 1.1:
|
||||
|
||||
```bash
|
||||
kubectl patch serviceaccount infisical-auth -p '{"secrets": [{"name": "infisical-auth-token"}]}' -n default
|
||||
```
|
||||
|
||||
1.5. Finally, retrieve the token reviewer JWT token from the secret.
|
||||
|
||||
```bash
|
||||
kubectl get secret infisical-auth-token -n default -o=jsonpath='{.data.token}' | base64 --decode
|
||||
```
|
||||
|
||||
Keep this JWT token handy as you will need it for the **Token Reviewer JWT** field when configuring the Kubernetes Auth authentication method for the identity in step 2.
|
||||
|
||||
</Step>
|
||||
|
||||
<Step title="Creating an identity">
|
||||
To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
|
||||
|
||||

|
||||
|
||||
When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
|
||||
|
||||

|
||||
|
||||
Now input a few details for your new identity. Here's some guidance for each field:
|
||||
|
||||
- Name (required): A friendly name for the identity.
|
||||
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
|
||||
|
||||
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**.
|
||||
|
||||
<Info>
|
||||
To learn more about each field of the Kubernetes native authentication method, see step 2 of [guide](/documentation/platform/identities/kubernetes-auth#guide).
|
||||
</Info>
|
||||
|
||||

|
||||
|
||||
|
||||
</Step>
|
||||
<Step title="Adding an identity to a project">
|
||||
To allow the operator to use the given identity to access secrets, you will need to add the identity to project(s) that you would like to grant it access to.
|
||||
|
||||
To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
|
||||
|
||||
Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
</Step>
|
||||
<Step title="Add your identity ID & service account to your InfisicalSecret resource">
|
||||
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource.
|
||||
In the `authentication.kubernetesAuth.identityId` field, add the identity ID of the machine identity you created.
|
||||
See the example below for more details.
|
||||
</Step>
|
||||
<Step title="Add your Kubernetes service account token to the InfisicalSecret resource">
|
||||
Add the service account details from the previous steps under `authentication.kubernetesAuth.serviceAccountRef`.
|
||||
Here you will need to enter the name and namespace of the service account.
|
||||
The example below shows a complete InfisicalSecret resource with all required fields defined.
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
|
||||
<Info>
|
||||
Make sure to also populate the `secretsScope` field with the project slug
|
||||
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
|
||||
_`secretsPath`_ that you want to fetch secrets from. Please see the example
|
||||
below.
|
||||
</Info>
|
||||
|
||||
## Example
|
||||
|
||||
```yaml example-kubernetes-auth.yaml
|
||||
apiVersion: secrets.infisical.com/v1alpha1
|
||||
kind: InfisicalSecret
|
||||
metadata:
|
||||
name: infisicalsecret-sample-crd
|
||||
spec:
|
||||
authentication:
|
||||
kubernetesAuth:
|
||||
identityId: <machine-identity-id>
|
||||
serviceAccountRef:
|
||||
name: <service-account-name>
|
||||
namespace: <service-account-namespace>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
...
|
||||
```
|
||||
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="authentication.awsIamAuth">
|
||||
The AWS IAM machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within an AWS environment like an EC2 or a Lambda function.
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a machine identity">
|
||||
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about AWS machine identities here](/documentation/platform/identities/aws-auth).
|
||||
</Step>
|
||||
<Step title="Add your identity ID to your InfisicalSecret resource">
|
||||
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.awsIamAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details.
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
|
||||
<Info>
|
||||
Make sure to also populate the `secretsScope` field with the project slug
|
||||
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
|
||||
_`secretsPath`_ that you want to fetch secrets from. Please see the example
|
||||
below.
|
||||
</Info>
|
||||
|
||||
## Example
|
||||
|
||||
```yaml example-aws-iam-auth.yaml
|
||||
apiVersion: secrets.infisical.com/v1alpha1
|
||||
kind: InfisicalSecret
|
||||
metadata:
|
||||
name: infisicalsecret-sample-crd
|
||||
spec:
|
||||
authentication:
|
||||
awsIamAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
...
|
||||
```
|
||||
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="authentication.azureAuth">
|
||||
The Azure machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within an Azure environment.
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a machine identity">
|
||||
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about Azure machine identities here](/documentation/platform/identities/azure-auth).
|
||||
</Step>
|
||||
<Step title="Add your identity ID to your InfisicalSecret resource">
|
||||
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.azureAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details.
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
|
||||
<Info>
|
||||
Make sure to also populate the `secretsScope` field with the project slug
|
||||
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
|
||||
_`secretsPath`_ that you want to fetch secrets from. Please see the example
|
||||
below.
|
||||
</Info>
|
||||
|
||||
## Example
|
||||
|
||||
```yaml example-azure-auth.yaml
|
||||
apiVersion: secrets.infisical.com/v1alpha1
|
||||
kind: InfisicalSecret
|
||||
metadata:
|
||||
name: infisicalsecret-sample-crd
|
||||
spec:
|
||||
authentication:
|
||||
azureAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
...
|
||||
```
|
||||
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="authentication.gcpIdTokenAuth">
|
||||
The GCP ID Token machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within GCP environments.
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a machine identity">
|
||||
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about GCP machine identities here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Add your identity ID to your InfisicalSecret resource">
|
||||
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.gcpIdTokenAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details.
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
|
||||
<Info>
|
||||
Make sure to also populate the `secretsScope` field with the project slug
|
||||
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
|
||||
_`secretsPath`_ that you want to fetch secrets from. Please see the example
|
||||
below.
|
||||
</Info>
|
||||
|
||||
## Example
|
||||
|
||||
```yaml example-gcp-id-token-auth.yaml
|
||||
apiVersion: secrets.infisical.com/v1alpha1
|
||||
kind: InfisicalSecret
|
||||
metadata:
|
||||
name: infisicalsecret-sample-crd
|
||||
spec:
|
||||
authentication:
|
||||
gcpIdTokenAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
...
|
||||
```
|
||||
|
||||
</Accordion>
|
||||
|
||||
|
||||
|
||||
<Accordion title="authentication.gcpIamAuth">
|
||||
The GCP IAM machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can be used both within and outside GCP environments.
|
||||
|
||||
<Steps>
|
||||
<Step title="Create a machine identity">
|
||||
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about GCP machine identities here](/documentation/platform/identities/gcp-auth).
|
||||
</Step>
|
||||
<Step title="Add your identity ID and service account token path to your InfisicalSecret resource">
|
||||
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.gcpIamAuth.identityId` field, add the identity ID of the machine identity you created.
|
||||
You'll also need to add the service account key file path to your InfisicalSecret resource. In the `authentication.gcpIamAuth.serviceAccountKeyFilePath` field, add the path to your service account key file. Please see the example below for more details.
|
||||
</Step>
|
||||
|
||||
</Steps>
|
||||
|
||||
<Info>
|
||||
Make sure to also populate the `secretsScope` field with the project slug
|
||||
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
|
||||
_`secretsPath`_ that you want to fetch secrets from. Please see the example
|
||||
below.
|
||||
</Info>
|
||||
|
||||
## Example
|
||||
|
||||
```yaml example-gcp-iam-auth.yaml
|
||||
apiVersion: secrets.infisical.com/v1alpha1
|
||||
kind: InfisicalSecret
|
||||
metadata:
|
||||
name: infisicalsecret-sample-crd
|
||||
spec:
|
||||
authentication:
|
||||
gcpIamAuth:
|
||||
identityId: <your-machine-identity-id>
|
||||
serviceAccountKeyFilePath: "/path/to-service-account-key-file-path.json"
|
||||
|
||||
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
|
||||
secretsScope:
|
||||
projectSlug: your-project-slug
|
||||
envSlug: prod
|
||||
secretsPath: "/path"
|
||||
recursive: true
|
||||
...
|
||||
```
|
||||
|
||||
</Accordion>
|
||||
|
||||
<Accordion title="authentication.serviceToken">
|
||||
<Warning>
|
||||
Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities).
|
||||
|