mirror of https://github.com/Infisical/infisical.git synced 2025-03-22 14:05:22 +00:00

Compare commits


133 Commits

Author SHA1 Message Date
6081e2927e Merge pull request from rhythmbhiwani/fix-pagination-disappear
Fixed Pagination Disappearing on Secret Sharing Page
2024-08-14 14:54:37 -04:00
0b42f29916 Merge pull request from akhilmhdh/feat/replication-test
feat: added log point for aws tag and check for delete secret in bridge
2024-08-14 12:25:28 -04:00
b60d0992f4 feat: added log point for aws tag and check for delete secret in bridge 2024-08-14 21:42:07 +05:30
a8a68f600c Merge pull request from akhilmhdh/feat/replication-test
feat(ui): resolved a race condition in ui
2024-08-13 14:48:08 -04:00
742f5f6621 feat(ui): resolved a race condition in ui 2024-08-14 00:13:55 +05:30
f3cd7efe0e Merge pull request from akhilmhdh/feat/replication-test
feat: added more endpoints for delete
2024-08-13 12:41:54 -04:00
2b16c19b70 improve logs for aws ssm debug 2024-08-13 12:40:02 -04:00
943b540383 feat: added more endpoints for delete 2024-08-13 21:48:03 +05:30
e180021aa6 Merge pull request from akhilmhdh/feat/replication-test
feat: added debug points to test ssm integration in replication
2024-08-13 11:23:25 -04:00
8e08c443ad feat: added log to print operation based keys 2024-08-13 20:50:19 +05:30
dae26daeeb feat: added debug points to test ssm integration in replication 2024-08-13 20:40:53 +05:30
170f8d9add Merge pull request from Infisical/misc/addressed-reported-cli-behaviors
misc: addressed reported flaws with CLI usage
2024-08-13 12:49:20 +08:00
8d41ef198a Merge pull request from akhilmhdh/feat/client-secret-cleanup
fix: resolved secret approval broken due to tag name removal
2024-08-12 16:51:55 -04:00
69d60a227a fix: resolved secret approval broken due to tag name removal 2024-08-13 02:16:57 +05:30
c8eefcfbf9 Merge pull request from akhilmhdh/feat/client-secret-cleanup
feat: switched to ssm update as overwrite with tag as separate operation
2024-08-12 16:38:57 -04:00
53cec754cc feat: switched to ssm update as overwrite with tag as separate operation 2024-08-13 02:04:55 +05:30
5db3e177eb Fixed Pagination Disappearing on Secret Sharing Page 2024-08-13 02:01:25 +05:30
3fcc3ccff4 fix spending money typo 2024-08-12 12:41:15 -04:00
df07d7b6d7 update spending docs 2024-08-12 11:34:32 -04:00
28a655bef1 Merge pull request from akhilmhdh/feat/client-secret-cleanup
Client secret cleanup on resource cleanup queue
2024-08-12 11:01:46 -04:00
5f2cd04f46 feat: removed not needed condition 2024-08-12 20:29:05 +05:30
897ce1f267 chore: new reviewable command in root make file to check lint and type errors for all the entities 2024-08-12 13:19:55 +05:30
6afc17b84b feat: implemented universal auth client secret cleanup in resource cleanup queue 2024-08-12 13:19:25 +05:30
9017a5e838 Update spending-money.mdx 2024-08-12 01:29:45 -04:00
cb8e4d884e add equipment details to handbook 2024-08-11 23:17:58 -04:00
16807c3dd6 update k8s helm chart image tag 2024-08-11 13:09:22 -04:00
61791e385c update chart version of k8 2024-08-11 10:34:23 -04:00
bbd7bfb0f5 Merge pull request from MohamadTahir/fix-operator-bugs
Bug Fixes
2024-08-11 10:33:26 -04:00
4de8c48b2c Merge pull request from Ayush-Dutt-Sharma/ayush/minor-bug-#2269
replaced "creditnals" to "credentials"
2024-08-11 19:18:43 +05:30
a4bbe2c612 fix the client site url & the creation of a new variable instead of updating the previously initiated variable 2024-08-11 16:46:48 +03:00
541a2e7d05 replaced "creditnals" to "credentials" 2024-08-11 14:10:49 +05:30
ea4e51d826 Merge pull request from Ayush-Dutt-Sharma/ayush/bug-2267-backend
better logging and while loop to ask prompt again
2024-08-10 19:48:05 +05:30
3bc920c593 better logging and while loop to ask prompt again 2024-08-10 15:36:43 +05:30
df38c761ad Merge pull request from akhilmhdh/fix/migration-switch-batch-insert
Secret migration switched to chunking based batch insert
2024-08-09 11:46:19 -04:00
32a84471f2 feat: added a new batch insert operation to convert inserts into chunks and updated secret migration 2024-08-09 21:02:26 +05:30
ea14df2cbd Merge pull request from akhilmhdh/fix/tag-filter-secret-api
Tag based filtering for secret endpoint
2024-08-09 20:33:43 +05:30
6bd6cac366 Merge pull request from Infisical/misc/addressed-misleading-google-saml-setup
misc: addressed misleading docs and placeholder values for Google SAML
2024-08-09 07:43:56 -07:00
45294253aa Merge pull request from GLEF1X/bugfix/yaml-exporting
fix(cli): make yaml exporting reliable and standardized
2024-08-09 10:01:26 -04:00
635fbdc80b misc: addressed misleading docs and placeholder values for google saml 2024-08-09 21:29:33 +08:00
d20c48b7cf Merge pull request from Ayush-Dutt-Sharma/ayush/document-fixes
kubernetes operators integration doc fix
2024-08-09 14:59:50 +05:30
1fc18fe23b feat: added name in attach tag 2024-08-09 14:38:15 +05:30
99403e122b kubernetes operators integration doc fix 2024-08-09 14:33:29 +05:30
5176e70437 rephrase error messages 2024-08-08 18:15:13 -04:00
82b2b0af97 Merge pull request from akhilmhdh/feat/secret-get-personal
fix: resolved cli failing to get overridden secret in get command
2024-08-08 15:08:39 -04:00
e313c866a2 remove backup test for temp 2024-08-08 14:25:12 -04:00
2d81606049 update test with typo fix 2024-08-08 14:03:52 -04:00
718f4ef129 Merge pull request from Infisical/maidu-2321e
remove INFISICAL_VAULT_FILE_PASSPHRASE because it is being auto generated now
2024-08-08 13:52:05 -04:00
a42f3b3763 remove INFISICAL_VAULT_FILE_PASSPHRASE because it is being auto generated now 2024-08-08 13:50:34 -04:00
f7d882a6fc Merge pull request from akhilmhdh/fix/backup
Resolved keyring dataset too big by keeping only the encryption key
2024-08-08 13:19:50 -04:00
385afdfcf8 generate random string fn 2024-08-08 13:03:45 -04:00
281d703cc3 removed vault use command and auto generated passphrase 2024-08-08 13:02:08 -04:00
6f56ed5474 add missing error logs on secrets backup 2024-08-08 13:01:14 -04:00
809e4eeba1 fix: resolved cli failing to get overridden secret in get command 2024-08-08 21:23:04 +05:30
254446c895 fix: resolved keyring dataset too big by keeping only the encryption key 2024-08-08 13:04:33 +05:30
bb52e2beb4 Update secret-tag-router.ts 2024-08-08 00:31:41 -04:00
2739b08e59 revert bb934ef7b1c47195b2ff65a335712add791cb59c 2024-08-07 22:15:06 -04:00
ba5e877a3b Revert "add base64 package"
This reverts commit 4892eea009ee1ed73c27d783d2dc4e7adc735d11.
2024-08-07 22:14:08 -04:00
d2752216f6 Merge pull request from Infisical/revert-2252-maidul-dhusduqwdhj
Revert "Patch CLI auto select file vault "
2024-08-07 22:13:00 -04:00
d91fb0db02 Revert "Patch CLI auto select file vault " 2024-08-07 22:12:50 -04:00
4892eea009 add base64 package 2024-08-07 19:06:25 -04:00
09c6fcb73b Merge pull request from Infisical/maidul-dhusduqwdhj
Patch CLI auto select file vault
2024-08-07 19:03:38 -04:00
79181a1e3d remove os 2024-08-07 23:03:14 +00:00
bb934ef7b1 set vault type when auto selection enabled 2024-08-07 23:02:35 +00:00
cd9316537d prevent auto saving passphrase to disk 2024-08-07 18:56:15 -04:00
942e5f2f65 update phrase 2024-08-07 18:35:57 -04:00
353d231a4e Patch CLI auto select file vault
# Description 📣

When we auto select file vault, we also need to set its type. When we set the type, we don't need to fall back to file vault in the `GetValueInKeyring` and `DeleteValueInKeyring` because `currentVaultBackend` will be `file`.

Also rephrased the text asking the user to enter a passphrase.
2024-08-07 18:35:07 -04:00
68e05b7198 add debug log to print keyring error 2024-08-07 14:51:55 -04:00
4f998e3940 Merge pull request from akhilmhdh/fix/replication
fix: resolved replication secret not getting deleted
2024-08-07 11:57:14 -04:00
1248840dc8 fix: resolved replication secret not getting deleted 2024-08-07 21:23:22 +05:30
64c8125e4b add external secrets operator mention in k8s docs 2024-08-07 11:13:02 -04:00
c109fbab3e feat: removed tag name used in queries 2024-08-07 13:24:22 +05:30
15fb01089b feat: name removal in tag respective changes in frontend 2024-08-07 13:15:53 +05:30
6f4be3e25a feat: removed name from tag and stricter slugification for tag endpoint 2024-08-07 13:14:39 +05:30
8d33647739 Merge pull request from Infisical/maidul-sqhdqwdgvqwjf
patch findProjectUserWorkspaceKey
2024-08-06 22:12:03 +05:30
d1c142e5b1 patch findProjectUserWorkspaceKey 2024-08-06 12:39:06 -04:00
bb1cad0c5b Merge pull request from Infisical/misc/add-org-level-rate-limit
misc: moved to license-plan-based rate limits
2024-08-06 10:42:57 -04:00
2a1cfe15b4 update text when secrets deleted after integ delete 2024-08-06 10:07:41 -04:00
881d70bc64 Merge pull request from Infisical/feat/enabled-secrets-deletion-on-integ-removal
feat: added secrets deletion feature on integration removal
2024-08-06 09:54:15 -04:00
14c1b4f07b misc: hide not found text when flag plain is enabled 2024-08-06 21:21:45 +08:00
3028bdd424 misc: made local workspace file not required if using auth token 2024-08-06 21:06:14 +08:00
902a0b0ed4 Merge pull request from akhilmhdh/fix/missing-coment-field 2024-08-06 08:18:18 -04:00
ba92192537 misc: removed creation limits completely 2024-08-06 19:41:09 +08:00
26ed8df73c misc: finalized list of license rate limits 2024-08-06 19:14:49 +08:00
c1decab912 misc: addressed comments 2024-08-06 18:58:07 +08:00
216c073290 fix: missing comment key in updated project 2024-08-06 16:14:25 +05:30
8626bce632 feat: added tag support for secret operation in cli 2024-08-06 15:36:03 +05:30
c5a2b0321f feat: completed secret v3 raw to support tag based filtering 2024-08-06 15:35:00 +05:30
1070954bdd misc: used destructuring 2024-08-06 02:05:13 +08:00
cc689d3178 feat: added secrets deletion feature on integration removal 2024-08-06 01:52:58 +08:00
e6848828f2 Merge pull request from Infisical/daniel/keyring-cli-improvements
feat(cli): persistent `file` vault passphrase
2024-08-05 13:13:29 -04:00
c8b93e4467 Update doc to show correct command 2024-08-05 13:11:40 -04:00
0bca24bb00 Merge pull request from Infisical/handbook-update
add meetings article to handbook
2024-08-05 12:42:07 -04:00
c563ada50f Merge pull request from akhilmhdh/fix/bot-creation-failing
fix: resolved auto bot create failing on update
2024-08-05 11:15:25 -04:00
26d1616e22 fix: resolved auto bot create failing on update 2024-08-05 20:41:19 +05:30
5fd071d1de Merge pull request from akhilmhdh/feat/org-project-management
Feat/org project management
2024-08-05 10:21:09 -04:00
9721d7a15e add meetings article to handbook 2024-08-04 14:04:09 -07:00
93db5c4555 Merge pull request from Infisical/maidul-mdjhquwqjhd
update broken image in ksm docs
2024-08-04 11:48:16 -04:00
ad4393fdef update broken image in ksm docs 2024-08-04 11:46:58 -04:00
cd06e4e7f3 hot patch 2024-08-03 19:05:34 -04:00
8e53a1b171 Merge pull request from Infisical/daniel/fix-lint
Fix: Linting
2024-08-02 22:00:28 -04:00
71af463ad8 fix format 2024-08-03 03:49:47 +02:00
7abd18b11c Merge pull request from LemmyMwaura/parse-secret-on-paste
feat: parse secrets (key,value) on paste
2024-08-03 03:33:17 +02:00
1aee50a751 Fix: Parser improvements and lint fixes 2024-08-03 03:29:45 +02:00
0f23b7e1d3 misc: added check for undefined orgId 2024-08-03 02:10:47 +08:00
e9b37a1f98 Merge pull request from Vishvsalvi/deleteActionModal-Placeholder
Placeholder value is the same as its label
2024-08-02 14:04:40 -04:00
33193a47ae misc: updated default onprem rate limits 2024-08-03 01:52:04 +08:00
43fded2350 refactor: take into account other delimiters 2024-08-02 20:41:47 +03:00
7b6f4d810d Placeholder value is the same as its label 2024-08-02 20:51:08 +05:30
1ad286ca87 misc: name updates and more comments 2024-08-02 22:58:53 +08:00
be7c11a3f5 Merge remote-tracking branch 'origin/main' into misc/add-org-level-rate-limit 2024-08-02 22:42:23 +08:00
55a6740714 misc: moved to plan-based rate limit 2024-08-02 21:37:48 +08:00
7467a05fc4 fix(lint): fix triple equal strict check 2024-08-01 14:42:15 +03:00
afba636850 feat: parse full env secrets (key,value) when pasted from clipboard 2024-08-01 14:22:22 +03:00
891cb06de0 Update keyringwrapper.go 2024-07-31 16:55:53 +02:00
02e8f20cbf remove extra : 2024-07-31 03:14:06 +00:00
dbe771dba0 refactor: remove unnecessary comment 2024-07-30 05:30:13 -04:00
273fd6c98f refactor: remove deprecated errors package
- Replace errors.Wrap with fmt.Errorf and %w verb
2024-07-30 05:23:43 -04:00
d5f4ce4376 Update vault.go 2024-07-30 10:22:15 +02:00
18aac6508b fix(cli): make yaml exporting reliable and standardized 2024-07-29 22:38:10 -04:00
85653a90d5 update phrasing 2024-07-29 22:06:03 -04:00
879ef2c178 Update keyringwrapper.go 2024-07-29 12:37:58 +02:00
8777cfe680 Update keyringwrapper.go 2024-07-29 12:34:35 +02:00
2b630f75aa Update keyringwrapper.go 2024-07-29 12:31:02 +02:00
91cee20cc8 Minor improvements 2024-07-29 12:21:38 +02:00
4249ec6030 Update login.go 2024-07-29 12:21:31 +02:00
e7a95e6af2 Update login.go 2024-07-29 12:15:53 +02:00
a9f04a3c1f Update keyringwrapper.go 2024-07-29 12:13:40 +02:00
3d380710ee Update keyringwrapper.go 2024-07-29 12:10:42 +02:00
2177ec6bcc Update vault.go 2024-07-29 12:04:34 +02:00
070eb2aacd Update keyringwrapper.go 2024-07-26 22:47:46 +02:00
e619cfa313 feat(cli): set persistent file vault password 2024-07-26 22:47:37 +02:00
c3038e3ca1 docs: passphrase command 2024-07-26 22:47:07 +02:00
ff0e7feeee feat(cli): CLI Keyring improvements 2024-07-26 19:14:21 +02:00
103 changed files with 1547 additions and 437 deletions
.github/workflows
Makefile
backend
cli
company
docs
cli/commands
documentation/platform
images/sso/google-saml
integrations/platforms
mint.json
frontend/src
components
tags/CreateTagModal
v2/DeleteActionModal
helpers
hooks/api
pages/integrations/aws-parameter-store
views
IntegrationsPage
IntegrationsPage.tsx
components/IntegrationsSection
SecretApprovalPage/components/SecretApprovalRequest/components
SecretMainPage/components
SecretOverviewPage/components/CreateSecretForm
Settings
OrgSettingsPage/components/OrgAuthTab
ProjectSettingsPage/components/SecretTagsSection
ShareSecretPage/components
admin/DashboardPage
helm-charts/secrets-operator
k8-operator/controllers

@ -50,6 +50,6 @@ jobs:
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

@ -15,3 +15,16 @@ up-prod:
down:
docker compose -f docker-compose.dev.yml down
reviewable-ui:
cd frontend && \
npm run lint:fix && \
npm run type:check
reviewable-api:
cd backend && \
npm run lint:fix && \
npm run type:check
reviewable: reviewable-ui reviewable-api

@ -25,6 +25,7 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
@ -7812,19 +7813,45 @@
}
},
"node_modules/@octokit/plugin-retry": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz",
"integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==",
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-5.0.5.tgz",
"integrity": "sha512-sB1RWMhSrre02Atv95K6bhESlJ/sPdZkK/wE/w1IdSCe0yM6FxSjksLa6T7aAvxvxlLKzQEC4KIiqpqyov1Tbg==",
"dependencies": {
"@octokit/request-error": "^5.0.0",
"@octokit/types": "^12.0.0",
"@octokit/request-error": "^4.0.1",
"@octokit/types": "^10.0.0",
"bottleneck": "^2.15.3"
},
"engines": {
"node": ">= 18"
},
"peerDependencies": {
"@octokit/core": ">=5"
"@octokit/core": ">=3"
}
},
"node_modules/@octokit/plugin-retry/node_modules/@octokit/openapi-types": {
"version": "18.1.1",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz",
"integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw=="
},
"node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-4.0.2.tgz",
"integrity": "sha512-uqwUEmZw3x4I9DGYq9fODVAAvcLsPQv97NRycP6syEFu5916M189VnNBW2zANNwqg3OiligNcAey7P0SET843w==",
"dependencies": {
"@octokit/types": "^10.0.0",
"deprecation": "^2.0.0",
"once": "^1.4.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/plugin-retry/node_modules/@octokit/types": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz",
"integrity": "sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
},
"node_modules/@octokit/plugin-throttling": {
@ -17396,6 +17423,22 @@
"node": ">=18"
}
},
"node_modules/probot/node_modules/@octokit/plugin-retry": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz",
"integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==",
"dependencies": {
"@octokit/request-error": "^5.0.0",
"@octokit/types": "^12.0.0",
"bottleneck": "^2.15.3"
},
"engines": {
"node": ">= 18"
},
"peerDependencies": {
"@octokit/core": ">=5"
}
},
"node_modules/probot/node_modules/commander": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz",

@ -121,6 +121,7 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",

@ -7,14 +7,33 @@ const prompt = promptSync({
sigint: true
});
type ComponentType = 1 | 2 | 3;
console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);
const componentType = parseInt(prompt("Select a component: "), 10);
function getComponentType(): ComponentType {
while (true) {
const input = prompt("Select a component (0-3): ");
const componentType = parseInt(input, 10);
if (componentType === 0) {
console.log("Exiting the program. Goodbye!");
process.exit(0);
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
return componentType;
} else {
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
}
}
}
const componentType = getComponentType();
if (componentType === 1) {
const componentName = prompt("Enter service name: ");

@ -18,6 +18,7 @@ import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-ser
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@ -89,6 +90,7 @@ declare module "fastify" {
id: string;
orgId: string;
};
rateLimits: RateLimitConfiguration;
// passport data
passportUser: {
isUserCompleted: string;

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (hasCreationLimitCol) {
t.dropColumn("creationLimit");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (!hasCreationLimitCol) {
t.integer("creationLimit").defaultTo(30).notNullable();
}
});
}

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.dropColumn("name");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (!hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.string("name");
});
}
}

@ -15,7 +15,6 @@ export const RateLimitSchema = z.object({
authRateLimit: z.number().default(60),
inviteUserRateLimit: z.number().default(30),
mfaRateLimit: z.number().default(20),
creationLimit: z.number().default(30),
publicEndpointLimit: z.number().default(30),
createdAt: z.date(),
updatedAt: z.date()

@ -9,7 +9,6 @@ import { TImmutableDBKeys } from "./models";
export const SecretTagsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
color: z.string().nullable().optional(),
createdAt: z.date(),

@ -58,7 +58,6 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
}),
response: {

@ -75,15 +75,16 @@ export const auditLogDALFactory = (db: TDbClient) => {
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 100); // time to breathe for db
});
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
} while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};
return { ...auditLogOrm, pruneAuditLog, find };

@ -338,6 +338,7 @@ interface DeleteIntegrationEvent {
targetServiceId?: string;
path?: string;
region?: string;
shouldDeleteIntegrationSecrets?: boolean;
};
}

@ -40,7 +40,12 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
secretRotation: true,
caCrl: false,
instanceUserManagement: false,
externalKms: false
externalKms: false,
rateLimits: {
readLimit: 60,
writeLimit: 200,
secretsLimit: 40
}
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {

@ -58,6 +58,11 @@ export type TFeatureSet = {
caCrl: false;
instanceUserManagement: false;
externalKms: false;
rateLimits: {
readLimit: number;
writeLimit: number;
secretsLimit: number;
};
};
export type TOrgPlansTableDTO = {

@ -4,17 +4,16 @@ import { logger } from "@app/lib/logger";
import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
import { RateLimitConfiguration, TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
let rateLimitMaxConfiguration = {
let rateLimitMaxConfiguration: RateLimitConfiguration = {
readLimit: 60,
publicEndpointLimit: 30,
writeLimit: 200,
secretsLimit: 60,
authRateLimit: 60,
inviteUserRateLimit: 30,
mfaRateLimit: 20,
creationLimit: 30
mfaRateLimit: 20
};
Object.freeze(rateLimitMaxConfiguration);
@ -67,8 +66,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
secretsLimit: rateLimit.secretsRateLimit,
authRateLimit: rateLimit.authRateLimit,
inviteUserRateLimit: rateLimit.inviteUserRateLimit,
mfaRateLimit: rateLimit.mfaRateLimit,
creationLimit: rateLimit.creationLimit
mfaRateLimit: rateLimit.mfaRateLimit
};
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);

@ -5,7 +5,6 @@ export type TRateLimitUpdateDTO = {
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};
@ -14,3 +13,13 @@ export type TRateLimit = {
createdAt: Date;
updatedAt: Date;
} & TRateLimitUpdateDTO;
export type RateLimitConfiguration = {
readLimit: number;
publicEndpointLimit: number;
writeLimit: number;
secretsLimit: number;
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
};

@ -81,15 +81,13 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
.select({
secVerTagId: "secVerTag.id",
secVerTagColor: "secVerTag.color",
secVerTagSlug: "secVerTag.slug",
secVerTagName: "secVerTag.name"
secVerTagSlug: "secVerTag.slug"
})
.select(
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTag).as("tagJnId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
)
.select(
db.ref("secretBlindIndex").withSchema(TableName.Secret).as("orgSecBlindIndex"),
@ -124,9 +122,9 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "tagJnId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({
id,
name,
name: slug,
slug,
color
})
@ -200,11 +198,11 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "secVerTagId",
label: "tags" as const,
mapper: ({ secVerTagId: id, secVerTagName: name, secVerTagSlug: slug, secVerTagColor: color }) => ({
mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({
// eslint-disable-next-line
id,
// eslint-disable-next-line
name,
name: slug,
// eslint-disable-next-line
slug,
// eslint-disable-next-line
@ -262,15 +260,13 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
.select({
secVerTagId: "secVerTag.id",
secVerTagColor: "secVerTag.color",
secVerTagSlug: "secVerTag.slug",
secVerTagName: "secVerTag.name"
secVerTagSlug: "secVerTag.slug"
})
.select(
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTagV2).as("tagJnId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
)
.select(
db.ref("version").withSchema(TableName.SecretV2).as("orgSecVersion"),
@ -292,9 +288,9 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "tagJnId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({
id,
name,
name: slug,
slug,
color
})
@ -330,11 +326,11 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "secVerTagId",
label: "tags" as const,
mapper: ({ secVerTagId: id, secVerTagName: name, secVerTagSlug: slug, secVerTagColor: color }) => ({
mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({
// eslint-disable-next-line
id,
// eslint-disable-next-line
name,
name: slug,
// eslint-disable-next-line
slug,
// eslint-disable-next-line

@ -449,7 +449,7 @@ export const secretReplicationServiceFactory = ({
});
}
if (locallyDeletedSecrets.length) {
await secretDAL.delete(
await secretV2BridgeDAL.delete(
{
$in: {
id: locallyDeletedSecrets.map(({ id }) => id)

@ -100,8 +100,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
return sqlNestRelationships({
data,
@ -132,9 +131,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId
@ -195,8 +194,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
return sqlNestRelationships({
data,
@ -227,9 +225,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId
@ -353,8 +351,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
const formated = sqlNestRelationships({
@ -377,9 +374,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId
@ -508,8 +505,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
const formated = sqlNestRelationships({
@ -532,9 +528,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId

@ -596,7 +596,8 @@ export const RAW_SECRETS = {
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.",
includeImports: "Whether to include imported secrets or not."
includeImports: "Whether to include imported secrets or not.",
tagSlugs: "The comma separated tag slugs to filter secrets"
},
CREATE: {
secretName: "The name of the secret to create.",

@ -128,6 +128,16 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
throw new DatabaseError({ error, name: "Create" });
}
},
// This splits the insert into multiple chunks
batchInsert: async (data: readonly Tables[Tname]["insert"][], tx?: Knex) => {
try {
if (!data.length) return [];
const res = await (tx || db).batchInsert(tableName, data as never).returning("*");
return res as Tables[Tname]["base"][];
} catch (error) {
throw new DatabaseError({ error, name: "batchInsert" });
}
},
upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
try {
if (!data.length) return [];
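The batchInsert helper added above delegates to Knex's batchInsert, which writes rows in chunks rather than as one oversized INSERT. A minimal sketch of how a migration-style caller might drive it inside a transaction; the table name, row shape, and chunk size below are hypothetical and not taken from this diff:

import { Knex } from "knex";

// Hypothetical row shape; only the chunking pattern is illustrated here.
type SecretRow = { id: string; key: string; encryptedValue: string };

const CHUNK_SIZE = 1000; // assumed chunk size, not from the diff

const insertSecretsInChunks = async (db: Knex, rows: SecretRow[]) => {
  await db.transaction(async (tx) => {
    for (let i = 0; i < rows.length; i += CHUNK_SIZE) {
      const chunk = rows.slice(i, i + CHUNK_SIZE);
      // eslint-disable-next-line no-await-in-loop
      await tx.batchInsert("secrets", chunk); // same Knex call the ormify helper wraps
    }
  });
};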

@ -1,7 +1,6 @@
import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-limit";
import { Redis } from "ioredis";
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
import { getConfig } from "@app/lib/config/env";
export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
@ -22,14 +21,16 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
// GET endpoints
export const readLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().readLimit,
hook: "preValidation",
max: (req) => req.rateLimits.readLimit,
keyGenerator: (req) => req.realIp
};
// POST, PATCH, PUT, DELETE endpoints
export const writeLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().writeLimit,
hook: "preValidation",
max: (req) => req.rateLimits.writeLimit,
keyGenerator: (req) => req.realIp
};
@ -37,42 +38,40 @@ export const writeLimit: RateLimitOptions = {
export const secretsLimit: RateLimitOptions = {
// secrets, folders, secret imports
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().secretsLimit,
hook: "preValidation",
max: (req) => req.rateLimits.secretsLimit,
keyGenerator: (req) => req.realIp
};
export const authRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().authRateLimit,
hook: "preValidation",
max: (req) => req.rateLimits.authRateLimit,
keyGenerator: (req) => req.realIp
};
export const inviteUserRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().inviteUserRateLimit,
hook: "preValidation",
max: (req) => req.rateLimits.inviteUserRateLimit,
keyGenerator: (req) => req.realIp
};
export const mfaRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().mfaRateLimit,
hook: "preValidation",
max: (req) => req.rateLimits.mfaRateLimit,
keyGenerator: (req) => {
return req.headers.authorization?.split(" ")[1] || req.realIp;
}
};
export const creationLimit: RateLimitOptions = {
// identity, project, org
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().creationLimit,
keyGenerator: (req) => req.realIp
};
// Public endpoints to avoid brute force attacks
export const publicEndpointLimit: RateLimitOptions = {
// Read Shared Secrets
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().publicEndpointLimit,
hook: "preValidation",
max: (req) => req.rateLimits.publicEndpointLimit,
keyGenerator: (req) => req.realIp
};

@ -0,0 +1,38 @@
import fp from "fastify-plugin";
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
import { getConfig } from "@app/lib/config/env";
export const injectRateLimits = fp(async (server) => {
server.decorateRequest("rateLimits", null);
server.addHook("onRequest", async (req) => {
const appCfg = getConfig();
const instanceRateLimiterConfig = getRateLimiterConfig();
if (!req.auth?.orgId) {
// for public endpoints, we always use the instance-wide default rate limits
req.rateLimits = instanceRateLimiterConfig;
return;
}
const { rateLimits, customRateLimits } = await server.services.license.getPlan(req.auth.orgId);
if (customRateLimits && !appCfg.isCloud) {
// we do this because for self-hosted/dedicated instances, we want custom rate limits to be based on admin configuration
// note that the syncing of custom rate limit happens on the instanceRateLimiterConfig object
req.rateLimits = instanceRateLimiterConfig;
return;
}
// we're using the null coalescing operator in order to handle outdated licenses
req.rateLimits = {
readLimit: rateLimits?.readLimit ?? instanceRateLimiterConfig.readLimit,
writeLimit: rateLimits?.writeLimit ?? instanceRateLimiterConfig.writeLimit,
secretsLimit: rateLimits?.secretsLimit ?? instanceRateLimiterConfig.secretsLimit,
publicEndpointLimit: instanceRateLimiterConfig.publicEndpointLimit,
authRateLimit: instanceRateLimiterConfig.authRateLimit,
inviteUserRateLimit: instanceRateLimiterConfig.inviteUserRateLimit,
mfaRateLimit: instanceRateLimiterConfig.mfaRateLimit
};
});
});
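Combined with the rateLimiter changes above, each request now carries its own ceilings: this plugin resolves req.rateLimits from the org's license plan (falling back to the instance-wide configuration), and every limiter reads the value at preValidation time. A small worked sketch of the resolution step with invented numbers; the plan below is made up and deliberately partial to show the null-coalescing fallback for outdated licenses:

// Invented values for illustration only.
const instanceDefaults = {
  readLimit: 60,
  writeLimit: 200,
  secretsLimit: 40,
  publicEndpointLimit: 30,
  authRateLimit: 60,
  inviteUserRateLimit: 30,
  mfaRateLimit: 20
};

// e.g. an older license that only defines readLimit
const planRateLimits: Partial<typeof instanceDefaults> = { readLimit: 600 };

const resolved = {
  readLimit: planRateLimits.readLimit ?? instanceDefaults.readLimit, // 600 (from the plan)
  writeLimit: planRateLimits.writeLimit ?? instanceDefaults.writeLimit, // 200 (fallback)
  secretsLimit: planRateLimits.secretsLimit ?? instanceDefaults.secretsLimit, // 40 (fallback)
  // auth, MFA, invite, and public-endpoint limits always come from the instance config
  publicEndpointLimit: instanceDefaults.publicEndpointLimit,
  authRateLimit: instanceDefaults.authRateLimit,
  inviteUserRateLimit: instanceDefaults.inviteUserRateLimit,
  mfaRateLimit: instanceDefaults.mfaRateLimit
};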

@ -184,6 +184,7 @@ import { webhookServiceFactory } from "@app/services/webhook/webhook-service";
import { injectAuditLogInfo } from "../plugins/audit-log";
import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { registerSecretScannerGhApp } from "../plugins/secret-scanner";
import { registerV1Routes } from "./v1";
import { registerV2Routes } from "./v2";
@ -896,8 +897,15 @@ export const registerRoutes = async (
folderDAL,
integrationDAL,
integrationAuthDAL,
secretQueueService
secretQueueService,
integrationAuthService,
projectBotService,
secretV2BridgeDAL,
secretImportDAL,
secretDAL,
kmsService
});
const serviceTokenService = serviceTokenServiceFactory({
projectEnvDAL,
serviceTokenDAL,
@ -1029,7 +1037,8 @@ export const registerRoutes = async (
snapshotDAL,
identityAccessTokenDAL,
secretSharingDAL,
secretVersionV2DAL: secretVersionV2BridgeDAL
secretVersionV2DAL: secretVersionV2BridgeDAL,
identityUniversalAuthClientSecretDAL: identityUaClientSecretDAL
});
const oidcService = oidcConfigServiceFactory({
@ -1142,6 +1151,7 @@ export const registerRoutes = async (
await server.register(injectIdentity, { userDAL, serviceTokenDAL });
await server.register(injectPermission);
await server.register(injectRateLimits);
await server.register(injectAuditLogInfo);
server.route({

@ -3,7 +3,7 @@ import { z } from "zod";
import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgMembershipRole, OrgRolesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { IDENTITIES } from "@app/lib/api-docs";
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -16,7 +16,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/",
config: {
rateLimit: creationLimit
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {

@ -170,6 +170,12 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
params: z.object({
integrationId: z.string().trim().describe(INTEGRATION.DELETE.integrationId)
}),
querystring: z.object({
shouldDeleteIntegrationSecrets: z
.enum(["true", "false"])
.optional()
.transform((val) => val === "true")
}),
response: {
200: z.object({
integration: IntegrationsSchema
@ -183,7 +189,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
id: req.params.integrationId
id: req.params.integrationId,
shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets
});
await server.services.auditLog.createAuditLog({
@ -205,7 +212,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
targetService: integration.targetService,
targetServiceId: integration.targetServiceId,
path: integration.path,
region: integration.region
region: integration.region,
shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets
// eslint-disable-next-line
}) as any
}

@ -1,3 +1,4 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { SecretTagsSchema } from "@app/db/schemas";
@ -49,7 +50,8 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
workspaceTag: SecretTagsSchema
// akhilmhdh: for terraform backward compatibility
workspaceTag: SecretTagsSchema.extend({ name: z.string() })
})
}
},
@ -79,7 +81,8 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
workspaceTag: SecretTagsSchema
// akhilmhdh: for terraform backward compatibility
workspaceTag: SecretTagsSchema.extend({ name: z.string() })
})
}
},
@ -108,8 +111,14 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
projectId: z.string().trim().describe(SECRET_TAGS.CREATE.projectId)
}),
body: z.object({
name: z.string().trim().describe(SECRET_TAGS.CREATE.name),
slug: z.string().trim().describe(SECRET_TAGS.CREATE.slug),
slug: z
.string()
.toLowerCase()
.trim()
.describe(SECRET_TAGS.CREATE.slug)
.refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
}),
color: z.string().trim().describe(SECRET_TAGS.CREATE.color)
}),
response: {
@ -144,8 +153,14 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
tagId: z.string().trim().describe(SECRET_TAGS.UPDATE.tagId)
}),
body: z.object({
name: z.string().trim().describe(SECRET_TAGS.UPDATE.name),
slug: z.string().trim().describe(SECRET_TAGS.UPDATE.slug),
slug: z
.string()
.toLowerCase()
.trim()
.describe(SECRET_TAGS.UPDATE.slug)
.refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
}),
color: z.string().trim().describe(SECRET_TAGS.UPDATE.color)
}),
response: {
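With the name column gone, the slug is the only tag identifier, so the router now requires the submitted slug to already be in canonical form. A standalone sketch of what the stricter refinement above accepts and rejects (illustrative only):

import slugify from "@sindresorhus/slugify";
import { z } from "zod";

// Mirrors the stricter slug validation added above.
const slugSchema = z
  .string()
  .toLowerCase()
  .trim()
  .refine((v) => slugify(v) === v, {
    message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
  });

slugSchema.parse("backend-api"); // ok: already a valid slug
// slugSchema.parse("Backend API"); // throws: slugify("backend api") is "backend-api", which differs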

@ -9,7 +9,7 @@ import {
UsersSchema
} from "@app/db/schemas";
import { ORGANIZATIONS } from "@app/lib/api-docs";
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
@ -307,7 +307,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/",
config: {
rateLimit: creationLimit
rateLimit: writeLimit
},
schema: {
body: z.object({

@ -4,7 +4,7 @@ import { z } from "zod";
import { CertificateAuthoritiesSchema, CertificatesSchema, ProjectKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PROJECTS } from "@app/lib/api-docs";
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -142,7 +142,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/",
config: {
rateLimit: creationLimit
rateLimit: writeLimit
},
schema: {
description: "Create a new project",

@ -59,9 +59,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
})
.extend({ name: z.string() })
.array()
})
)
})
@ -116,16 +117,15 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
secret: SecretsSchema.omit({ secretBlindIndex: true }).merge(
z.object({
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
secret: SecretsSchema.omit({ secretBlindIndex: true }).extend({
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
)
.extend({ name: z.string() })
.array()
})
})
}
},
@ -180,7 +180,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
.enum(["true", "false"])
.default("false")
.transform((value) => value === "true")
.describe(RAW_SECRETS.LIST.includeImports)
.describe(RAW_SECRETS.LIST.includeImports),
tagSlugs: z
.string()
.describe(RAW_SECRETS.LIST.tagSlugs)
.optional()
// split by comma and trim the strings
.transform((el) => (el ? el.split(",").map((i) => i.trim()) : []))
}),
response: {
200: z.object({
@ -190,9 +196,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
})
@ -251,7 +257,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId: workspaceId,
path: secretPath,
includeImports: req.query.include_imports,
recursive: req.query.recursive
recursive: req.query.recursive,
tagSlugs: req.query.tagSlugs
});
await server.services.auditLog.createAuditLog({
@ -325,9 +332,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
})
@ -731,9 +738,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
})
.extend({ name: z.string() })
.array()
})
.array(),
imports: z
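The new tagSlugs query parameter above is accepted as a plain comma-separated string and split into trimmed slugs before the service filters secrets by them. A minimal standalone sketch of just that transform, using the same zod chain (illustrative only):

import { z } from "zod";

// Mirrors the tagSlugs querystring transform added above.
const tagSlugsSchema = z
  .string()
  .optional()
  .transform((el) => (el ? el.split(",").map((i) => i.trim()) : []));

tagSlugsSchema.parse("billing, backend,infra"); // ["billing", "backend", "infra"]
tagSlugsSchema.parse(undefined); // [] — no tag filtering applied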

@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TIdentityUaClientSecretDALFactory = ReturnType<typeof identityUaClientSecretDALFactory>;
@ -23,5 +24,55 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
}
};
return { ...uaClientSecretOrm, incrementUsage };
const removeExpiredClientSecrets = async (tx?: Knex) => {
const BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
let deletedClientSecret: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
do {
try {
const findExpiredClientSecretQuery = (tx || db)(TableName.IdentityUaClientSecret)
.where({
isClientSecretRevoked: true
})
.orWhere((qb) => {
void qb
.where("clientSecretNumUses", ">", 0)
.andWhere(
"clientSecretNumUses",
">=",
db.ref("clientSecretNumUsesLimit").withSchema(TableName.IdentityUaClientSecret)
);
})
.orWhere((qb) => {
void qb
.where("clientSecretTTL", ">", 0)
.andWhereRaw(
`"${TableName.IdentityUaClientSecret}"."createdAt" + make_interval(secs => "${TableName.IdentityUaClientSecret}"."clientSecretTTL") < NOW()`
);
})
.select("id")
.limit(BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
deletedClientSecret = await (tx || db)(TableName.IdentityUaClientSecret)
.whereIn("id", findExpiredClientSecretQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete client secret on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedClientSecret.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};
return { ...uaClientSecretOrm, incrementUsage, removeExpiredClientSecrets };
};

@ -0,0 +1,357 @@
import { retry } from "@octokit/plugin-retry";
import { Octokit } from "@octokit/rest";
import { TIntegrationAuths, TIntegrations } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectBotServiceFactory } from "../project-bot/project-bot-service";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
import { fnSecretsV2FromImports } from "../secret-import/secret-import-fns";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { TIntegrationAuthServiceFactory } from "./integration-auth-service";
import { Integrations } from "./integration-list";
const MAX_SYNC_SECRET_DEPTH = 5;
/**
* Return the secrets in a given [folderId] including secrets from
* nested imported folders recursively.
*/
const getIntegrationSecretsV2 = async (
dto: {
projectId: string;
environment: string;
folderId: string;
depth: number;
decryptor: (value: Buffer | null | undefined) => string;
},
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">,
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">,
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">
) => {
const content: Record<string, boolean> = {};
if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
logger.info(
`getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]`
);
return content;
}
// process secrets in current folder
const secrets = await secretV2BridgeDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = secret.key;
content[secretKey] = true;
});
// check if current folder has any imports from other folders
const secretImports = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
// if no imports then return secrets in the current folder
if (!secretImports.length) return content;
const importedSecrets = await fnSecretsV2FromImports({
decryptor: dto.decryptor,
folderDAL,
secretDAL: secretV2BridgeDAL,
secretImportDAL,
allowedImports: secretImports
});
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) {
const importedSecret = importedSecrets[i].secrets[j];
if (!content[importedSecret.key]) {
content[importedSecret.key] = true;
}
}
}
return content;
};
/**
* Return the secrets in a given [folderId] including secrets from
* nested imported folders recursively.
*/
const getIntegrationSecretsV1 = async (
dto: {
projectId: string;
environment: string;
folderId: string;
key: string;
depth: number;
},
secretDAL: Pick<TSecretDALFactory, "findByFolderId">,
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">,
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">
) => {
let content: Record<string, boolean> = {};
if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
logger.info(
`getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]`
);
return content;
}
// process secrets in current folder
const secrets = await secretDAL.findByFolderId(dto.folderId);
secrets.forEach((secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: dto.key
});
content[secretKey] = true;
});
// check if current folder has any imports from other folders
const secretImport = await secretImportDAL.find({ folderId: dto.folderId, isReplication: false });
// if no imports then return secrets in the current folder
if (!secretImport) return content;
const importedFolders = await folderDAL.findByManySecretPath(
secretImport.map(({ importEnv, importPath }) => ({
envId: importEnv.id,
secretPath: importPath
}))
);
for await (const folder of importedFolders) {
if (folder) {
// get secrets contained in each imported folder by recursively calling
// this function against the imported folder
const importedSecrets = await getIntegrationSecretsV1(
{
environment: dto.environment,
projectId: dto.projectId,
folderId: folder.id,
key: dto.key,
depth: dto.depth + 1
},
secretDAL,
folderDAL,
secretImportDAL
);
// add the imported secrets to the current folder secrets
content = { ...importedSecrets, ...content };
}
}
return content;
};
export const deleteGithubSecrets = async ({
integration,
secrets,
accessToken
}: {
integration: Omit<TIntegrations, "envId">;
secrets: Record<string, boolean>;
accessToken: string;
}) => {
interface GitHubSecret {
name: string;
created_at: string;
updated_at: string;
visibility?: "all" | "private" | "selected";
selected_repositories_url?: string | undefined;
}
const OctokitWithRetry = Octokit.plugin(retry);
const octokit = new OctokitWithRetry({
auth: accessToken
});
enum GithubScope {
Repo = "github-repo",
Org = "github-org",
Env = "github-env"
}
let encryptedGithubSecrets: GitHubSecret[];
switch (integration.scope) {
case GithubScope.Org: {
encryptedGithubSecrets = (
await octokit.request("GET /orgs/{org}/actions/secrets", {
org: integration.owner as string
})
).data.secrets;
break;
}
case GithubScope.Env: {
encryptedGithubSecrets = (
await octokit.request("GET /repositories/{repository_id}/environments/{environment_name}/secrets", {
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string
})
).data.secrets;
break;
}
default: {
encryptedGithubSecrets = (
await octokit.request("GET /repos/{owner}/{repo}/actions/secrets", {
owner: integration.owner as string,
repo: integration.app as string
})
).data.secrets;
break;
}
}
for await (const encryptedSecret of encryptedGithubSecrets) {
if (encryptedSecret.name in secrets) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: encryptedSecret.name
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
secret_name: encryptedSecret.name
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
}
}
// small delay to prevent hitting API rate limits
await new Promise((resolve) => {
setTimeout(resolve, 50);
});
}
}
};
export const deleteIntegrationSecrets = async ({
integration,
integrationAuth,
integrationAuthService,
projectBotService,
secretV2BridgeDAL,
folderDAL,
secretDAL,
secretImportDAL,
kmsService
}: {
integration: Omit<TIntegrations, "envId"> & {
projectId: string;
environment: {
id: string;
name: string;
slug: string;
};
secretPath: string;
};
integrationAuth: TIntegrationAuths;
integrationAuthService: Pick<TIntegrationAuthServiceFactory, "getIntegrationAccessToken" | "getIntegrationAuth">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">;
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath" | "findBySecretPath">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
secretDAL: Pick<TSecretDALFactory, "findByFolderId">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
}) => {
const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integration.projectId);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: integration.projectId
});
const folder = await folderDAL.findBySecretPath(
integration.projectId,
integration.environment.slug,
integration.secretPath
);
if (!folder) {
throw new NotFoundError({
message: "Folder not found."
});
}
const { accessToken } = await integrationAuthService.getIntegrationAccessToken(
integrationAuth,
shouldUseSecretV2Bridge,
botKey
);
const secrets = shouldUseSecretV2Bridge
? await getIntegrationSecretsV2(
{
environment: integration.environment.id,
projectId: integration.projectId,
folderId: folder.id,
depth: 1,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
},
secretV2BridgeDAL,
folderDAL,
secretImportDAL
)
: await getIntegrationSecretsV1(
{
environment: integration.environment.id,
projectId: integration.projectId,
folderId: folder.id,
key: botKey as string,
depth: 1
},
secretDAL,
folderDAL,
secretImportDAL
);
const suffixedSecrets: typeof secrets = {};
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
if (metadata) {
Object.keys(secrets).forEach((key) => {
const prefix = metadata?.secretPrefix || "";
const suffix = metadata?.secretSuffix || "";
const newKey = prefix + key + suffix;
suffixedSecrets[newKey] = secrets[key];
});
}
switch (integration.integration) {
case Integrations.GITHUB: {
await deleteGithubSecrets({
integration,
accessToken,
secrets: Object.keys(suffixedSecrets).length !== 0 ? suffixedSecrets : secrets
});
break;
}
default:
throw new BadRequestError({
message: "Invalid integration"
});
}
};

@ -538,19 +538,20 @@ const syncSecretsAWSParameterStore = async ({
integration,
secrets,
accessId,
accessToken
accessToken,
projectId
}: {
integration: TIntegrations;
integration: TIntegrations & { secretPath: string; environment: { slug: string } };
secrets: Record<string, { value: string; comment?: string }>;
accessId: string | null;
accessToken: string;
projectId?: string;
}) => {
let response: { isSynced: boolean; syncMessage: string } | null = null;
if (!accessId) {
throw new Error("AWS access ID is required");
}
const config = new AWS.Config({
region: integration.region as string,
credentials: {
@ -567,7 +568,9 @@ const syncSecretsAWSParameterStore = async ({
const metadata = z.record(z.any()).parse(integration.metadata || {});
const awsParameterStoreSecretsObj: Record<string, AWS.SSM.Parameter> = {};
logger.info(
`getIntegrationSecrets: integration sync triggered for ssm with [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [shouldDisableDelete=${metadata.shouldDisableDelete}]`
);
// now fetch all aws parameter store secrets
let hasNext = true;
let nextToken: string | undefined;
@ -594,6 +597,18 @@ const syncSecretsAWSParameterStore = async ({
nextToken = parameters.NextToken;
}
logger.info(
`getIntegrationSecrets: all fetched keys from AWS SSM [projectId=${projectId}] [environment=${
integration.environment.slug
}] [secretPath=${integration.secretPath}] [awsParameterStoreSecretsObj=${Object.keys(
awsParameterStoreSecretsObj
).join(",")}]`
);
logger.info(
`getIntegrationSecrets: all secrets from Infisical to send to AWS SSM [projectId=${projectId}] [environment=${
integration.environment.slug
}] [secretPath=${integration.secretPath}] [secrets=${Object.keys(secrets).join(",")}]`
);
// Identify secrets to create
// don't use Promise.all() and promise map here
// it will cause rate limit
@ -603,24 +618,56 @@ const syncSecretsAWSParameterStore = async ({
// case: secret does not exist in AWS parameter store
// -> create secret
if (secrets[key].value) {
logger.info(
`getIntegrationSecrets: create secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }),
// Overwrite: true,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
Key: tag.key,
Value: tag.value
}))
: []
Overwrite: true
})
.promise();
if (metadata.secretAWSTag?.length) {
try {
await ssm
.addTagsToResource({
ResourceType: "Parameter",
ResourceId: `${integration.path}${key}`,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
Key: tag.key,
Value: tag.value
}))
: []
})
.promise();
} catch (err) {
logger.error(
err,
`getIntegrationSecrets: create secret in AWS SSM failed for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
logger.error(
`AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)`
);
}
response = {
isSynced: false,
syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store"
};
}
}
}
// case: secret exists in AWS parameter store
} else {
logger.info(
`getIntegrationSecrets: update secret in AWS SSM for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
// -> update secret
if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
await ssm
@ -648,6 +695,10 @@ const syncSecretsAWSParameterStore = async ({
})
.promise();
} catch (err) {
logger.error(
err,
`getIntegrationSecrets: update secret in AWS SSM failed for [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}]`
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any).code === "AccessDeniedException") {
logger.error(
@ -670,9 +721,18 @@ const syncSecretsAWSParameterStore = async ({
}
if (!metadata.shouldDisableDelete) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=1]`
);
for (const key in awsParameterStoreSecretsObj) {
if (Object.hasOwn(awsParameterStoreSecretsObj, key)) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=2]`
);
if (!(key in secrets)) {
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=3]`
);
// case: secret exists in AWS parameter store but not in Infisical
// -> delete secret
await ssm
@ -680,6 +740,9 @@ const syncSecretsAWSParameterStore = async ({
Name: awsParameterStoreSecretsObj[key].Name as string
})
.promise();
logger.info(
`getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [key=${key}] [step=4]`
);
}
await new Promise((resolve) => {
setTimeout(resolve, 50);
@ -3656,7 +3719,8 @@ export const syncIntegrationSecrets = async ({
integration,
secrets,
accessId,
accessToken
accessToken,
projectId
});
break;
case Integrations.AWS_SECRET_MANAGER:

@ -6,8 +6,15 @@ import { BadRequestError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth-dal";
import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service";
import { deleteIntegrationSecrets } from "../integration-auth/integration-delete-secret";
import { TKmsServiceFactory } from "../kms/kms-service";
import { TProjectBotServiceFactory } from "../project-bot/project-bot-service";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretQueueFactory } from "../secret/secret-queue";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { TIntegrationDALFactory } from "./integration-dal";
import {
TCreateIntegrationDTO,
@ -19,9 +26,15 @@ import {
type TIntegrationServiceFactoryDep = {
integrationDAL: TIntegrationDALFactory;
integrationAuthDAL: TIntegrationAuthDALFactory;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
integrationAuthService: TIntegrationAuthServiceFactory;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findByManySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectBotService: TProjectBotServiceFactory;
secretQueueService: Pick<TSecretQueueFactory, "syncIntegrations">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "find" | "findByFolderId">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretDAL: Pick<TSecretDALFactory, "findByFolderId">;
};
export type TIntegrationServiceFactory = ReturnType<typeof integrationServiceFactory>;
@ -31,7 +44,13 @@ export const integrationServiceFactory = ({
integrationAuthDAL,
folderDAL,
permissionService,
secretQueueService
secretQueueService,
integrationAuthService,
projectBotService,
secretV2BridgeDAL,
secretImportDAL,
kmsService,
secretDAL
}: TIntegrationServiceFactoryDep) => {
const createIntegration = async ({
app,
@ -161,7 +180,14 @@ export const integrationServiceFactory = ({
return updatedIntegration;
};
const deleteIntegration = async ({ actorId, id, actor, actorAuthMethod, actorOrgId }: TDeleteIntegrationDTO) => {
const deleteIntegration = async ({
actorId,
id,
actor,
actorAuthMethod,
actorOrgId,
shouldDeleteIntegrationSecrets
}: TDeleteIntegrationDTO) => {
const integration = await integrationDAL.findById(id);
if (!integration) throw new BadRequestError({ message: "Integration not found" });
@ -174,6 +200,22 @@ export const integrationServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations);
const integrationAuth = await integrationAuthDAL.findById(integration.integrationAuthId);
if (shouldDeleteIntegrationSecrets) {
await deleteIntegrationSecrets({
integration,
integrationAuth,
projectBotService,
integrationAuthService,
secretV2BridgeDAL,
folderDAL,
secretImportDAL,
secretDAL,
kmsService
});
}
const deletedIntegration = await integrationDAL.transaction(async (tx) => {
// delete integration
const deletedIntegrationResult = await integrationDAL.deleteById(id, tx);

@ -63,6 +63,7 @@ export type TUpdateIntegrationDTO = {
export type TDeleteIntegrationDTO = {
id: string;
shouldDeleteIntegrationSecrets?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TSyncIntegrationDTO = {

@ -46,6 +46,7 @@ export const projectBotDALFactory = (db: TDbClient) => {
const doc = await db
.replicaNode()(TableName.ProjectMembership)
.where(`${TableName.ProjectMembership}.projectId` as "projectId", projectId)
.where(`${TableName.ProjectKeys}.projectId` as "projectId", projectId)
.where(`${TableName.Users}.isGhost` as "isGhost", false)
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
.join(TableName.ProjectKeys, `${TableName.ProjectMembership}.userId`, `${TableName.ProjectKeys}.receiverId`)

@ -66,10 +66,10 @@ export const getBotKeyFnFactory = (
await projectBotDAL.create({
name: "Infisical Bot (Ghost)",
projectId,
isActive: true,
tag,
iv,
encryptedPrivateKey: ciphertext,
isActive: true,
publicKey: botKey.publicKey,
algorithm,
keyEncoding: encoding,
@ -80,6 +80,12 @@ export const getBotKeyFnFactory = (
} else {
await projectBotDAL.updateById(bot.id, {
isActive: true,
tag,
iv,
encryptedPrivateKey: ciphertext,
publicKey: botKey.publicKey,
algorithm,
keyEncoding: encoding,
encryptedProjectKey: encryptedWorkspaceKey.ciphertext,
encryptedProjectKeyNonce: encryptedWorkspaceKey.nonce,
senderId: projectV1Keys.userId
@ -89,7 +95,6 @@ export const getBotKeyFnFactory = (
}
const botPrivateKey = getBotPrivateKey({ bot });
const botKey = decryptAsymmetric({
ciphertext: bot.encryptedProjectKey,
privateKey: botPrivateKey,

@ -4,6 +4,7 @@ import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityUaClientSecretDALFactory } from "../identity-ua/identity-ua-client-secret-dal";
import { TSecretVersionDALFactory } from "../secret/secret-version-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretSharingDALFactory } from "../secret-sharing/secret-sharing-dal";
@ -12,6 +13,7 @@ import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-d
type TDailyResourceCleanUpQueueServiceFactoryDep = {
auditLogDAL: Pick<TAuditLogDALFactory, "pruneAuditLog">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "removeExpiredTokens">;
identityUniversalAuthClientSecretDAL: Pick<TIdentityUaClientSecretDALFactory, "removeExpiredClientSecrets">;
secretVersionDAL: Pick<TSecretVersionDALFactory, "pruneExcessVersions">;
secretVersionV2DAL: Pick<TSecretVersionV2DALFactory, "pruneExcessVersions">;
secretFolderVersionDAL: Pick<TSecretFolderVersionDALFactory, "pruneExcessVersions">;
@ -30,12 +32,14 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
secretFolderVersionDAL,
identityAccessTokenDAL,
secretSharingDAL,
secretVersionV2DAL
secretVersionV2DAL,
identityUniversalAuthClientSecretDAL
}: TDailyResourceCleanUpQueueServiceFactoryDep) => {
queueService.start(QueueName.DailyResourceCleanUp, async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: queue task started`);
await auditLogDAL.pruneAuditLog();
await identityAccessTokenDAL.removeExpiredTokens();
await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets();
await secretSharingDAL.pruneExpiredSharedSecrets();
await snapshotDAL.pruneExcessSnapshots();
await secretVersionDAL.pruneExcessVersions();

@ -51,7 +51,7 @@ export const secretTagDALFactory = (db: TDbClient) => {
...secretTagOrm,
saveTagsToSecret: secretJnTagOrm.insertMany,
deleteTagsToSecret: secretJnTagOrm.delete,
saveTagsToSecretV2: secretV2JnTagOrm.insertMany,
saveTagsToSecretV2: secretV2JnTagOrm.batchInsert,
deleteTagsToSecretV2: secretV2JnTagOrm.delete,
findSecretTagsByProjectId,
deleteTagsManySecret,

@ -22,16 +22,7 @@ type TSecretTagServiceFactoryDep = {
export type TSecretTagServiceFactory = ReturnType<typeof secretTagServiceFactory>;
export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSecretTagServiceFactoryDep) => {
const createTag = async ({
name,
slug,
actor,
color,
actorId,
actorOrgId,
actorAuthMethod,
projectId
}: TCreateTagDTO) => {
const createTag = async ({ slug, actor, color, actorId, actorOrgId, actorAuthMethod, projectId }: TCreateTagDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
@ -46,7 +37,6 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
const newTag = await secretTagDAL.create({
projectId,
name,
slug,
color,
createdBy: actorId,
@ -55,7 +45,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
return newTag;
};
const updateTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, name, color, slug }: TUpdateTagDTO) => {
const updateTag = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, color, slug }: TUpdateTagDTO) => {
const tag = await secretTagDAL.findById(id);
if (!tag) throw new BadRequestError({ message: "Tag doesn't exist" });
@ -73,7 +63,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Tags);
const updatedTag = await secretTagDAL.updateById(tag.id, { name, color, slug });
const updatedTag = await secretTagDAL.updateById(tag.id, { color, slug });
return updatedTag;
};
@ -107,7 +97,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
return tag;
return { ...tag, name: tag.slug };
};
const getTagBySlug = async ({ actorId, actor, actorOrgId, actorAuthMethod, slug, projectId }: TGetTagBySlugDTO) => {
@ -123,7 +113,7 @@ export const secretTagServiceFactory = ({ secretTagDAL, permissionService }: TSe
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
return tag;
return { ...tag, name: tag.slug };
};
const getProjectTags = async ({ actor, actorId, actorOrgId, actorAuthMethod, projectId }: TListProjectTagsDTO) => {

@ -1,14 +1,12 @@
import { TProjectPermission } from "@app/lib/types";
export type TCreateTagDTO = {
name: string;
color: string;
slug: string;
} & TProjectPermission;
export type TUpdateTagDTO = {
id: string;
name?: string;
slug?: string;
color?: string;
} & Omit<TProjectPermission, "projectId">;

@ -136,7 +136,6 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
@ -147,11 +146,11 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
id,
color,
slug,
name
name: slug
})
}
]
@ -169,14 +168,13 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.where({ [`${TableName.SecretV2}Id` as const]: secretId })
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
return tags.map((el) => ({
id: el.tagId,
color: el.tagColor,
slug: el.tagSlug,
name: el.tagName
name: el.tagSlug
}));
} catch (error) {
throw new DatabaseError({ error, name: "get secret tags" });
@ -210,7 +208,6 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
@ -221,11 +218,11 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
id,
color,
slug,
name
name: slug
})
}
]
@ -290,7 +287,7 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
}))
);
if (!newSecretReferences.length) return;
const secretReferences = await (tx || db)(TableName.SecretReferenceV2).insert(newSecretReferences);
const secretReferences = await (tx || db).batchInsert(TableName.SecretReferenceV2, newSecretReferences);
return secretReferences;
} catch (error) {
throw new DatabaseError({ error, name: "UpsertSecretReference" });
@ -350,8 +347,7 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.SecretV2))
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
const docs = sqlNestRelationships({
data: rawDocs,
key: "id",
@ -360,11 +356,11 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
id,
color,
slug,
name
name: slug
})
}
]

@ -542,8 +542,8 @@ export const reshapeBridgeSecret = (
secretPath,
workspace: workspaceId,
environment,
secretValue: secret.value,
secretComment: secret.comment,
secretValue: secret.value || "",
secretComment: secret.comment || "",
version: secret.version,
type: secret.type,
_id: secret.id,

@ -378,6 +378,18 @@ export const secretV2BridgeServiceFactory = ({
throw new BadRequestError({ message: "Must be user to delete personal secret" });
}
const secretToDelete = await secretDAL.findOne({
key: inputSecret.secretName,
folderId,
...(inputSecret.type === SecretType.Shared
? {}
: {
type: SecretType.Personal,
userId: actorId
})
});
if (!secretToDelete) throw new NotFoundError({ message: "Secret not found" });
const deletedSecret = await secretDAL.transaction(async (tx) =>
fnSecretBulkDelete({
projectId,
@ -429,6 +441,7 @@ export const secretV2BridgeServiceFactory = ({
actorAuthMethod,
includeImports,
recursive,
tagSlugs = [],
expandSecretReferences: shouldExpandSecretReferences
}: TGetSecretsDTO) => {
const { permission } = await permissionService.getProjectPermission(
@ -490,12 +503,15 @@ export const secretV2BridgeServiceFactory = ({
...secret,
value: secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString()
: undefined,
: "",
comment: secret.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedComment }).toString()
: undefined
: ""
})
);
const filteredSecrets = tagSlugs.length
? decryptedSecrets.filter((secret) => Boolean(secret.tags?.find((el) => tagSlugs.includes(el.slug))))
: decryptedSecrets;
const expandSecretReferences = expandSecretReferencesFactory({
projectId,
folderDAL,
@ -504,7 +520,7 @@ export const secretV2BridgeServiceFactory = ({
});
if (shouldExpandSecretReferences) {
const secretsGroupByPath = groupBy(decryptedSecrets, (i) => i.secretPath);
const secretsGroupByPath = groupBy(filteredSecrets, (i) => i.secretPath);
for (const secretPathKey in secretsGroupByPath) {
if (Object.hasOwn(secretsGroupByPath, secretPathKey)) {
const secretsGroupByKey = secretsGroupByPath[secretPathKey].reduce(
@ -522,7 +538,7 @@ export const secretV2BridgeServiceFactory = ({
await expandSecretReferences(secretsGroupByKey);
secretsGroupByPath[secretPathKey].forEach((decryptedSecret) => {
// eslint-disable-next-line no-param-reassign
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value;
decryptedSecret.secretValue = secretsGroupByKey[decryptedSecret.secretKey].value || "";
});
}
}
@ -530,7 +546,7 @@ export const secretV2BridgeServiceFactory = ({
if (!includeImports) {
return {
secrets: decryptedSecrets
secrets: filteredSecrets
};
}
@ -558,7 +574,7 @@ export const secretV2BridgeServiceFactory = ({
});
return {
secrets: decryptedSecrets,
secrets: filteredSecrets,
imports: importedSecrets
};
};

@ -20,6 +20,7 @@ export type TGetSecretsDTO = {
environment: string;
includeImports?: boolean;
recursive?: boolean;
tagSlugs?: string[];
} & TProjectPermission;
export type TGetASecretDTO = {

@ -123,7 +123,6 @@ export const secretDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
data: secs,
@ -133,11 +132,11 @@ export const secretDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
id,
color,
slug,
name
name: slug
})
}
]
@ -155,14 +154,13 @@ export const secretDALFactory = (db: TDbClient) => {
.where({ [`${TableName.Secret}Id` as const]: secretId })
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
return tags.map((el) => ({
id: el.tagId,
color: el.tagColor,
slug: el.tagSlug,
name: el.tagName
name: el.tagSlug
}));
} catch (error) {
throw new DatabaseError({ error, name: "get secret tags" });
@ -188,7 +186,6 @@ export const secretDALFactory = (db: TDbClient) => {
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"))
.orderBy("id", "asc");
const data = sqlNestRelationships({
data: secs,
@ -198,11 +195,11 @@ export const secretDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
id,
color,
slug,
name
name: slug
})
}
]
@ -318,8 +315,7 @@ export const secretDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.Secret))
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"))
.select(db.ref("name").withSchema(TableName.SecretTag).as("tagName"));
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
const docs = sqlNestRelationships({
data: rawDocs,
key: "id",
@ -328,11 +324,11 @@ export const secretDALFactory = (db: TDbClient) => {
{
key: "tagId",
label: "tags" as const,
mapper: ({ tagId: id, tagColor: color, tagSlug: slug, tagName: name }) => ({
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
id,
color,
slug,
name
name: slug
})
}
]

@ -370,7 +370,6 @@ export const decryptSecretRaw = (
id: string;
slug: string;
color?: string | null;
name: string;
}[];
},
key: string
@ -412,7 +411,7 @@ export const decryptSecretRaw = (
_id: secret.id,
id: secret.id,
user: secret.userId,
tags: secret.tags,
tags: secret.tags?.map((el) => ({ ...el, name: el.slug })),
skipMultilineEncoding: secret.skipMultilineEncoding,
secretReminderRepeatDays: secret.secretReminderRepeatDays,
secretReminderNote: secret.secretReminderNote,

@ -73,12 +73,12 @@ type TSecretQueueFactoryDep = {
secretVersionTagDAL: TSecretVersionTagDALFactory;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretV2BridgeDAL: TSecretV2BridgeDALFactory;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "batchInsert" | "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "batchInsert">;
secretRotationDAL: Pick<TSecretRotationDALFactory, "secretOutputV2InsertMany" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "deleteByProjectId">;
snapshotDAL: Pick<TSnapshotDALFactory, "findNSecretV1SnapshotByFolderId" | "deleteSnapshotsAboveLimit">;
snapshotSecretV2BridgeDAL: Pick<TSnapshotSecretV2DALFactory, "insertMany">;
snapshotSecretV2BridgeDAL: Pick<TSnapshotSecretV2DALFactory, "insertMany" | "batchInsert">;
};
export type TGetSecrets = {
@ -728,7 +728,10 @@ export const secretQueueFactory = ({
isSynced: response?.isSynced ?? true
});
} catch (err) {
logger.info("Secret integration sync error: %o", err);
logger.error(
err,
`Secret integration sync error [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}]`
);
const message =
(err instanceof AxiosError ? JSON.stringify(err?.response?.data) : (err as Error)?.message) ||
@ -828,7 +831,7 @@ export const secretQueueFactory = ({
secretId: string;
references: { environment: string; secretPath: string; secretKey: string }[];
}[] = [];
await secretV2BridgeDAL.insertMany(
await secretV2BridgeDAL.batchInsert(
projectV1Secrets.map((el) => {
const key = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretKeyCiphertext,
@ -1004,14 +1007,14 @@ export const secretQueueFactory = ({
const projectV3SecretVersions = Object.values(projectV3SecretVersionsGroupById);
if (projectV3SecretVersions.length) {
await secretVersionV2BridgeDAL.insertMany(projectV3SecretVersions, tx);
await secretVersionV2BridgeDAL.batchInsert(projectV3SecretVersions, tx);
}
if (projectV3SecretVersionTags.length) {
await secretVersionTagV2BridgeDAL.insertMany(projectV3SecretVersionTags, tx);
await secretVersionTagV2BridgeDAL.batchInsert(projectV3SecretVersionTags, tx);
}
if (projectV3SnapshotSecrets.length) {
await snapshotSecretV2BridgeDAL.insertMany(projectV3SnapshotSecrets, tx);
await snapshotSecretV2BridgeDAL.batchInsert(projectV3SnapshotSecrets, tx);
}
await snapshotDAL.deleteSnapshotsAboveLimit(folderId, SNAPSHOT_BATCH_SIZE, tx);
}

@ -964,7 +964,8 @@ export const secretServiceFactory = ({
environment,
includeImports,
expandSecretReferences,
recursive
recursive,
tagSlugs = []
}: TGetSecretsRawDTO) => {
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
if (shouldUseSecretV2Bridge) {
@ -978,7 +979,8 @@ export const secretServiceFactory = ({
path,
recursive,
actorAuthMethod,
includeImports
includeImports,
tagSlugs
});
return { secrets, imports };
}
@ -998,6 +1000,9 @@ export const secretServiceFactory = ({
});
const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey));
const filteredSecrets = tagSlugs.length
? decryptedSecrets.filter((secret) => Boolean(secret.tags?.find((el) => tagSlugs.includes(el.slug))))
: decryptedSecrets;
const processedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => {
const decryptedImportSecrets = importedSecrets.map((sec) =>
decryptSecretRaw(
@ -1106,14 +1111,14 @@ export const secretServiceFactory = ({
};
// expand secrets
await batchSecretsExpand(decryptedSecrets);
await batchSecretsExpand(filteredSecrets);
// expand imports by batch
await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets)));
}
return {
secrets: decryptedSecrets,
secrets: filteredSecrets,
imports: processedImports
};
};
@ -2081,7 +2086,7 @@ export const secretServiceFactory = ({
return {
...updatedSecret[0],
tags: [...existingSecretTags, ...tags].map((t) => ({ id: t.id, slug: t.slug, name: t.name, color: t.color }))
tags: [...existingSecretTags, ...tags].map((t) => ({ id: t.id, slug: t.slug, name: t.slug, color: t.color }))
};
};

@ -149,6 +149,7 @@ export type TGetSecretsRawDTO = {
environment: string;
includeImports?: boolean;
recursive?: boolean;
tagSlugs?: string[];
} & TProjectPermission;
export type TGetASecretRawDTO = {

@ -404,6 +404,10 @@ func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Reques
SetQueryParam("environment", request.Environment).
SetQueryParam("secretPath", request.SecretPath)
if request.TagSlugs != "" {
req.SetQueryParam("tagSlugs", request.TagSlugs)
}
if request.IncludeImport {
req.SetQueryParam("include_imports", "true")
}

@ -574,6 +574,7 @@ type GetRawSecretsV3Request struct {
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
TagSlugs string `json:"tagSlugs,omitempty"`
}
type GetRawSecretsV3Response struct {

@ -312,7 +312,7 @@ func ParseAgentConfig(configFile []byte) (*Config, error) {
func secretTemplateFunction(accessToken string, existingEtag string, currentEtag *string) func(string, string, string) ([]models.SingleEnvironmentVariable, error) {
return func(projectID, envSlug, secretPath string) ([]models.SingleEnvironmentVariable, error) {
res, err := util.GetPlainTextSecretsV3(accessToken, projectID, envSlug, secretPath, false, false)
res, err := util.GetPlainTextSecretsV3(accessToken, projectID, envSlug, secretPath, false, false, "")
if err != nil {
return nil, err
}

@ -14,6 +14,7 @@ import (
"github.com/Infisical/infisical-merge/packages/util"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"gopkg.in/yaml.v2"
)
const (
@ -188,7 +189,7 @@ func formatEnvs(envs []models.SingleEnvironmentVariable, format string) (string,
case FormatCSV:
return formatAsCSV(envs), nil
case FormatYaml:
return formatAsYaml(envs), nil
return formatAsYaml(envs)
default:
return "", fmt.Errorf("invalid format type: %s. Available format types are [%s]", format, []string{FormatDotenv, FormatJson, FormatCSV, FormatYaml, FormatDotEnvExport})
}
@ -224,12 +225,18 @@ func formatAsDotEnvExport(envs []models.SingleEnvironmentVariable) string {
return dotenv
}
func formatAsYaml(envs []models.SingleEnvironmentVariable) string {
var dotenv string
func formatAsYaml(envs []models.SingleEnvironmentVariable) (string, error) {
m := make(map[string]string)
for _, env := range envs {
dotenv += fmt.Sprintf("%s: %s\n", env.Key, env.Value)
m[env.Key] = env.Value
}
return dotenv
yamlBytes, err := yaml.Marshal(m)
if err != nil {
return "", fmt.Errorf("failed to format environment variables as YAML: %w", err)
}
return string(yamlBytes), nil
}
// Format environment variables as a JSON file

@ -0,0 +1,79 @@
package cmd
import (
"testing"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v2"
)
func TestFormatAsYaml(t *testing.T) {
tests := []struct {
name string
input []models.SingleEnvironmentVariable
expected string
}{
{
name: "Empty input",
input: []models.SingleEnvironmentVariable{},
expected: "{}\n",
},
{
name: "Single environment variable",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
},
expected: "KEY1: VALUE1\n",
},
{
name: "Multiple environment variables",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY2", Value: "VALUE2"},
{Key: "KEY3", Value: "VALUE3"},
},
expected: "KEY1: VALUE1\nKEY2: VALUE2\nKEY3: VALUE3\n",
},
{
name: "Overwriting duplicate keys",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY1", Value: "VALUE2"},
},
expected: "KEY1: VALUE2\n",
},
{
name: "Special characters in values",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "Value with spaces"},
{Key: "KEY2", Value: "Value:with:colons"},
{Key: "KEY3", Value: "Value\nwith\nnewlines"},
},
expected: "KEY1: Value with spaces\nKEY2: Value:with:colons\nKEY3: |-\n Value\n with\n newlines\n",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := formatAsYaml(tt.input)
assert.NoError(t, err)
// Compare the result with the expected output
assert.Equal(t, tt.expected, result)
// Additionally, parse the result back into a map to ensure it's valid YAML
var resultMap map[string]string
err = yaml.Unmarshal([]byte(result), &resultMap)
assert.NoError(t, err)
// Create an expected map from the input
expectedMap := make(map[string]string)
for _, env := range tt.input {
expectedMap[env.Key] = env.Value
}
assert.Equal(t, expectedMap, resultMap)
})
}
}

@ -155,22 +155,24 @@ var secretsSetCmd = &cobra.Command{
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
util.RequireLocalWorkspaceFile()
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if token == nil {
util.RequireLocalWorkspaceFile()
}
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -374,6 +376,11 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
@ -394,6 +401,12 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "To fetch all secrets")
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
if shouldExpand {
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
@ -413,11 +426,13 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
if value, ok := secretsMap[secretKeyFromArg]; ok {
requestedSecrets = append(requestedSecrets, value)
} else {
requestedSecrets = append(requestedSecrets, models.SingleEnvironmentVariable{
Key: secretKeyFromArg,
Type: "*not found*",
Value: "*not found*",
})
if !(plainOutput || showOnlyValue) {
requestedSecrets = append(requestedSecrets, models.SingleEnvironmentVariable{
Key: secretKeyFromArg,
Type: "*not found*",
Value: "*not found*",
})
}
}
}
@ -688,6 +703,7 @@ func init() {
secretsGetCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets, and process your referenced secrets")
secretsGetCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
secretsGetCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsGetCmd)
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsSetCmd)

@ -4,6 +4,7 @@ Copyright (c) 2023 Infisical Inc.
package cmd
import (
"encoding/base64"
"fmt"
"strings"
@ -13,13 +14,26 @@ import (
"github.com/spf13/cobra"
)
var AvailableVaultsAndDescriptions = []string{"auto (automatically select native vault on system)", "file (encrypted file vault)"}
var AvailableVaults = []string{"auto", "file"}
type VaultBackendType struct {
Name string
Description string
}
var AvailableVaults = []VaultBackendType{
{
Name: "auto",
Description: "automatically select the system keyring",
},
{
Name: "file",
Description: "encrypted file vault",
},
}
var vaultSetCmd = &cobra.Command{
Example: `infisical vault set pass`,
Use: "set [vault-name]",
Short: "Used to set the vault backend to store your login details securely at rest",
Example: `infisical vault set file`,
Use: "set [file|auto]",
Short: "Used to configure the vault backends",
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
@ -35,15 +49,16 @@ var vaultSetCmd = &cobra.Command{
return
}
if wantedVaultTypeName == "auto" || wantedVaultTypeName == "file" {
if wantedVaultTypeName == util.VAULT_BACKEND_AUTO_MODE || wantedVaultTypeName == util.VAULT_BACKEND_FILE_MODE {
configFile, err := util.GetConfigFile()
if err != nil {
log.Error().Msgf("Unable to set vault to [%s] because of [err=%s]", wantedVaultTypeName, err)
return
}
configFile.VaultBackendType = wantedVaultTypeName // save selected vault
configFile.LoggedInUserEmail = "" // reset the logged in user to prompt them to re login
configFile.VaultBackendType = wantedVaultTypeName
configFile.LoggedInUserEmail = ""
configFile.VaultBackendPassphrase = base64.StdEncoding.EncodeToString([]byte(util.GenerateRandomString(10)))
err = util.WriteConfigFile(&configFile)
if err != nil {
@ -55,7 +70,11 @@ var vaultSetCmd = &cobra.Command{
Telemetry.CaptureEvent("cli-command:vault set", posthog.NewProperties().Set("currentVault", currentVaultBackend).Set("wantedVault", wantedVaultTypeName).Set("version", util.CLI_VERSION))
} else {
log.Error().Msgf("The requested vault type [%s] is not available on this system. Only the following vault backends are available for you system: %s", wantedVaultTypeName, strings.Join(AvailableVaults, ", "))
var availableVaultsNames []string
for _, vault := range AvailableVaults {
availableVaultsNames = append(availableVaultsNames, vault.Name)
}
log.Error().Msgf("The requested vault type [%s] is not available on this system. Only the following vault backends are available for you system: %s", wantedVaultTypeName, strings.Join(availableVaultsNames, ", "))
}
},
}
@ -73,8 +92,8 @@ var vaultCmd = &cobra.Command{
func printAvailableVaultBackends() {
fmt.Printf("Vaults are used to securely store your login details locally. Available vaults:")
for _, backend := range AvailableVaultsAndDescriptions {
fmt.Printf("\n- %s", backend)
for _, vaultType := range AvailableVaults {
fmt.Printf("\n- %s (%s)", vaultType.Name, vaultType.Description)
}
currentVaultBackend, err := util.GetCurrentVaultBackend()
@ -89,5 +108,6 @@ func printAvailableVaultBackends() {
func init() {
vaultCmd.AddCommand(vaultSetCmd)
rootCmd.AddCommand(vaultCmd)
}

@ -11,10 +11,11 @@ type UserCredentials struct {
// The file struct for Infisical config file
type ConfigFile struct {
LoggedInUserEmail string `json:"loggedInUserEmail"`
LoggedInUserDomain string `json:"LoggedInUserDomain,omitempty"`
LoggedInUsers []LoggedInUser `json:"loggedInUsers,omitempty"`
VaultBackendType string `json:"vaultBackendType,omitempty"`
LoggedInUserEmail string `json:"loggedInUserEmail"`
LoggedInUserDomain string `json:"LoggedInUserDomain,omitempty"`
LoggedInUsers []LoggedInUser `json:"loggedInUsers,omitempty"`
VaultBackendType string `json:"vaultBackendType,omitempty"`
VaultBackendPassphrase string `json:"vaultBackendPassphrase,omitempty"`
}
type LoggedInUser struct {

@ -1,6 +1,7 @@
package util
import (
"encoding/base64"
"encoding/json"
"errors"
"fmt"
@ -50,10 +51,11 @@ func WriteInitalConfig(userCredentials *models.UserCredentials) error {
}
configFile := models.ConfigFile{
LoggedInUserEmail: userCredentials.Email,
LoggedInUserDomain: config.INFISICAL_URL,
LoggedInUsers: existingConfigFile.LoggedInUsers,
VaultBackendType: existingConfigFile.VaultBackendType,
LoggedInUserEmail: userCredentials.Email,
LoggedInUserDomain: config.INFISICAL_URL,
LoggedInUsers: existingConfigFile.LoggedInUsers,
VaultBackendType: existingConfigFile.VaultBackendType,
VaultBackendPassphrase: existingConfigFile.VaultBackendPassphrase,
}
configFileMarshalled, err := json.Marshal(configFile)
@ -215,6 +217,14 @@ func GetConfigFile() (models.ConfigFile, error) {
return models.ConfigFile{}, err
}
if configFile.VaultBackendPassphrase != "" {
decodedPassphrase, err := base64.StdEncoding.DecodeString(configFile.VaultBackendPassphrase)
if err != nil {
return models.ConfigFile{}, fmt.Errorf("GetConfigFile: Unable to decode base64 passphrase [err=%s]", err)
}
os.Setenv("INFISICAL_VAULT_FILE_PASSPHRASE", string(decodedPassphrase))
}
return configFile, nil
}

@ -8,6 +8,10 @@ const (
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
INFISICAL_VAULT_FILE_PASSPHRASE_ENV_NAME = "INFISICAL_VAULT_FILE_PASSPHRASE" // This works because we've forked the keyring package and added support for this env variable. This explains why you won't find any occurrences of it in the CLI codebase.
VAULT_BACKEND_AUTO_MODE = "auto"
VAULT_BACKEND_FILE_MODE = "file"
// Universal Auth
INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID"
@ -34,7 +38,8 @@ const (
SERVICE_TOKEN_IDENTIFIER = "service-token"
UNIVERSAL_AUTH_TOKEN_IDENTIFIER = "universal-auth-token"
INFISICAL_BACKUP_SECRET = "infisical-backup-secrets"
INFISICAL_BACKUP_SECRET = "infisical-backup-secrets" // akhilmhdh: @depreciated remove in version v0.30
INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY = "infisical-backup-secret-encryption-key"
)
var (

@ -71,7 +71,7 @@ func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
if strings.Contains(err.Error(), "credentials not found in system keyring") {
return LoggedInUserDetails{}, errors.New("we couldn't find your logged in details, try running [infisical login] then try again")
} else {
return LoggedInUserDetails{}, fmt.Errorf("failed to fetch creditnals from keyring because [err=%s]", err)
return LoggedInUserDetails{}, fmt.Errorf("failed to fetch credentials from keyring because [err=%s]", err)
}
}

@ -5,6 +5,7 @@ import (
"crypto/sha256"
"encoding/base64"
"fmt"
"math/rand"
"os"
"os/exec"
"path"
@ -25,6 +26,8 @@ type DecodedSymmetricEncryptionDetails = struct {
Key []byte
}
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
func GetBase64DecodedSymmetricEncryptionDetails(key string, cipher string, IV string, tag string) (DecodedSymmetricEncryptionDetails, error) {
cipherx, err := base64.StdEncoding.DecodeString(cipher)
if err != nil {
@ -287,3 +290,11 @@ func GetCmdFlagOrEnv(cmd *cobra.Command, flag, envName string) (string, error) {
}
return value, nil
}
func GenerateRandomString(length int) string {
b := make([]byte, length)
for i := range b {
b[i] = charset[rand.Intn(len(charset))]
}
return string(b)
}

@ -1,6 +1,10 @@
package util
import (
"encoding/base64"
"fmt"
"github.com/rs/zerolog/log"
"github.com/zalando/go-keyring"
)
@ -20,16 +24,39 @@ func SetValueInKeyring(key, value string) error {
PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical rest] then try again")
}
return keyring.Set(currentVaultBackend, MAIN_KEYRING_SERVICE, key, value)
err = keyring.Set(currentVaultBackend, MAIN_KEYRING_SERVICE, key, value)
if err != nil {
log.Debug().Msg(fmt.Sprintf("Error while setting default keyring: %v", err))
configFile, _ := GetConfigFile()
if configFile.VaultBackendPassphrase == "" {
encodedPassphrase := base64.StdEncoding.EncodeToString([]byte(GenerateRandomString(10))) // generate random passphrase
configFile.VaultBackendPassphrase = encodedPassphrase
configFile.VaultBackendType = VAULT_BACKEND_FILE_MODE
err = WriteConfigFile(&configFile)
if err != nil {
return err
}
// We call this function at the end to trigger the environment variable to be set
GetConfigFile()
}
err = keyring.Set(VAULT_BACKEND_FILE_MODE, MAIN_KEYRING_SERVICE, key, value)
log.Debug().Msg(fmt.Sprintf("Error while setting file keyring: %v", err))
}
return err
}
func GetValueInKeyring(key string) (string, error) {
currentVaultBackend, err := GetCurrentVaultBackend()
if err != nil {
PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical rest] then try again")
PrintErrorAndExit(1, err, "Unable to get current vault. Tip: run [infisical reset] then try again")
}
return keyring.Get(currentVaultBackend, MAIN_KEYRING_SERVICE, key)
}
func DeleteValueInKeyring(key string) error {

@ -1,14 +1,15 @@
package util
import (
"crypto/rand"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"os"
"path"
"regexp"
"slices"
"strings"
"unicode"
@ -20,7 +21,7 @@ import (
"github.com/zalando/go-keyring"
)
func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string, includeImports bool, recursive bool) ([]models.SingleEnvironmentVariable, error) {
func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment string, secretPath string, includeImports bool, recursive bool, tagSlugs string) ([]models.SingleEnvironmentVariable, error) {
serviceTokenParts := strings.SplitN(fullServiceToken, ".", 4)
if len(serviceTokenParts) < 4 {
return nil, fmt.Errorf("invalid service token entered. Please double check your service token and try again")
@ -53,6 +54,7 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
SecretPath: secretPath,
IncludeImport: includeImports,
Recursive: recursive,
TagSlugs: tagSlugs,
})
if err != nil {
@ -76,7 +78,7 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
}
func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool, recursive bool) (models.PlaintextSecretResult, error) {
func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool, recursive bool, tagSlugs string) (models.PlaintextSecretResult, error) {
httpClient := resty.New()
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
@ -86,7 +88,7 @@ func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentNa
Environment: environmentName,
IncludeImport: includeImports,
Recursive: recursive,
// TagSlugs: tagSlugs,
TagSlugs: tagSlugs,
}
if secretsPath != "" {
@ -281,29 +283,36 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
}
res, err := GetPlainTextSecretsV3(loggedInUserDetails.UserCredentials.JTWToken, infisicalDotJson.WorkspaceId,
params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs)
log.Debug().Msgf("GetAllEnvironmentVariables: Trying to fetch secrets JTW token [err=%s]", err)
if err == nil {
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, res.Secrets)
backupEncryptionKey, err := GetBackupEncryptionKey()
if err != nil {
return nil, err
}
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey, res.Secrets)
}
secretsToReturn = res.Secrets
errorToReturn = err
// only attempt to serve cached secrets if no internet connection and if at least one secret cached
if !isConnected {
backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath)
if len(backedSecrets) > 0 {
PrintWarning("Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedSecrets
errorToReturn = err
backupEncryptionKey, _ := GetBackupEncryptionKey()
if backupEncryptionKey != nil {
backedUpSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupEncryptionKey)
if len(backedUpSecrets) > 0 {
PrintWarning("Unable to fetch the latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedUpSecrets
errorToReturn = err
}
}
}
} else {
if params.InfisicalToken != "" {
log.Debug().Msg("Trying to fetch secrets using service token")
secretsToReturn, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
secretsToReturn, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs)
} else if params.UniversalAuthAccessToken != "" {
if params.WorkspaceId == "" {
@ -311,7 +320,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
}
log.Debug().Msg("Trying to fetch secrets using universal auth")
res, err := GetPlainTextSecretsV3(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
res, err := GetPlainTextSecretsV3(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive, params.TagSlugs)
errorToReturn = err
secretsToReturn = res.Secrets
@ -476,71 +485,99 @@ func OverrideSecrets(secrets []models.SingleEnvironmentVariable, secretType stri
return secretsToReturn
}
func WriteBackupSecrets(workspace string, environment string, secretsPath string, secrets []models.SingleEnvironmentVariable) error {
var backedUpSecrets []models.BackupSecretKeyRing
secretValueInKeyRing, err := GetValueInKeyring(INFISICAL_BACKUP_SECRET)
func GetBackupEncryptionKey() ([]byte, error) {
encryptionKey, err := GetValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY)
if err != nil {
if err == keyring.ErrUnsupportedPlatform {
return errors.New("your OS does not support keyring. Consider using a service token https://infisical.com/docs/documentation/platform/token")
} else if err != keyring.ErrNotFound {
return fmt.Errorf("something went wrong, failed to retrieve value from system keyring [error=%v]", err)
return nil, errors.New("your OS does not support keyring. Consider using a service token https://infisical.com/docs/documentation/platform/token")
} else if err == keyring.ErrNotFound {
// generate a new key
randomizedKey := make([]byte, 16)
rand.Read(randomizedKey)
encryptionKey = hex.EncodeToString(randomizedKey)
if err := SetValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY, encryptionKey); err != nil {
return nil, err
}
return []byte(encryptionKey), nil
} else {
return nil, fmt.Errorf("something went wrong, failed to retrieve value from system keyring [error=%v]", err)
}
}
_ = json.Unmarshal([]byte(secretValueInKeyRing), &backedUpSecrets)
return []byte(encryptionKey), nil
}
backedUpSecrets = slices.DeleteFunc(backedUpSecrets, func(e models.BackupSecretKeyRing) bool {
return e.SecretPath == secretsPath && e.ProjectID == workspace && e.Environment == environment
})
newBackupSecret := models.BackupSecretKeyRing{
ProjectID: workspace,
Environment: environment,
SecretPath: secretsPath,
Secrets: secrets,
}
backedUpSecrets = append(backedUpSecrets, newBackupSecret)
func WriteBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("project_secrets_%s_%s_%s.json", workspace, environment, formattedPath)
secrets_backup_folder_name := "secrets-backup"
listOfSecretsMarshalled, err := json.Marshal(backedUpSecrets)
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
if err != nil {
return err
return fmt.Errorf("WriteBackupSecrets: unable to get full config folder path [err=%s]", err)
}
err = SetValueInKeyring(INFISICAL_BACKUP_SECRET, string(listOfSecretsMarshalled))
// create secrets backup directory
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
if _, err := os.Stat(fullPathToSecretsBackupFolder); errors.Is(err, os.ErrNotExist) {
err := os.Mkdir(fullPathToSecretsBackupFolder, os.ModePerm)
if err != nil {
return err
}
}
marshaledSecrets, _ := json.Marshal(secrets)
result, err := crypto.EncryptSymmetric(marshaledSecrets, encryptionKey)
if err != nil {
return fmt.Errorf("StoreUserCredsInKeyRing: unable to store user credentials because [err=%s]", err)
return fmt.Errorf("WriteBackupSecrets: Unable to encrypt local secret backup to file [err=%s]", err)
}
listOfSecretsMarshalled, _ := json.Marshal(result)
err = os.WriteFile(fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName), listOfSecretsMarshalled, 0600)
if err != nil {
return fmt.Errorf("WriteBackupSecrets: Unable to write backup secrets to file [err=%s]", err)
}
return nil
}
func ReadBackupSecrets(workspace string, environment string, secretsPath string) ([]models.SingleEnvironmentVariable, error) {
secretValueInKeyRing, err := GetValueInKeyring(INFISICAL_BACKUP_SECRET)
func ReadBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("project_secrets_%s_%s_%s.json", workspace, environment, formattedPath)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
if err != nil {
if err == keyring.ErrUnsupportedPlatform {
return nil, errors.New("your OS does not support keyring. Consider using a service token https://infisical.com/docs/documentation/platform/token")
} else if err == keyring.ErrNotFound {
return nil, errors.New("credentials not found in system keyring")
} else {
return nil, fmt.Errorf("something went wrong, failed to retrieve value from system keyring [error=%v]", err)
}
return nil, fmt.Errorf("ReadBackupSecrets: unable to write config file because an error occurred when getting config file path [err=%s]", err)
}
var backedUpSecrets []models.BackupSecretKeyRing
err = json.Unmarshal([]byte(secretValueInKeyRing), &backedUpSecrets)
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
if _, err := os.Stat(fullPathToSecretsBackupFolder); errors.Is(err, os.ErrNotExist) {
return nil, nil
}
encryptedBackupSecretsFilePath := fmt.Sprintf("%s/%s", fullPathToSecretsBackupFolder, fileName)
encryptedBackupSecretsAsBytes, err := os.ReadFile(encryptedBackupSecretsFilePath)
if err != nil {
return nil, fmt.Errorf("getUserCredsFromKeyRing: Something went wrong when unmarshalling user creds [err=%s]", err)
return nil, err
}
for _, backupSecret := range backedUpSecrets {
if backupSecret.Environment == environment && backupSecret.ProjectID == workspace && backupSecret.SecretPath == secretsPath {
return backupSecret.Secrets, nil
}
var encryptedBackUpSecrets models.SymmetricEncryptionResult
err = json.Unmarshal(encryptedBackupSecretsAsBytes, &encryptedBackUpSecrets)
if err != nil {
return nil, fmt.Errorf("ReadBackupSecrets: unable to parse encrypted backup secrets. The secrets backup may be malformed [err=%s]", err)
}
return nil, nil
result, err := crypto.DecryptSymmetric(encryptionKey, encryptedBackUpSecrets.CipherText, encryptedBackUpSecrets.AuthTag, encryptedBackUpSecrets.Nonce)
if err != nil {
return nil, fmt.Errorf("ReadBackupSecrets: unable to decrypt encrypted backup secrets [err=%s]", err)
}
var plainTextSecrets []models.SingleEnvironmentVariable
_ = json.Unmarshal(result, &plainTextSecrets)
return plainTextSecrets, nil
}
func DeleteBackupSecrets() error {
// keeping this logic for now. It can be removed later: as more users migrate to the keyring, this folder will no longer be needed and can be removed completely
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
@ -549,8 +586,8 @@ func DeleteBackupSecrets() error {
}
fullPathToSecretsBackupFolder := fmt.Sprintf("%s/%s", fullConfigFileDirPath, secrets_backup_folder_name)
DeleteValueInKeyring(INFISICAL_BACKUP_SECRET)
DeleteValueInKeyring(INFISICAL_BACKUP_SECRET_ENCRYPTION_KEY)
return os.RemoveAll(fullPathToSecretsBackupFolder)
}

@ -11,11 +11,11 @@ func GetCurrentVaultBackend() (string, error) {
}
if configFile.VaultBackendType == "" {
return "auto", nil
return VAULT_BACKEND_AUTO_MODE, nil
}
if configFile.VaultBackendType != "auto" && configFile.VaultBackendType != "file" {
return "auto", nil
if configFile.VaultBackendType != VAULT_BACKEND_AUTO_MODE && configFile.VaultBackendType != VAULT_BACKEND_FILE_MODE {
return VAULT_BACKEND_AUTO_MODE, nil
}
return configFile.VaultBackendType, nil

@ -1,4 +1,4 @@
Warning: Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug
Warning: Unable to fetch the latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤

@ -7,7 +7,6 @@ import (
"github.com/bradleyjkemp/cupaloy/v2"
)
func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
@ -94,7 +93,7 @@ func TestUserAuth_SecretsGetAll(t *testing.T) {
}
// explicitly called here because it should happen directly after successful secretsGetAll
testUserAuth_SecretsGetAllWithoutConnection(t)
// testUserAuth_SecretsGetAllWithoutConnection(t)
}
func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) {
@ -107,7 +106,7 @@ func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) {
// set it to a URL that will always be unreachable
newConfigFile.LoggedInUserDomain = "http://localhost:4999"
util.WriteConfigFile(&newConfigFile)
// restore config file
defer util.WriteConfigFile(&originalConfigFile)
@ -121,4 +120,4 @@ func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) {
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
}

@ -0,0 +1,15 @@
---
title: "Meetings"
sidebarTitle: "Meetings"
description: "The guide to meetings at Infisical."
---
## "Let's schedule a meeting about this"
Being a remote-first company, we try to be as async as possible. When an issue arises, it's best to create a public Slack thread and tag all the necessary team members. Otherwise, if you were to "put a meeting on a calendar", the decision-making process would inevitably slow down by at least a day (e.g., trying to find the right time for folks in different time zones is not always straightforward).
In other words, we have almost no (recurring) meetings and prefer written communication or quick Slack huddles.
## Weekly All-hands
All-hands is the single recurring meeting that we run every Monday at 8:30am PT. Typically, we would discuss everything important that happened during the previous week and plan out the week ahead. This is also an opportunity to bring up any important topics in front of the whole company (but feel free to post those in Slack too).

@ -1,14 +1,16 @@
---
title: "Spenging Money"
title: "Spending Money"
sidebarTitle: "Spending Money"
description: "The guide to spending money at Infisical."
---
Fairly frequently, you might run into situations when you need to spend company money.
**Please spend money in a way that you think is in the best interest of the company.**
<Note>
Please spend money in a way that you think is in the best interest of the company.
</Note>
## Trivial expenses
# Trivial expenses
We don't want you to be slowed down because you're waiting for an approval to purchase some SaaS. For trivial expenses: **Just do it**.
@ -22,6 +24,35 @@ Make sure you keep copies for all receipts. If you expense something on a compan
You should default to using your company card in all cases - it has no transaction fees. If using your personal card is unavoidable, please reach out to Maidul to get it reimbursed manually.
# Equipment
Infisical is a remote-first company, so we understand the importance of having a comfortable work setup. To support this, we provide allowances for essential office equipment.
### Desk & Chair
Most people already have a comfortable desk and chair, but if you need an upgrade, we offer the following allowances.
While we're not yet able to provide the latest and greatest, we strive to be reasonable given the stage of our company.
**Desk**: $150 USD
**Chair**: $150 USD
### Laptop
Each team member will receive a company-issued MacBook Pro before they start their first day.
### Notes
1. All equipment purchased using company allowances remains the property of Infisical.
2. Keep all receipts for equipment purchases and submit them for reimbursement.
3. If you leave Infisical, you may be required to return company-owned equipment.
Please note that we're unable to offer a split payment option where Infisical pays half and you pay half for equipment exceeding the allowance.
This is because we don't yet have a formal HR department to handle such logistics.
For any equipment-related questions, please reach out to Maidul.
## Brex
We use Brex as our primary credit card provider. Don't have a company card yet? Reach out to Maidul.

@ -59,7 +59,8 @@
"handbook/onboarding",
"handbook/spending-money",
"handbook/time-off",
"handbook/hiring"
"handbook/hiring",
"handbook/meetings"
]
}
],

@ -30,8 +30,5 @@ description: "Change the vault type in Infisical"
## Description
To safeguard your login details when using the CLI, Infisical places them in a system vault or an encrypted text file, protected by a passphrase that only the user knows.
<Tip>To avoid constantly entering your passphrase when using the `file` vault type, set the `INFISICAL_VAULT_FILE_PASSPHRASE` environment variable with your password in your shell</Tip>
To safeguard your login details when using the CLI, Infisical attempts to store them in a system keyring. If a system keyring cannot be found on your machine, the data is stored in a config file.

@ -16,7 +16,7 @@ Before you begin, you'll first need to choose a method of authentication with AW
<Steps>
<Step title="Create the Managing User IAM Role">
1. Navigate to the [Create IAM Role](https://console.aws.amazon.com/iamv2/home#/roles/create?step=selectEntities) page in your AWS Console.
![IAM Role Creation](../../images/integrations/aws/integration-aws-iam-assume-role.png)
![IAM Role Creation](/images/integrations/aws/integration-aws-iam-assume-role.png)
2. Select **AWS Account** as the **Trusted Entity Type**.
3. Choose **Another AWS Account** and enter **381492033652** (Infisical AWS Account ID). This restricts the role to be assumed only by Infisical. If you are self-hosting, provide the AWS account number where Infisical is hosted.

@ -4,10 +4,10 @@ description: "Learn how to configure Google SAML for Infisical SSO."
---
<Info>
Google SAML SSO feature is a paid feature.
If you're using Infisical Cloud, then it is available under the **Pro Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
Google SAML SSO feature is a paid feature. If you're using Infisical Cloud,
then it is available under the **Pro Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license
to use it.
</Info>
<Steps>
@ -15,8 +15,9 @@ description: "Learn how to configure Google SAML for Infisical SSO."
In Infisical, head to your Organization Settings > Authentication > SAML SSO Configuration and select **Set up SAML SSO**.
Next, note the **ACS URL** and **SP Entity ID** to use when configuring the Google SAML application.
![Google SAML initial configuration](../../../images/sso/google-saml/init-config.png)
</Step>
<Step title="Create a SAML application in Google">
2.1. In your [Google Admin console](https://support.google.com/a/answer/182076), head to Menu > Apps > Web and mobile apps and
@ -32,7 +33,7 @@ description: "Learn how to configure Google SAML for Infisical SSO."
![Google SAML custom app details](../../../images/sso/google-saml/custom-saml-app-config.png)
2.4. Back in Infisical, set **SSO URL**, **IdP Entity ID**, and **Certificate** to the corresponding items from step 2.3.
2.4. Back in Infisical, set **SSO URL** and **Certificate** to the corresponding items from step 2.3.
![Google SAML Infisical config](../../../images/sso/google-saml/infisical-config.png)
@ -41,7 +42,7 @@ description: "Learn how to configure Google SAML for Infisical SSO."
Also, check the **Signed response** checkbox.
![Google SAML app config 2](../../../images/sso/google-saml/custom-saml-app-config-2.png)
2.6. In the **Attribute mapping** tab, configure the following map:
- **First name** -> **firstName**
@ -49,7 +50,7 @@ description: "Learn how to configure Google SAML for Infisical SSO."
- **Primary email** -> **email**
![Google SAML attribute mapping](../../../images/sso/google-saml/attribute-mapping.png)
Click **Finish**.
</Step>
<Step title="Assign users in Google Workspace to the application">
@ -57,11 +58,11 @@ description: "Learn how to configure Google SAML for Infisical SSO."
and press on **User access**.
![Google SAML user access](../../../images/sso/google-saml/user-access.png)
To assign everyone in your organization to the application, click **On for everyone** or **Off for everyone** and then click **Save**.
You can also assign an organizational unit or set of users to an application; you can learn more about that [here](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app).
![Google SAML user access assignment](../../../images/sso/google-saml/user-access-assign.png)
</Step>
<Step title="Enable SAML SSO in Infisical">
@ -75,21 +76,24 @@ description: "Learn how to configure Google SAML for Infisical SSO."
To enforce SAML SSO, you're required to test out the SAML connection by successfully authenticating at least one Google user with Infisical.
Once you've completed this requirement, you can toggle the **Enforce SAML SSO** button to enforce SAML SSO.
<Warning>
We recommend ensuring that your account has access to the application in Google
prior to enforcing SAML SSO to prevent any unintended issues.
</Warning>
</Step>
</Steps>
<Note>
If you're configuring SAML SSO on a self-hosted instance of Infisical, make sure to
set the `AUTH_SECRET` and `SITE_URL` environment variable for it to work:
- `AUTH_SECRET`: A secret key used for signing and verifying JWT. This can be a random 32-byte base64 string generated with `openssl rand -base64 32`.
- `SITE_URL`: The URL of your self-hosted instance of Infisical - should be an absolute URL including the protocol (e.g. https://app.infisical.com)
If you're configuring SAML SSO on a self-hosted instance of Infisical, make
sure to set the `AUTH_SECRET` and `SITE_URL` environment variable for it to
work: - `AUTH_SECRET`: A secret key used for signing and verifying JWT. This
can be a random 32-byte base64 string generated with `openssl rand -base64
32`. - `SITE_URL`: The URL of your self-hosted instance of Infisical - should
be an absolute URL including the protocol (e.g. https://app.infisical.com)
</Note>
References:
- Google's guide to [set up your own custom SAML app](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app).
- Google's guide to [set up your own custom SAML app](https://support.google.com/a/answer/6087519?hl=en#add_custom_saml&turn_on&verify_sso&&zippy=%2Cstep-add-the-custom-saml-app%2Cstep-turn-on-your-saml-app%2Cstep-verify-that-sso-is-working-with-your-custom-app).

Binary image file changed (not shown): 605 KiB before, 219 KiB after.

@ -1,5 +1,5 @@
---
title: "Kubernetes"
title: "Kubernetes Operator"
description: "How to use Infisical to inject secrets into Kubernetes clusters."
---
@ -9,6 +9,10 @@ The Infisical Secrets Operator is a Kubernetes controller that retrieves secrets
It uses an `InfisicalSecret` resource to specify authentication and storage methods.
The operator continuously updates secrets and can also reload dependent deployments automatically.
<Note>
If you are already using the External Secrets operator, you can view the integration documentation for it [here](https://external-secrets.io/latest/provider/infisical/).
</Note>
## Install Operator
The operator can be installed via [Helm](https://helm.sh) or [kubectl](https://github.com/kubernetes/kubectl)

@ -155,7 +155,7 @@
]
},
{
"group": "Key Management",
"group": "Key Management (KMS)",
"pages": [
"documentation/platform/kms/overview",
"documentation/platform/kms/aws-kms",

@ -3,6 +3,7 @@ import { Controller, useForm } from "react-hook-form";
import { faCheck } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { zodResolver } from "@hookform/resolvers/zod";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { createNotification } from "@app/components/notifications";
@ -87,7 +88,13 @@ type Props = {
};
const createTagSchema = z.object({
name: z.string().trim(),
slug: z
.string()
.trim()
.toLowerCase()
.refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
}),
color: z.string().trim()
});
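For context, here is a small self-contained sketch (not the component's actual export) of how this slug rule behaves; the sample inputs are illustrative:

```ts
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

// Standalone copy of the slug rule above, for illustration only.
const slugSchema = z
  .string()
  .trim()
  .toLowerCase()
  .refine((v) => slugify(v) === v, {
    message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
  });

console.log(slugSchema.safeParse("my-tag").success); // true: "my-tag" survives slugify unchanged
console.log(slugSchema.safeParse("My Tag").success); // false: lowercased to "my tag", but slugify yields "my-tag"
```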
@ -110,7 +117,7 @@ export const CreateTagModal = ({ isOpen, onToggle }: Props): JSX.Element => {
} = useForm<FormData>({
resolver: zodResolver(createTagSchema)
});
const { currentWorkspace } = useWorkspace();
const workspaceId = currentWorkspace?.id || "";
@ -123,13 +130,12 @@ export const CreateTagModal = ({ isOpen, onToggle }: Props): JSX.Element => {
if (!isOpen) reset();
}, [isOpen]);
const onFormSubmit = async ({ name, color }: FormData) => {
const onFormSubmit = async ({ slug, color }: FormData) => {
try {
await createWsTag({
workspaceID: workspaceId,
tagName: name,
tagColor: color,
tagSlug: name.replace(" ", "_")
tagSlug: slug
});
onToggle(false);
reset();
@ -155,11 +161,11 @@ export const CreateTagModal = ({ isOpen, onToggle }: Props): JSX.Element => {
<form onSubmit={handleSubmit(onFormSubmit)}>
<Controller
control={control}
name="name"
name="slug"
defaultValue=""
render={({ field, fieldState: { error } }) => (
<FormControl label="Tag Name" isError={Boolean(error)} errorText={error?.message}>
<Input {...field} placeholder="Type your tag name" />
<FormControl label="Tag Slug" isError={Boolean(error)} errorText={error?.message}>
<Input {...field} placeholder="Type your tag slug" />
</FormControl>
)}
/>

@ -1,4 +1,4 @@
import { useEffect, useState } from "react";
import { ReactNode, useEffect, useState } from "react";
import { useToggle } from "@app/hooks";
@ -16,6 +16,7 @@ type Props = {
subTitle?: string;
onDeleteApproved: () => Promise<void>;
buttonText?: string;
children?: ReactNode;
};
export const DeleteActionModal = ({
@ -26,7 +27,8 @@ export const DeleteActionModal = ({
onDeleteApproved,
title,
subTitle = "This action is irreversible.",
buttonText = "Delete"
buttonText = "Delete",
children
}: Props): JSX.Element => {
const [inputData, setInputData] = useState("");
const [isLoading, setIsLoading] = useToggle();
@ -94,9 +96,10 @@ export const DeleteActionModal = ({
<Input
value={inputData}
onChange={(e) => setInputData(e.target.value)}
placeholder="Type confirm..."
placeholder={`Type ${deleteKey} here`}
/>
</FormControl>
{children}
</form>
</ModalContent>
</Modal>

@ -0,0 +1,14 @@
/** Extracts the key and value from a passed in env string based on the provided delimiters. */
export const getKeyValue = (pastedContent: string, delimiters: string[]) => {
const foundDelimiter = delimiters.find((delimiter) => pastedContent.includes(delimiter));
if (!foundDelimiter) {
return { key: pastedContent.trim(), value: "" };
}
const [key, value] = pastedContent.split(foundDelimiter);
return {
key: key.trim(),
value: (value ?? "").trim()
};
};
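A short usage sketch of this helper (the pasted strings below are made up; the `[":", "="]` delimiter order mirrors the paste handlers added further down in this diff):

```ts
import { getKeyValue } from "@app/helpers/parseEnvVar";

// "KEY=value" input: no ":" is present, so "=" is the matched delimiter.
console.log(getKeyValue("API_KEY=supersecret123", [":", "="]));
// -> { key: "API_KEY", value: "supersecret123" }

// Whitespace around both parts is trimmed.
console.log(getKeyValue("REDIS_HOST: localhost", [":", "="]));
// -> { key: "REDIS_HOST", value: "localhost" }

// No delimiter at all: the whole string becomes the key and the value stays empty.
console.log(getKeyValue("STANDALONE_KEY", [":", "="]));
// -> { key: "STANDALONE_KEY", value: "" }

// Caveat: delimiters are matched in array order, so a pasted value containing ":"
// (e.g. "SITE=https://example.com") splits at the colon rather than at "=".
```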

@ -110,8 +110,15 @@ export const useCreateIntegration = () => {
export const useDeleteIntegration = () => {
const queryClient = useQueryClient();
return useMutation<{}, {}, { id: string; workspaceId: string }>({
mutationFn: ({ id }) => apiRequest.delete(`/api/v1/integration/${id}`),
return useMutation<
{},
{},
{ id: string; workspaceId: string; shouldDeleteIntegrationSecrets: boolean }
>({
mutationFn: ({ id, shouldDeleteIntegrationSecrets }) =>
apiRequest.delete(
`/api/v1/integration/${id}?shouldDeleteIntegrationSecrets=${shouldDeleteIntegrationSecrets}`
),
onSuccess: (_, { workspaceId }) => {
queryClient.invalidateQueries(workspaceKeys.getWorkspaceIntegrations(workspaceId));
queryClient.invalidateQueries(workspaceKeys.getWorkspaceAuthorization(workspaceId));

@ -5,6 +5,5 @@ export type TRateLimit = {
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};

@ -28,11 +28,10 @@ export const useCreateWsTag = () => {
const queryClient = useQueryClient();
return useMutation<WsTag, {}, CreateTagDTO>({
mutationFn: async ({ workspaceID, tagName, tagColor, tagSlug }) => {
mutationFn: async ({ workspaceID, tagColor, tagSlug }) => {
const { data } = await apiRequest.post<{ workspaceTag: WsTag }>(
`/api/v1/workspace/${workspaceID}/tags`,
{
name: tagName,
color: tagColor || "",
slug: tagSlug
}

@ -2,7 +2,6 @@ export type UserWsTags = WsTag[];
export type WsTag = {
id: string;
name: string;
slug: string;
color?: string;
projectId: string;
@ -16,7 +15,6 @@ export type WorkspaceTag = { id: string; name: string; slug: string };
export type CreateTagDTO = {
workspaceID: string;
tagSlug: string;
tagName: string;
tagColor: string;
};

@ -277,18 +277,14 @@ export default function AWSParameterStoreCreateIntegrationPage() {
<div className="mt-2 ml-1">
<Switch
id="delete-aws"
onCheckedChange={() => setShouldDisableDelete(!shouldDisableDelete)}
onCheckedChange={setShouldDisableDelete}
isChecked={shouldDisableDelete}
>
Disable deleting secrets in AWS Parameter Store
</Switch>
</div>
<div className="mt-4 ml-1">
<Switch
id="tag-aws"
onCheckedChange={() => setShouldTag(!shouldTag)}
isChecked={shouldTag}
>
<Switch id="tag-aws" onCheckedChange={setShouldTag} isChecked={shouldTag}>
Tag in AWS Parameter Store
</Switch>
</div>

@ -106,9 +106,13 @@ export const IntegrationsPage = withProjectPermission(
handleProviderIntegration(provider);
};
const handleIntegrationDelete = async (integrationId: string, cb: () => void) => {
const handleIntegrationDelete = async (
integrationId: string,
shouldDeleteIntegrationSecrets: boolean,
cb: () => void
) => {
try {
await deleteIntegration({ id: integrationId, workspaceId });
await deleteIntegration({ id: integrationId, workspaceId, shouldDeleteIntegrationSecrets });
if (cb) cb();
createNotification({
type: "success",
@ -152,7 +156,7 @@ export const IntegrationsPage = withProjectPermission(
isLoading={isIntegrationLoading}
integrations={integrations}
environments={environments}
onIntegrationDelete={({ id }, cb) => handleIntegrationDelete(id, cb)}
onIntegrationDelete={handleIntegrationDelete}
workspaceId={workspaceId}
/>
<CloudIntegrationSection

@ -7,6 +7,7 @@ import { integrationSlugNameMapping } from "public/data/frequentConstants";
import { ProjectPermissionCan } from "@app/components/permissions";
import {
Button,
Checkbox,
DeleteActionModal,
EmptyState,
FormLabel,
@ -16,7 +17,7 @@ import {
Tooltip
} from "@app/components/v2";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/context";
import { usePopUp } from "@app/hooks";
import { usePopUp, useToggle } from "@app/hooks";
import { useSyncIntegration } from "@app/hooks/api/integrations/queries";
import { IntegrationMappingBehavior } from "@app/hooks/api/integrations/types";
import { TIntegration } from "@app/hooks/api/types";
@ -25,7 +26,11 @@ type Props = {
environments: Array<{ name: string; slug: string; id: string }>;
integrations?: TIntegration[];
isLoading?: boolean;
onIntegrationDelete: (integration: TIntegration, cb: () => void) => void;
onIntegrationDelete: (
integrationId: string,
shouldDeleteIntegrationSecrets: boolean,
cb: () => void
) => Promise<void>;
workspaceId: string;
};
@ -37,10 +42,12 @@ export const IntegrationsSection = ({
workspaceId
}: Props) => {
const { popUp, handlePopUpOpen, handlePopUpClose, handlePopUpToggle } = usePopUp([
"deleteConfirmation"
"deleteConfirmation",
"deleteSecretsConfirmation"
] as const);
const { mutate: syncIntegration } = useSyncIntegration();
const [shouldDeleteSecrets, setShouldDeleteSecrets] = useToggle(false);
return (
<div className="mb-8">
@ -249,7 +256,10 @@ export const IntegrationsSection = ({
<div className="flex items-end opacity-80 duration-200 hover:opacity-100">
<Tooltip content="Remove Integration">
<IconButton
onClick={() => handlePopUpOpen("deleteConfirmation", integration)}
onClick={() => {
setShouldDeleteSecrets.off();
handlePopUpOpen("deleteConfirmation", integration);
}}
ariaLabel="delete"
isDisabled={!isAllowed}
colorSchema="danger"
@ -281,11 +291,49 @@ export const IntegrationsSection = ({
(popUp?.deleteConfirmation?.data as TIntegration)?.integration ||
""
}
onDeleteApproved={async () =>
onIntegrationDelete(popUp?.deleteConfirmation.data as TIntegration, () =>
handlePopUpClose("deleteConfirmation")
)
}
onDeleteApproved={async () => {
if (shouldDeleteSecrets) {
handlePopUpOpen("deleteSecretsConfirmation");
return;
}
await onIntegrationDelete(
(popUp?.deleteConfirmation.data as TIntegration).id,
false,
() => handlePopUpClose("deleteConfirmation")
);
}}
>
{(popUp?.deleteConfirmation?.data as TIntegration)?.integration === "github" && (
<div className="mt-4">
<Checkbox
id="delete-integration-secrets"
checkIndicatorBg="text-white"
onCheckedChange={() => setShouldDeleteSecrets.toggle()}
>
Delete previously synced secrets from the destination
</Checkbox>
</div>
)}
</DeleteActionModal>
<DeleteActionModal
isOpen={popUp.deleteSecretsConfirmation.isOpen}
title={`Are you sure you also want to delete secrets on ${
(popUp?.deleteConfirmation.data as TIntegration)?.integration
}?`}
subTitle="By confirming, you acknowledge that all secrets managed by this integration will be removed from the destination. This action is irreversible."
onChange={(isOpen) => handlePopUpToggle("deleteSecretsConfirmation", isOpen)}
deleteKey="confirm"
onDeleteApproved={async () => {
await onIntegrationDelete(
(popUp?.deleteConfirmation.data as TIntegration).id,
true,
() => {
handlePopUpClose("deleteSecretsConfirmation");
handlePopUpClose("deleteConfirmation");
}
);
}}
/>
</div>
);

@ -97,7 +97,7 @@ export const SecretApprovalRequestChangeItem = ({
</Td>
<Td>{secretVersion?.secretComment}</Td>
<Td>
{secretVersion?.tags?.map(({ name, id: tagId, color }) => (
{secretVersion?.tags?.map(({ slug, id: tagId, color }) => (
<Tag
className="flex w-min items-center space-x-2"
key={`${secretVersion.id}-${tagId}`}
@ -106,7 +106,7 @@ export const SecretApprovalRequestChangeItem = ({
className="h-3 w-3 rounded-full"
style={{ backgroundColor: color || "#bec2c8" }}
/>
<div className="text-sm">{name}</div>
<div className="text-sm">{slug}</div>
</Tag>
))}
</Td>
@ -119,7 +119,7 @@ export const SecretApprovalRequestChangeItem = ({
</Td>
<Td>{newVersion?.secretComment}</Td>
<Td>
{newVersion?.tags?.map(({ name, id: tagId, color }) => (
{newVersion?.tags?.map(({ slug, id: tagId, color }) => (
<Tag
className="flex w-min items-center space-x-2"
key={`${newVersion.id}-${tagId}`}
@ -128,7 +128,7 @@ export const SecretApprovalRequestChangeItem = ({
className="h-3 w-3 rounded-full"
style={{ backgroundColor: color || "#bec2c8" }}
/>
<div className="text-sm">{name}</div>
<div className="text-sm">{slug}</div>
</Tag>
))}
</Td>
@ -157,7 +157,7 @@ export const SecretApprovalRequestChangeItem = ({
</Td>
<Td>
{(op === CommitType.CREATE ? newVersion?.tags : secretVersion?.tags)?.map(
({ name, id: tagId, color }) => (
({ slug, id: tagId, color }) => (
<Tag
className="flex w-min items-center space-x-2"
key={`${
@ -168,7 +168,7 @@ export const SecretApprovalRequestChangeItem = ({
className="h-3 w-3 rounded-full"
style={{ backgroundColor: color || "#bec2c8" }}
/>
<div className="text-sm">{name}</div>
<div className="text-sm">{slug}</div>
</Tag>
)
)}

@ -327,7 +327,7 @@ export const ActionBar = ({
</DropdownSubMenuTrigger>
<DropdownSubMenuContent className="rounded-l-none">
<DropdownMenuLabel>Apply tags to filter secrets</DropdownMenuLabel>
{tags.map(({ id, name, color }) => (
{tags.map(({ id, slug, color }) => (
<DropdownMenuItem
onClick={(evt) => {
evt.preventDefault();
@ -342,7 +342,7 @@ export const ActionBar = ({
className="mr-2 h-2 w-2 rounded-full"
style={{ background: color || "#bec2c8" }}
/>
{name}
{slug}
</div>
</DropdownMenuItem>
))}

@ -1,3 +1,4 @@
import { ClipboardEvent } from "react";
import { Controller, useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { z } from "zod";
@ -5,6 +6,7 @@ import { z } from "zod";
import { createNotification } from "@app/components/notifications";
import { Button, FormControl, Input, Modal, ModalContent } from "@app/components/v2";
import { InfisicalSecretInput } from "@app/components/v2/InfisicalSecretInput";
import { getKeyValue } from "@app/helpers/parseEnvVar";
import { useCreateSecretV3 } from "@app/hooks/api";
import { SecretType } from "@app/hooks/api/types";
@ -38,6 +40,7 @@ export const CreateSecretForm = ({
handleSubmit,
control,
reset,
setValue,
formState: { errors, isSubmitting }
} = useForm<TFormSchema>({ resolver: zodResolver(typeSchema) });
const { isOpen } = usePopUpState(PopUpNames.CreateSecretForm);
@ -73,6 +76,16 @@ export const CreateSecretForm = ({
}
};
const handlePaste = (e: ClipboardEvent<HTMLInputElement>) => {
e.preventDefault();
const delimitters = [":", "="];
const pastedContent = e.clipboardData.getData("text");
const { key, value } = getKeyValue(pastedContent, delimitters);
setValue("key", key);
setValue("value", value);
};
return (
<Modal
isOpen={isOpen}
@ -83,10 +96,16 @@ export const CreateSecretForm = ({
subTitle="Add a secret to the particular environment and folder"
>
<form onSubmit={handleSubmit(handleFormSubmit)} noValidate>
<FormControl label="Key" isRequired isError={Boolean(errors?.key)} errorText={errors?.key?.message}>
<FormControl
label="Key"
isRequired
isError={Boolean(errors?.key)}
errorText={errors?.key?.message}
>
<Input
{...register("key")}
placeholder="Type your secret name"
onPaste={handlePaste}
autoCapitalization={autoCapitalize}
/>
</FormControl>

@ -254,7 +254,7 @@ export const SecretDetailSidebar = ({
)}
<FormControl label="Tags" className="">
<div className="grid auto-cols-min grid-flow-col gap-2 overflow-hidden pt-2">
{fields.map(({ tagColor, id: formId, name, id }) => (
{fields.map(({ tagColor, id: formId, slug, id }) => (
<Tag
className="flex w-min items-center space-x-2"
key={formId}
@ -271,7 +271,7 @@ export const SecretDetailSidebar = ({
className="h-3 w-3 rounded-full"
style={{ backgroundColor: tagColor || "#bec2c8" }}
/>
<div className="text-sm">{name}</div>
<div className="text-sm">{slug}</div>
</Tag>
))}
<DropdownMenu>
@ -296,7 +296,7 @@ export const SecretDetailSidebar = ({
<DropdownMenuContent align="end" className="z-[100]">
<DropdownMenuLabel>Add tags to this secret</DropdownMenuLabel>
{tags.map((tag) => {
const { id: tagId, name, color } = tag;
const { id: tagId, slug, color } = tag;
const isSelected = selectedTagsGroupById?.[tagId];
return (
@ -311,7 +311,7 @@ export const SecretDetailSidebar = ({
className="mr-2 h-2 w-2 rounded-full"
style={{ background: color || "#bec2c8" }}
/>
{name}
{slug}
</div>
</DropdownMenuItem>
);

@ -336,7 +336,7 @@ export const SecretItem = memo(
<DropdownMenuContent align="end">
<DropdownMenuLabel>Add tags to this secret</DropdownMenuLabel>
{tags.map((tag) => {
const { id: tagId, name, color } = tag;
const { id: tagId, slug, color } = tag;
const isTagSelected = selectedTagsGroupById?.[tagId];
return (
@ -358,7 +358,7 @@ export const SecretItem = memo(
className="mr-2 h-2 w-2 rounded-full"
style={{ background: color || "#bec2c8" }}
/>
{name}
{slug}
</div>
</DropdownMenuItem>
);

@ -49,7 +49,6 @@ export const formSchema = z.object({
tags: z
.object({
id: z.string(),
name: z.string(),
slug: z.string(),
tagColor: z.string().optional()
})

@ -151,7 +151,7 @@ export const SecretItem = ({ mode, preSecret, postSecret }: Props) => {
<Td className="border-r border-mineshaft-600">Tags</Td>
{isModified && (
<Td className="border-r border-mineshaft-600">
{preSecret?.tags?.map(({ name, id: tagId, color }) => (
{preSecret?.tags?.map(({ slug, id: tagId, color }) => (
<Tag
className="flex w-min items-center space-x-2"
key={`${preSecret.id}-${tagId}`}
@ -160,13 +160,13 @@ export const SecretItem = ({ mode, preSecret, postSecret }: Props) => {
className="h-3 w-3 rounded-full"
style={{ backgroundColor: color || "#bec2c8" }}
/>
<div className="text-sm">{name}</div>
<div className="text-sm">{slug}</div>
</Tag>
))}
</Td>
)}
<Td>
{postSecret?.tags?.map(({ name, id: tagId, color }) => (
{postSecret?.tags?.map(({ slug, id: tagId, color }) => (
<Tag
className="flex w-min items-center space-x-2"
key={`${postSecret.id}-${tagId}`}
@ -175,7 +175,7 @@ export const SecretItem = ({ mode, preSecret, postSecret }: Props) => {
className="h-3 w-3 rounded-full"
style={{ backgroundColor: color || "#bec2c8" }}
/>
<div className="text-sm">{name}</div>
<div className="text-sm">{slug}</div>
</Tag>
))}
</Td>

@ -1,3 +1,4 @@
import { ClipboardEvent } from "react";
import { Controller, useForm } from "react-hook-form";
import { faWarning } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
@ -17,8 +18,9 @@ import {
} from "@app/components/v2";
import { InfisicalSecretInput } from "@app/components/v2/InfisicalSecretInput";
import { useWorkspace } from "@app/context";
import { getKeyValue } from "@app/helpers/parseEnvVar";
import { useCreateFolder, useCreateSecretV3, useUpdateSecretV3 } from "@app/hooks/api";
import { SecretType,SecretV3RawSanitized } from "@app/hooks/api/types";
import { SecretType, SecretV3RawSanitized } from "@app/hooks/api/types";
const typeSchema = z
.object({
@ -54,6 +56,7 @@ export const CreateSecretForm = ({
control,
reset,
watch,
setValue,
formState: { isSubmitting, errors }
} = useForm<TFormSchema>({ resolver: zodResolver(typeSchema) });
const newSecretKey = watch("key");
@ -133,6 +136,17 @@ export const CreateSecretForm = ({
});
}
};
const handlePaste = (e: ClipboardEvent<HTMLInputElement>) => {
e.preventDefault();
const delimitters = [":", "="];
const pastedContent = e.clipboardData.getData("text");
const { key, value } = getKeyValue(pastedContent, delimitters);
setValue("key", key);
setValue("value", value);
};
return (
<Modal isOpen={isOpen} onOpenChange={onTogglePopUp}>
<ModalContent
@ -141,10 +155,16 @@ export const CreateSecretForm = ({
subTitle="Create & update a secret across many environments"
>
<form onSubmit={handleSubmit(handleFormSubmit)} noValidate>
<FormControl label="Key" isRequired isError={Boolean(errors?.key)} errorText={errors?.key?.message}>
<FormControl
label="Key"
isRequired
isError={Boolean(errors?.key)}
errorText={errors?.key?.message}
>
<Input
{...register("key")}
placeholder="Type your secret name"
onPaste={handlePaste}
autoCapitalization={currentWorkspace?.autoCapitalization}
/>
</FormControl>

@ -62,7 +62,7 @@ export const SSOModal = ({ popUp, handlePopUpClose, handlePopUpToggle, hideDelet
const [isDeletePopupOpen, setIsDeletePopupOpen] = useToggle();
const { data } = useGetSSOConfig(currentOrg?.id ?? "");
const { control, handleSubmit, reset, watch } = useForm<AddSSOFormData>({
const { control, handleSubmit, reset, watch, setValue, getValues } = useForm<AddSSOFormData>({
defaultValues: {
authProvider: AuthProvider.OKTA_SAML
},
@ -188,8 +188,8 @@ export const SSOModal = ({ popUp, handlePopUpClose, handlePopUpToggle, hideDelet
entityId: "SP Entity ID",
entryPoint: "SSO URL",
entryPointPlaceholder: "https://accounts.google.com/o/saml2/idp?idpid=xxx",
issuer: "IdP Entity ID",
issuerPlaceholder: "https://accounts.google.com/o/saml2/idp?idpid=xxx"
issuer: "Issuer",
issuerPlaceholder: window.origin
};
default:
return {
@ -204,6 +204,11 @@ export const SSOModal = ({ popUp, handlePopUpClose, handlePopUpToggle, hideDelet
};
const authProvider = watch("authProvider");
useEffect(() => {
if (authProvider === AuthProvider.GOOGLE_SAML && getValues("issuer") === "") {
setValue("issuer", window.origin);
}
}, [authProvider]);
return (
<>

@ -1,6 +1,7 @@
import { Controller, useForm } from "react-hook-form";
import { yupResolver } from "@hookform/resolvers/yup";
import * as yup from "yup";
import { zodResolver } from "@hookform/resolvers/zod";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { createNotification } from "@app/components/notifications";
import { Button, FormControl, Input, Modal, ModalClose, ModalContent } from "@app/components/v2";
@ -8,11 +9,13 @@ import { useWorkspace } from "@app/context";
import { useCreateWsTag } from "@app/hooks/api";
import { UsePopUpState } from "@app/hooks/usePopUp";
const schema = yup.object({
name: yup.string().required().label("Tag Name")
const schema = z.object({
slug: z.string().refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
})
});
export type FormData = yup.InferType<typeof schema>;
export type FormData = z.infer<typeof schema>;
type Props = {
popUp: UsePopUpState<["CreateSecretTag", "deleteTagConfirmation"]>;
@ -26,7 +29,6 @@ type Props = {
};
export const AddSecretTagModal = ({ popUp, handlePopUpClose, handlePopUpToggle }: Props) => {
const { currentWorkspace } = useWorkspace();
const createWsTag = useCreateWsTag();
const {
@ -35,17 +37,16 @@ export const AddSecretTagModal = ({ popUp, handlePopUpClose, handlePopUpToggle }
handleSubmit,
formState: { isSubmitting }
} = useForm<FormData>({
resolver: yupResolver(schema)
resolver: zodResolver(schema)
});
const onFormSubmit = async ({ name }: FormData) => {
const onFormSubmit = async ({ slug }: FormData) => {
try {
if (!currentWorkspace?.id) return;
await createWsTag.mutateAsync({
workspaceID: currentWorkspace?.id,
tagName: name,
tagSlug: name.replace(/\s+/g, " ").replace(" ", "_"),
tagSlug: slug,
tagColor: ""
});
@ -80,11 +81,11 @@ export const AddSecretTagModal = ({ popUp, handlePopUpClose, handlePopUpToggle }
<form onSubmit={handleSubmit(onFormSubmit)}>
<Controller
control={control}
name="name"
name="slug"
defaultValue=""
render={({ field, fieldState: { error } }) => (
<FormControl label="Tag Name" isError={Boolean(error)} errorText={error?.message}>
<Input {...field} placeholder="Type your tag name" />
<FormControl label="Tag Slug" isError={Boolean(error)} errorText={error?.message}>
<Input {...field} placeholder="Type your tag slug" />
</FormControl>
)}
/>

@ -40,7 +40,6 @@ export const SecretTagsTable = ({ handlePopUpOpen }: Props) => {
<Table>
<THead>
<Tr>
<Th>Tag</Th>
<Th>Slug</Th>
<Th aria-label="button" />
</Tr>
@ -49,9 +48,8 @@ export const SecretTagsTable = ({ handlePopUpOpen }: Props) => {
{isLoading && <TableSkeleton columns={3} innerKey="secret-tags" />}
{!isLoading &&
data &&
data.map(({ id, name, slug }) => (
<Tr key={name}>
<Td>{name}</Td>
data.map(({ id, slug }) => (
<Tr key={id}>
<Td>{slug}</Td>
<Td className="flex items-center justify-end">
<ProjectPermissionCan
@ -62,7 +60,7 @@ export const SecretTagsTable = ({ handlePopUpOpen }: Props) => {
<IconButton
onClick={() =>
handlePopUpOpen("deleteTagConfirmation", {
name,
name: slug,
id
})
}

@ -61,7 +61,7 @@ export const ShareSecretsTable = ({ handlePopUpOpen }: Props) => {
</Table>
{!isLoading &&
data?.secrets &&
data.secrets.length >= perPage &&
data?.totalCount >= perPage &&
data?.totalCount !== undefined && (
<Pagination
count={data.totalCount}
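A quick worked illustration, with made-up numbers, of why the pagination check moves from the current page's length to the total count:

```ts
// 40 shared secrets, 25 per page, currently viewing the last (second) page.
const perPage = 25;
const totalCount = 40;
const secretsOnCurrentPage = 15; // only the remainder comes back for the last page

console.log(secretsOnCurrentPage >= perPage); // false -> the old check hid the pagination controls
console.log(totalCount >= perPage);           // true  -> the new check keeps them visible
```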

@ -15,7 +15,6 @@ const formSchema = z.object({
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
});
@ -41,7 +40,6 @@ export const RateLimitPanel = () => {
authRateLimit: rateLimit?.authRateLimit ?? 60,
inviteUserRateLimit: rateLimit?.inviteUserRateLimit ?? 30,
mfaRateLimit: rateLimit?.mfaRateLimit ?? 20,
creationLimit: rateLimit?.creationLimit ?? 30,
publicEndpointLimit: rateLimit?.publicEndpointLimit ?? 30
}
});
@ -60,7 +58,6 @@ export const RateLimitPanel = () => {
authRateLimit,
inviteUserRateLimit,
mfaRateLimit,
creationLimit,
publicEndpointLimit
} = formData;
@ -71,7 +68,6 @@ export const RateLimitPanel = () => {
authRateLimit,
inviteUserRateLimit,
mfaRateLimit,
creationLimit,
publicEndpointLimit
});
createNotification({
@ -210,25 +206,6 @@ export const RateLimitPanel = () => {
</FormControl>
)}
/>
<Controller
control={control}
defaultValue={300}
name="creationLimit"
render={({ field, fieldState: { error } }) => (
<FormControl
label="New resource creation requests per minute"
className="w-72"
isError={Boolean(error)}
errorText={error?.message}
>
<Input
{...field}
value={field.value || ""}
onChange={(e) => field.onChange(Number(e.target.value))}
/>
</FormControl>
)}
/>
<Controller
control={control}
defaultValue={300}

Some files were not shown because too many files have changed in this diff.