Compare commits


430 Commits

Author SHA1 Message Date
4b718b679a Change deactivate button messaging, add scim check 2024-07-18 10:44:54 +07:00
498b1109c9 resolve pr review issues 2024-07-18 10:32:39 +07:00
ada0033bd0 Fix type issue frontend 2024-07-17 23:13:04 +07:00
8542ec8c3e Complete preliminary user page 2024-07-17 19:48:04 +07:00
9efeb8926f Merge pull request #2137 from Infisical/maidul-dewfewfqwef
Address vanta postcss update
2024-07-16 21:46:14 -04:00
389bbfcade fix vanta postcss 2024-07-16 21:44:33 -04:00
0b8427a004 Merge pull request #2112 from Infisical/feat/added-support-for-oidc-auth-in-cli
feat: added support for oidc auth in cli
2024-07-17 00:51:51 +08:00
8a470772e3 Merge pull request #2136 from Infisical/polish-scim-groups
Add SCIM user activation/deactivation
2024-07-16 12:09:50 -04:00
853f3c40bc Adjustments to migration file 2024-07-16 22:20:56 +07:00
fed44f328d Merge pull request #2133 from akhilmhdh/feat/aws-kms-sm
fix: slug too big for project fixed
2024-07-16 09:50:08 -04:00
a1d00f2c41 Add SCIM user activation/deactivation 2024-07-16 20:19:27 +07:00
95a68f2c2d Merge pull request #2134 from Infisical/improve-auth-method-errors
Improve Native Auth Method Forbidden Errors
2024-07-16 15:00:12 +07:00
db7c0c45f6 Merge pull request #2135 from Infisical/fix-identity-projects
Fix Identity-Project Provisioning Modal — Filter Current Org Projects
2024-07-16 14:59:41 +07:00
82bca03162 Filter out only projects that are part of current org in identity project modal 2024-07-16 14:31:40 +07:00
043c04778f Improve native auth method unauthorized errors 2024-07-16 13:47:46 +07:00
560cd81a1c fix: slug too big for project fixed 2024-07-16 11:26:45 +05:30
df3a87fabf Merge pull request #2132 from Infisical/daniel/operator-azure-fix
feat(k8-operator): customizable azure auth resource url
2024-07-16 06:29:13 +02:00
6eae98c1d4 Update login.mdx 2024-07-16 05:45:48 +02:00
6ceeccf583 Update kubernetes.mdx 2024-07-16 05:25:30 +02:00
9b0b14b847 Merge pull request #2131 from Infisical/daniel/azure-fix
fix(auth): Azure audience formatting bug
2024-07-16 04:50:49 +02:00
78f4c0f002 Update Chart.yaml 2024-07-16 04:46:32 +02:00
6cff2f0437 Update values.yaml 2024-07-16 04:46:24 +02:00
6cefb180d6 Update SDK and go mod tidy 2024-07-16 04:44:32 +02:00
59a44155c5 Azure resource 2024-07-16 04:43:53 +02:00
d0ad9c6b17 Update sample.yaml 2024-07-16 04:43:46 +02:00
58a406b114 Update secrets.infisical.com_infisicalsecrets.yaml 2024-07-16 04:43:42 +02:00
8a85695dc5 Custom azure resource 2024-07-16 04:43:38 +02:00
7ed8feee6f Update identity-azure-auth-fns.ts 2024-07-16 04:15:04 +02:00
de67c0ad9f Merge pull request #2110 from akhilmhdh/feat/folder-improvement-tf
New folder endpoints for terraform
2024-07-15 21:40:24 -04:00
b8d11d31a6 Merge pull request #2130 from Infisical/handbook-update
updated hiring handbook
2024-07-15 21:38:33 -04:00
d630ceaffe updated hiring handbook 2024-07-15 17:19:56 -07:00
a89e60f296 Merge pull request #2129 from Infisical/maidul-sddwqdwdwqe123
Remove org read check on project fetch
2024-07-15 16:38:21 -04:00
a5d9abf1c8 remove org read check on projects fetch 2024-07-15 16:33:11 -04:00
d97dea2573 Merge pull request #2128 from Infisical/misc/removed-aws-global-config-update
misc: moved aws creds to constructor
2024-07-16 02:38:57 +08:00
bc58f6b988 misc: moved aws creds to constructor 2024-07-16 02:31:31 +08:00
ed8e3f34fb Merge pull request #2095 from akhilmhdh/feat/aws-kms-sm
aws kms support base setup
2024-07-15 12:47:30 -04:00
91315c88c3 Merge pull request #2124 from Infisical/misc/displayed-project-name-in-slack-webhook
misc: displayed project name in slack webhook
2024-07-16 00:18:42 +08:00
9267f881d6 misc: displayed project name in slack webhook 2024-07-15 16:49:06 +08:00
c2ddb7e2fe misc: updated go-sdk version 2024-07-15 12:26:31 +08:00
c90ecd336c Merge pull request #2122 from Infisical/fix-scim-groups
Fix SCIM PATCH /Groups Fn
2024-07-15 00:57:46 +07:00
d8b1da3ddd Fix SCIM patch group fn 2024-07-15 00:29:18 +07:00
58e86382fe Merge pull request #2119 from Infisical/update-used-count-on-prem
Update dynamic seat count on prem
2024-07-14 20:13:42 +07:00
2080c4419e Update dynamic seat count on prem 2024-07-14 14:25:32 +07:00
b582a4a06d Merge pull request #2117 from DDDASHXD/sebastian/ui-fix
Fix: Features card overflow
2024-07-12 21:05:56 +02:00
a5c6a864de Fix: Features card overflow 2024-07-12 18:57:08 +00:00
5082c1ba3b Merge pull request #2115 from Infisical/misc/soft-delete-shared-secrets
misc: soft delete shared secrets upon expiry
2024-07-12 12:56:26 -04:00
cceb08b1b5 Merge pull request #2109 from Infisical/misc/address-ws-vulnerability-via-package-update
misc: addressed ws vulnerability via package update
2024-07-12 12:20:15 -04:00
4c34e58945 Merge pull request #2116 from Infisical/dependabot/npm_and_yarn/frontend/axios-0.28.0
build(deps): bump axios from 0.27.2 to 0.28.0 in /frontend
2024-07-12 19:17:08 +05:30
72de1901a1 build(deps): bump axios from 0.27.2 to 0.28.0 in /frontend
Bumps [axios](https://github.com/axios/axios) from 0.27.2 to 0.28.0.
- [Release notes](https://github.com/axios/axios/releases)
- [Changelog](https://github.com/axios/axios/blob/v0.28.0/CHANGELOG.md)
- [Commits](https://github.com/axios/axios/compare/v0.27.2...v0.28.0)

---
updated-dependencies:
- dependency-name: axios
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 13:37:13 +00:00
65fefcdd87 Merge pull request #2114 from Infisical/dependabot/npm_and_yarn/frontend/postcss-8.4.39
build(deps): bump postcss from 8.4.14 to 8.4.39 in /frontend
2024-07-12 19:03:59 +05:30
8e753eda72 misc: soft delete shared secrets 2024-07-12 21:29:17 +08:00
7137c94fa2 build(deps): bump postcss from 8.4.14 to 8.4.39 in /frontend
Bumps [postcss](https://github.com/postcss/postcss) from 8.4.14 to 8.4.39.
- [Release notes](https://github.com/postcss/postcss/releases)
- [Changelog](https://github.com/postcss/postcss/blob/main/CHANGELOG.md)
- [Commits](https://github.com/postcss/postcss/compare/8.4.14...8.4.39)

---
updated-dependencies:
- dependency-name: postcss
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 13:24:54 +00:00
52ea7dfa61 Merge pull request #2113 from Infisical/dependabot/go_modules/cli/github.com/rs/cors-1.11.0
build(deps): bump github.com/rs/cors from 1.9.0 to 1.11.0 in /cli
2024-07-12 18:52:25 +05:30
093925ed0e build(deps): bump github.com/rs/cors from 1.9.0 to 1.11.0 in /cli
Bumps [github.com/rs/cors](https://github.com/rs/cors) from 1.9.0 to 1.11.0.
- [Commits](https://github.com/rs/cors/compare/v1.9.0...v1.11.0)

---
updated-dependencies:
- dependency-name: github.com/rs/cors
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 13:20:45 +00:00
356afd18c4 feat: added support for oidc auth in cli 2024-07-12 18:28:09 +08:00
4491f2d8f1 Merge pull request #2111 from Infisical/dependabot/go_modules/cli/github.com/dvsekhvalnov/jose2go-1.6.0
build(deps): bump github.com/dvsekhvalnov/jose2go from 1.5.0 to 1.6.0 in /cli
2024-07-12 14:44:32 +05:30
4a401957c7 build(deps): bump github.com/dvsekhvalnov/jose2go in /cli
Bumps [github.com/dvsekhvalnov/jose2go](https://github.com/dvsekhvalnov/jose2go) from 1.5.0 to 1.6.0.
- [Commits](https://github.com/dvsekhvalnov/jose2go/compare/v1.5...v1.6.0)

---
updated-dependencies:
- dependency-name: github.com/dvsekhvalnov/jose2go
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 09:11:42 +00:00
539785acae docs: updated api reference docs for the new folder endpoint 2024-07-12 14:29:19 +05:30
3c63346d3a feat: new get-by-id for folder for tf 2024-07-12 14:24:13 +05:30
0c673f6cca misc: addressed ws vulnerability via package update 2024-07-12 15:36:49 +08:00
10f4cbf11f Merge pull request #2107 from Infisical/feat/add-share-secret-hide-unhide
feat: add share-secret hide and unhide
2024-07-12 14:57:02 +08:00
a6a8c32326 Merge pull request #2106 from Infisical/docs/identity-github-oidc-auth
doc: added docs for connecting github to infisical via oidc auth
2024-07-12 10:26:08 +07:00
99a474dba7 Merge pull request #2091 from Infisical/misc/moved-admin-user-deletion-to-pro
misc: moved admin user deletion to pro plan
2024-07-11 13:28:33 -04:00
e439f4e5aa Update UserPanel.tsx 2024-07-11 13:25:48 -04:00
ae2ecf1540 Merge pull request #2100 from Infisical/misc/add-ttl-max-value-for-identities
misc: add max checks for TTL values of identities
2024-07-11 13:21:53 -04:00
10214ea5dc misc: renamed button label 2024-07-12 00:45:16 +08:00
918cd414a8 feat: add share-secret hide and unhide 2024-07-12 00:42:26 +08:00
f9a125acee misc: updated limit to 10 years 2024-07-11 23:40:45 +08:00
52415ea83e misc: removed redundant text' 2024-07-11 23:30:55 +08:00
c5ca2b6796 doc: added docs for connecting github to infisical via oidc auth 2024-07-11 23:00:45 +08:00
ef5bcac925 Merge pull request #2103 from Infisical/move-groups
Consolidate People and Groups Tabs to shared User Tab at Org / Project Level
2024-07-11 18:58:12 +07:00
6cbeb29b4e Merge remote-tracking branch 'origin/main' into misc/add-ttl-max-value-for-identities 2024-07-11 19:17:25 +08:00
fbe344c0df Fix token auth ref 2024-07-11 14:56:57 +07:00
5821f65a63 Fix token auth ref 2024-07-11 14:56:08 +07:00
3af510d487 Merge pull request #2104 from Infisical/fix-token-auth-ref
Fix Token Auth Ref in Access Token DAL
2024-07-11 14:54:35 +07:00
c15adc7df9 Fix token auth ref 2024-07-11 14:49:22 +07:00
93af7573ac Consolidate people and groups tabs to user / user groups shared tab 2024-07-11 13:35:11 +07:00
cddda1148e misc: added max ttl checks for native auths 2024-07-11 14:05:50 +08:00
9c37eeeda6 misc: finalize form validation for universal auth ttl 2024-07-11 13:48:18 +08:00
eadf5bef77 misc: add TTL max values for universal auth 2024-07-11 13:35:58 +08:00
5dff46ee3a Add missing token auth to access token findOne fn 2024-07-11 10:59:08 +07:00
8b202c2a79 Merge pull request #2099 from Infisical/identity-improvements
Identity Workflow Improvements (Table Menu Opts, Error Handling)
2024-07-11 10:45:20 +07:00
4574519a76 Update identity table opts, identity project table error handling 2024-07-11 10:36:00 +07:00
82ee77bc05 Merge pull request #2093 from Infisical/doc/add-native-auth-to-docs
doc: added native auths to api reference
2024-07-11 09:46:26 +07:00
9a861499df Merge pull request #2097 from Infisical/secret-sharing-ui-update
update phrasing
2024-07-10 18:38:32 -04:00
17c7207f9d doc: added oidc auth api reference 2024-07-11 02:04:44 +08:00
d1f3c98f21 fix posthog cross orgin calls 2024-07-10 13:46:22 -04:00
d248a6166c Merge remote-tracking branch 'origin/main' into doc/add-native-auth-to-docs 2024-07-11 01:31:50 +08:00
8fdd82a335 Add token auth to api reference 2024-07-10 23:37:36 +07:00
c501c85eb8 misc: renamed to more generic label 2024-07-11 00:14:34 +08:00
eac621db73 Merge pull request #2096 from Infisical/identity-project-provisioning
Identities Page - Project Provisioning / De-provisioning
2024-07-10 23:08:21 +07:00
ab7983973e update phrasing 2024-07-10 08:06:06 -07:00
ff43773f37 Merge pull request #2088 from Infisical/feat/move-secrets
feat: move secrets
2024-07-10 21:48:57 +08:00
68574be05b Fix merge conflicts 2024-07-10 18:18:00 +07:00
1d9966af76 Add admin to display identity table 2024-07-10 18:15:39 +07:00
4dddf764bd Finish identity page project provisioning/deprovisioning 2024-07-10 18:14:34 +07:00
2d9435457d misc: addressed typo 2024-07-10 18:58:42 +08:00
5d4c7c2cbf feat: added encrypt/decrypt with key for kms service and changed kms encrytion to hoc to avoid back to back db calls 2024-07-10 15:23:02 +05:30
8b06215366 Merge pull request #2055 from Infisical/feat/oidc-identity
feat: oidc machine identity auth method
2024-07-10 17:29:56 +08:00
08f0bf9c67 feat: fixed migration down missing orgid 2024-07-10 14:47:44 +05:30
654dd97793 feat: external kms router defined not plugged in 2024-07-10 12:48:36 +05:30
2e7baf8c89 feat: added external kms router but not connected with the server yet 2024-07-10 12:48:35 +05:30
7ca7a95070 feat: kms service changes for db change 2024-07-10 12:48:35 +05:30
71c49c8b90 feat: kms db schema changes to support external and internal kms uniformly 2024-07-10 12:48:35 +05:30
4fab746b95 misc: added description to native auth properties 2024-07-10 15:17:23 +08:00
179edd98bf misc: rolled back frontend package-lock 2024-07-10 13:45:35 +08:00
dc05b34fb1 misc: rolled back package locks 2024-07-10 13:41:33 +08:00
899757ab7c doc: added native auth to api reference 2024-07-10 13:30:06 +08:00
20f6dbfbd1 Update oidc docs image 2024-07-10 12:09:24 +07:00
8ff524a037 Move migration file to front 2024-07-10 11:51:53 +07:00
3913e2f462 Fix merge conflicts, bring oidc auth up to speed with identity ui changes 2024-07-10 10:31:48 +07:00
9832915eba add .? incase adminUserDeletion is empty 2024-07-09 21:09:55 -04:00
ebbccdb857 add better label for identity id 2024-07-09 18:13:06 -04:00
b98c8629e5 misc: moved admin user deletion to pro 2024-07-09 23:51:09 +08:00
28723e9a4e misc: updated toast 2024-07-09 23:32:26 +08:00
079e005f49 misc: added audit log and overwrite feature 2024-07-09 21:46:12 +08:00
df90e4e6f0 Update go version in k8s dockerfile 2024-07-09 09:44:52 -04:00
6e9a624697 Merge pull request #2090 from Infisical/daniel/operator-bump-sdk
fix(operator): azure auth
2024-07-09 09:32:39 -04:00
94b0cb4697 Bump helm 2024-07-09 15:31:47 +02:00
5a5226c82f Update values.yaml 2024-07-09 15:27:11 +02:00
09cfaec175 Update infisicalsecret_controller.go 2024-07-09 15:27:11 +02:00
40abc184f2 Fix: Bump SDK version 2024-07-09 15:27:11 +02:00
3879edfab7 Merge pull request #2089 from Infisical/docs-token-auth
Update identity docs for auth token and newer identity flow
2024-07-09 16:51:44 +07:00
d20ae39f32 feat: initial move secret integration 2024-07-09 17:48:39 +08:00
53c875424e Update identity docs for auth token and newer identity flow 2024-07-09 16:47:24 +07:00
05bf2e4696 made move operation transactional 2024-07-09 16:03:50 +08:00
a06dee66f8 feat: initial logic for moving secrets 2024-07-09 15:20:58 +08:00
0eab9233bb Merge pull request #2076 from Infisical/misc/redesigned-org-security-settings
misc: redesigned org security settings page
2024-07-09 15:05:34 +08:00
9bf358a57d Merge pull request #2057 from Infisical/token-auth
Token Authentication Method + Revamped Identity (Page) Workflow
2024-07-09 11:55:56 +07:00
93926cc6b7 Merge remote-tracking branch 'origin' into token-auth 2024-07-09 11:52:14 +07:00
59ccabec69 Make fixes based on review 2024-07-09 11:51:21 +07:00
8b0678cfa1 Merge pull request #2079 from aheruz/patch-1
doc: Update how-to-create-a-feature.mdx
2024-07-08 22:53:36 -04:00
3004de459f Merge pull request #1998 from rtrompier/feat/helm
fix(helm-charts): add nodeSelector and tolerations
2024-07-08 21:41:08 -04:00
7d4e531e5f Merge branch 'main' into feat/helm 2024-07-08 21:40:45 -04:00
f66ef8b066 Update Chart.yaml 2024-07-08 21:39:16 -04:00
a116233979 add nodeSelector and tolerations to manager 2024-07-08 21:22:46 -04:00
454c0b62b9 Merge pull request #2086 from Infisical/feat/allow-admins-to-delete-users
feat: allow admins to delete users
2024-07-09 02:10:46 +08:00
2c6decaf6e misc: addressed comments 2024-07-09 01:11:24 +08:00
d0f0dca3a3 misc: added sort by 2024-07-09 00:57:23 +08:00
9efbffe5d2 misc: renamed mutation function 2024-07-09 00:42:50 +08:00
c1b242db67 misc: added pagination and moved to admin route 2024-07-09 00:34:07 +08:00
845f71e8ed Merge pull request #2085 from Infisical/vmatsiiako-patch-docs-3
Update secret-sharing.mdx
2024-07-08 09:44:59 -04:00
653fc367ac Merge pull request #2087 from akhilmhdh/feat/fly-io-banner
feat: banner on warning secret deletion in fly.io integration
2024-07-08 09:33:46 -04:00
9f0867559a update banner text 2024-07-08 09:32:47 -04:00
a37987b508 misc: added proper error handling 2024-07-08 21:31:11 +08:00
96e485910c Merge remote-tracking branch 'origin' into token-auth 2024-07-08 17:14:55 +07:00
b81f7d8350 Finish new identity page 2024-07-08 17:12:49 +07:00
eeb2e89d1a feat: banner on warning secret deletion in fly.io integration 2024-07-08 13:04:30 +05:30
f3a8fda254 misc: resolved conflict with existing method 2024-07-08 15:16:10 +08:00
ccf0c3cd35 misc: modified member to user 2024-07-08 15:09:32 +08:00
6e15979672 feat: allow admins to delete users 2024-07-08 15:04:08 +08:00
4e724d15f6 Update secret-sharing.mdx 2024-07-07 21:41:15 -07:00
5eba61b647 Merge pull request #2084 from Infisical/secret-sharing-ui-update 2024-07-07 11:53:19 -04:00
98ef1614c6 update mobile view 2024-07-07 08:52:02 -07:00
f591f6d428 update mobile view 2024-07-07 08:49:05 -07:00
795b533fce Merge pull request #2083 from Infisical/secret-sharing-ui-update 2024-07-07 07:55:29 -04:00
35be8e1912 fix ui secret sharing 2024-07-06 23:26:26 -07:00
da70f23bf6 Merge pull request #2082 from Infisical/maidul-2323e23
Fix posthog events on app.infisical frontend
2024-07-06 17:49:31 -04:00
3ba90cc42d fix post hog on app.infisical frontend 2024-07-06 17:44:53 -04:00
131ec81744 Merge pull request #2081 from Infisical/vmatsiiako-patch-docs-2 2024-07-06 13:41:14 -04:00
c84262b182 Update vercel.mdx 2024-07-06 10:38:19 -07:00
1ee9994df6 Merge pull request #2080 from Infisical/vmatsiiako-patch-docs-1 2024-07-06 13:37:59 -04:00
a3356b4bad Update azure-key-vault.mdx 2024-07-06 10:31:39 -07:00
f95092e083 doc: Update how-to-create-a-feature.mdx
`npm generate:component` should be `npm run generate:component`
2024-07-06 13:42:38 +02:00
982c51bdc7 Merge pull request #2078 from Infisical/daniel/rename-standalone-to-core
chore(binary): `rename infisical-standalone` to `infisical-core`
2024-07-05 20:04:14 -04:00
9e7ec88d57 Update build-binaries.yml 2024-07-06 02:02:04 +02:00
ce304b26d8 Merge pull request #2041 from Infisical/daniel/infisical-binary
feat: Infisical Binary
2024-07-05 19:43:15 -04:00
8deff5adfb Update package-lock.json 2024-07-06 01:34:00 +02:00
1f8b3b6779 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
a87bc66b05 Cleanup 2024-07-06 01:32:18 +02:00
de57e1af35 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
09d8822816 Update argv.ts 2024-07-06 01:32:18 +02:00
13aaef4212 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
5e9193adda Update build-binaries.yml 2024-07-06 01:32:18 +02:00
ec3e886624 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
36d30566fe Debian 2024-07-06 01:32:18 +02:00
dfbeac3dfe Update build-binaries.yml 2024-07-06 01:32:18 +02:00
87e52ddd06 Attempt .deb package 2024-07-06 01:32:18 +02:00
a62fbf088f Fix push 2024-07-06 01:32:18 +02:00
f186cb4d7b Alphine and migration mode 2024-07-06 01:32:18 +02:00
2ee123c9f6 Exit codes 2024-07-06 01:32:18 +02:00
18b6c4f73e chore: testing, hardcoded version 2024-07-06 01:32:18 +02:00
50409f0c48 Feat: Standalone migration mode 2024-07-06 01:32:18 +02:00
54e5166bb6 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b9b880d310 Trigger workflow 2024-07-06 01:32:18 +02:00
085d1d5a5e Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b02c37028b Update build-binaries.yml 2024-07-06 01:32:18 +02:00
49248ee13f Rollback 2024-07-06 01:32:18 +02:00
bafc6ee129 Fixes 2024-07-06 01:32:18 +02:00
eb6dca425c Update build-binaries.yml 2024-07-06 01:32:18 +02:00
99c1259f15 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b4770116a8 Requested changes 2024-07-06 01:32:18 +02:00
eb90f503a9 Fix: Re-add compression 2024-07-06 01:32:18 +02:00
e419983249 Update external-nextjs.ts 2024-07-06 01:32:18 +02:00
b030fe2e69 Update package-lock.json 2024-07-06 01:32:18 +02:00
eff0604e9d Revert "Update package-lock.json"
This reverts commit ae39b80f12a73fae65036f6a3af4624a5798b2bb.
2024-07-06 01:32:18 +02:00
e90f3af4ce Update package-lock.json 2024-07-06 01:32:18 +02:00
baf2763287 Update package-lock.json 2024-07-06 01:32:18 +02:00
d708a3f566 Update package-lock.json 2024-07-06 01:32:18 +02:00
5b52c33f5f Fix: Add cloud smith api key 2024-07-06 01:32:18 +02:00
a116fc2bf3 Update package.json 2024-07-06 01:32:18 +02:00
39d09eea3d Update build-binaries.yml 2024-07-06 01:32:18 +02:00
f7d071e398 Fix: Compress binaries 2024-07-06 01:32:18 +02:00
0d4dd5a6fa Fix: e2e tests 2024-07-06 01:32:18 +02:00
b4de012047 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b3720cdbfc Fix Windows executable upload 2024-07-06 01:32:18 +02:00
0dc85dff33 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
a6e4e3c69a Trying something new 2024-07-06 01:32:18 +02:00
be9de82ef5 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
2566f4dc9e Update build-binaries.yml 2024-07-06 01:32:18 +02:00
934bfbb624 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
509037e6d0 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
f041aa7557 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
266e2856e8 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
7109d2f785 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
2134d2e118 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
c2abc383d5 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
3a2336da44 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
1266949fb1 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
62d287f8a6 Try node16 2024-07-06 01:32:18 +02:00
0b4e7f0096 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
7dda2937ba Update build-binaries.yml 2024-07-06 01:32:18 +02:00
91d81bd20c Update build-binaries.yml 2024-07-06 01:32:18 +02:00
f329a79771 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
31a31f556c Update build-binaries.yml 2024-07-06 01:32:18 +02:00
1be2f806d9 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
38a6785ca4 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
377eb4cfd3 Create build-binaries.yml 2024-07-06 01:32:18 +02:00
8df7401e06 Remove compression and separate packaging 2024-07-06 01:32:18 +02:00
0c79303582 Update env.ts 2024-07-06 01:32:18 +02:00
e6edde57ba Create babel.config.json 2024-07-06 01:32:18 +02:00
6634675b2a Update .gitignore 2024-07-06 01:32:18 +02:00
50840ce26b Feat: Infisical Binary 2024-07-06 01:32:18 +02:00
4c2f7fff5c Fix: .mjs imports not being updated by bable 2024-07-06 01:30:27 +02:00
f0a3792a64 Create process.d.ts 2024-07-06 01:30:27 +02:00
70da6878c1 Fix: Enable production & standalone when packaged 2024-07-06 01:30:27 +02:00
754404d905 Fix: Serve frontend with binary 2024-07-06 01:30:27 +02:00
85cfac512c Fix: Serve frontend with binary 2024-07-06 01:30:27 +02:00
d40b907308 Merge pull request #2077 from Infisical/misc/update-make-a-wish-ui
misc: update make-a-wish UI design
2024-07-05 14:42:05 -04:00
a5b18cbb72 misc/make-a-wish-ui-improvement 2024-07-06 01:02:48 +08:00
d4a2f4590b misc: redesigned org security settings page 2024-07-06 00:25:27 +08:00
7add57ae78 Merge pull request #2075 from Infisical/fix/addressed-ldap-trust-email-issue
fix: addressed lap trust email issue during login
2024-07-05 12:12:40 -04:00
e5879df7c7 Merge pull request #2074 from Infisical/feat/make-a-wish-feature
feat: make a wish feature
2024-07-05 12:09:53 -04:00
04298bb1a7 fix: addressed lap trust email issue during login 2024-07-05 20:26:02 +08:00
1a6a5280a0 misc: display wish feature only on cloud 2024-07-05 19:42:38 +08:00
da0d8fdbfc feat: finished up wish integration 2024-07-05 15:19:43 +08:00
d2759ea378 patch npm ip package 2024-07-04 19:08:07 -04:00
c4385af352 Delete .github/workflows/update-be-new-migration-latest-timestamp.yml 2024-07-04 16:21:35 -04:00
bbe2d2e053 Merge pull request #2061 from akhilmhdh/feat/secret-approval-grouo
Secret approval with groups
2024-07-04 16:18:56 -04:00
2c9fdb7fad feat: initial make a wish UI 2024-07-05 00:30:22 +08:00
38eee5490e Merge pull request #2056 from Infisical/maidul-212313
Main
2024-07-04 11:58:56 -04:00
0aa7337ff4 Merge pull request #2072 from Infisical/misc/removed-webhook-url-from-audit-logs-table
misc: removed webhook url from audit logs table
2024-07-04 20:53:51 +05:30
98371f99e7 misc: removed webhook url from audit logs table 2024-07-04 23:16:02 +08:00
ddfc645cdd Merge pull request #2068 from akhilmhdh/feat/audit-log-batching
Changed audit log deletion to batched process
2024-07-04 10:54:54 -04:00
8bc6edd165 doc: added general docs for oidc auth 2024-07-04 22:31:03 +08:00
f4d9c61404 feat: added a pause in between as breather for db delete 2024-07-04 13:59:15 +05:30
5342c85696 feat: changed audit log deletion to batched process 2024-07-04 13:26:11 +05:30
2497aada8a misc: added oidc auth to access token trusted Ips 2024-07-04 15:54:37 +08:00
b05f3e0f1f Merge pull request #2050 from Infisical/feat/native-slack-webhook
feat: added native slack webhook type
2024-07-04 14:50:58 +08:00
9a2645b511 Merge pull request #2065 from akhilmhdh/fix/provider-not-found
Fix provider not found error for secret rotation
2024-07-04 12:08:55 +05:30
cb664bb042 misc: addressed review comments 2024-07-04 13:33:32 +08:00
5921f349a8 misc: removed comment 2024-07-04 12:42:36 +08:00
07db1d826b Merge pull request #2067 from Infisical/fix-license-seats-invite-propagation
Fix license seat count upon complete account invite with tx
2024-07-03 13:43:00 -07:00
74db1b75b4 Add tx support for seat count in license invitation update 2024-07-03 13:33:40 -07:00
b5166f1d39 Identity redesign modal opt 2024-07-03 13:29:31 -07:00
4927cc804a feat: added endpoint for oidc auth revocation 2024-07-04 00:53:24 +08:00
2153dd94eb feat: finished up login with identity oidc 2024-07-03 23:48:31 +08:00
d7023881e5 fix: resolving provider not found error for secret rotation 2024-07-03 20:39:02 +05:30
ef3cdd11ac feat: ui changes for secret approval group 2024-07-03 20:17:16 +05:30
612cf4f968 feat: server logic for secret approval group 2024-07-03 20:17:16 +05:30
b6a9dc7f53 feat: completed migration for secret approval group 2024-07-03 20:17:16 +05:30
08322f46f9 misc: setup audit logs for oidc identity 2024-07-03 21:38:13 +08:00
fc9326272a feat: finished up oidc auth management 2024-07-03 21:18:50 +08:00
b74595cf35 Merge pull request #2060 from Infisical/fix/addressed-main-page-ui-ux-reports
fix: addressed main page ui/ux concerns
2024-07-03 08:40:40 -04:00
a45453629c misc: addressed main page ui/ux concerns 2024-07-03 18:32:21 +08:00
f7626d03bf misc: documentation 2024-07-03 12:26:42 +08:00
bc14153bb3 Merge pull request #2049 from akhilmhdh/dynamic-secret/mssql
Dynamic secret MS SQL
2024-07-02 21:22:34 -04:00
4cfe564f3d Fix lint issues 2024-07-02 15:15:45 -07:00
93be4095c0 Finish preliminary token auth method 2024-07-02 15:05:57 -07:00
8915b4055b address security #86 2024-07-02 15:52:25 -04:00
c90e423e4a feat: initial setup 2024-07-03 02:06:02 +08:00
935a3cb036 Merge pull request #2026 from Infisical/feat/allow-toggling-login-options-as-admin
feat: allowed toggling login options as admin
2024-07-02 14:03:11 -04:00
148a29db19 Merge branch 'feat/allow-toggling-login-options-as-admin' of https://github.com/Infisical/infisical into feat/allow-toggling-login-options-as-admin 2024-07-03 01:58:04 +08:00
b12de3e4f5 misc: removed usecallback 2024-07-03 01:57:24 +08:00
661e5ec462 Merge pull request #2052 from Infisical/maidul-2132
Main
2024-07-02 20:29:43 +05:30
5cca51d711 access prod bd in ci 2024-07-02 10:57:05 -04:00
9e9b9a7b94 update self lock out msg 2024-07-02 10:53:36 -04:00
df1ffcf934 Merge pull request #2051 from Infisical/misc/add-config-to-redacted-keys
misc: add config to redacted keys
2024-07-02 10:47:20 -04:00
0ef7eacd0e misc: add config to redacted keys 2024-07-02 22:34:40 +08:00
776822d7d5 misc: updated secret path component 2024-07-02 20:54:27 +08:00
fe9af20d8c fix: addressed type issue 2024-07-02 20:28:03 +08:00
398a8f363d misc: cleanup of form display structure 2024-07-02 20:20:25 +08:00
ce5dbca6e2 misc: added placeholder for incoming webhook url 2024-07-02 20:04:55 +08:00
ed5a7d72ab feat: added native slack webhook type 2024-07-02 19:57:58 +08:00
3ac6b7be65 Merge pull request #2046 from Infisical/misc/add-check-for-ldap-group
misc: added backend check for ldap group config
2024-07-02 12:59:03 +08:00
10601b5afd Merge pull request #2039 from akhilmhdh/feat/migration-file-checks
feat: added slugify migration file creater name and additional check to ensure migration files are not editied in PR
2024-07-01 21:01:47 -04:00
8eec08356b update error message 2024-07-01 20:59:56 -04:00
0b4d4c008a docs: dynamic secret mssql 2024-07-02 00:18:56 +05:30
ae953add3d feat: dynamic secret for mssql completed 2024-07-02 00:12:38 +05:30
5960a899ba Merge pull request #2048 from Infisical/create-pull-request/patch-1719844740
GH Action: rename new migration file timestamp
2024-07-02 01:25:54 +08:00
ea98a0096d chore: renamed new migration files to latest timestamp (gh-action) 2024-07-01 14:38:59 +00:00
b8f65fc91a Merge pull request #2040 from Infisical/feat/mark-projects-as-favourite
feat: allow org members to mark projects as favorites
2024-07-01 22:38:36 +08:00
06a4e68ac1 misc: more improvements 2024-07-01 22:33:01 +08:00
9cbf9a675a misc: simplified update project favorites logic 2024-07-01 22:22:44 +08:00
178ddf1fb9 Merge pull request #2032 from akhilmhdh/fix/role-bug
Resolved identity roleId not setting null for predefined role selection
2024-07-01 19:42:17 +05:30
030d4fe152 misc: added handling of empty groups and default value 2024-07-01 21:10:27 +08:00
46abda9041 misc: add org scoping to mutation 2024-07-01 20:22:59 +08:00
c976a5ccba misc: add scoping to org-level 2024-07-01 20:20:15 +08:00
1eb9ea9c74 misc: implemened more review comments 2024-07-01 20:10:41 +08:00
7d7612aaf4 misc: removed use memo 2024-07-01 18:29:56 +08:00
f570b3b2ee misc: combined into one list 2024-07-01 18:23:38 +08:00
0b8f6878fe misc: added check for ldap group 2024-07-01 18:12:16 +08:00
758a9211ab misc: addressed pr comments 2024-07-01 13:11:47 +08:00
0bb2b2887b updated handbook 2024-06-30 10:02:51 -07:00
eeb0111bbe updated handbook style 2024-06-30 02:03:59 -07:00
d12c538511 updated handbook 2024-06-30 02:01:40 -07:00
6f67346b2a Merge pull request #2042 from Infisical/daniel/fix-k8-managed-secret-crash
fix(k8-operator): crash on predefined managed secret
2024-06-28 17:01:37 -04:00
a93db44bbd Helm 2024-06-28 21:34:59 +02:00
1ddacfda62 Fix: Annotations map nil sometimes nil when pre-created by the user 2024-06-28 21:29:33 +02:00
5a1e43be44 misc: only display recover when email login is enabled 2024-06-29 02:12:09 +08:00
04f54479cd misc: implemented review comments 2024-06-29 01:58:27 +08:00
351d0d0662 Merge pull request #2033 from Infisical/misc/added-secret-name-trim
misc: added secret name trimming
2024-06-29 01:14:23 +08:00
5a01edae7a misc: added favorites to app layout selection 2024-06-29 01:02:28 +08:00
506e86d666 feat: added slugify migration file creater name and additional check to ensure migration files are not editied in PR 2024-06-28 20:33:56 +05:30
11d9166684 misc: initial project favorite in grid view 2024-06-28 17:40:34 +08:00
1859557f90 Merge pull request #2027 from akhilmhdh/feat/secret-manager-integration-auth
AWS Secret Manager assume role based integration
2024-06-27 23:32:25 -04:00
59fc34412d small nits for admin login toggle pr 2024-06-27 20:35:15 -04:00
1b2a1f2339 Merge pull request #2019 from akhilmhdh/feat/read-replica
Postgres read replica support
2024-06-27 19:36:44 -04:00
15b4c397ab Merge pull request #2024 from Infisical/revert-2023-revert-1995-identity-based-pricing
Add support for Identity-Based Pricing"
2024-06-27 15:56:44 -07:00
fc27ad4575 Merge pull request #2037 from Infisical/create-pull-request/patch-1719509560
GH Action: rename new migration file timestamp
2024-06-27 23:07:16 +05:30
b7467a83ab chore: renamed new migration files to latest timestamp (gh-action) 2024-06-27 17:32:39 +00:00
3baf434230 Merge pull request #2034 from Infisical/misc/add-on-update-trigger-oidc
misc: add onUpdate trigger to oidc config
2024-06-27 23:02:14 +05:30
e28471a9f4 misc: add onUpdate trigger to oidc config 2024-06-27 19:55:01 +08:00
b2d6563994 misc: added secret name trimming 2024-06-27 19:41:00 +08:00
cfba8f53e3 fix: resolved identity roleId not setting null for predefined role switch 2024-06-27 15:06:00 +05:30
3537a5eb9b feat: switch to tabs instead of seperate pages for aws secret manager assume and access key 2024-06-27 13:06:04 +05:30
d5b17a8f24 feat: removed explicit check for aws access key credential allowing to pick it automatically 2024-06-27 13:05:30 +05:30
d6881e2e68 misc: added signup option filtering 2024-06-27 13:53:12 +08:00
92a663a17d misc: design change to finalize scim section in org settings 2024-06-27 13:24:26 +08:00
b3463e0d0f misc: added explicit comment of intent 2024-06-27 12:55:39 +08:00
c460f22665 misc: added backend disable checks 2024-06-27 12:40:56 +08:00
7cdc47cd3a Update build-staging-and-deploy-aws.yml 2024-06-26 18:55:13 -04:00
d666d60f9f Merge pull request #2030 from Infisical/doc/added-guide-for-configuring-certs
doc: added guide for configuring certs
2024-06-26 15:30:15 -04:00
491c4259ca small rephrase for gitlab cert docs 2024-06-26 15:29:44 -04:00
cff20eb621 doc: added guide for configuring certs 2024-06-27 03:00:15 +08:00
db39d03713 misc: added check to backend 2024-06-27 01:59:02 +08:00
84d8879177 Merge pull request #2029 from Infisical/create-pull-request/patch-1719422383
GH Action: rename new migration file timestamp
2024-06-27 01:20:12 +08:00
aa4f2adbb6 Merge pull request #2028 from Infisical/create-pull-request/patch-1719422278
GH Action: rename new migration file timestamp
2024-06-27 01:19:50 +08:00
86ed3ef6d6 chore: renamed new migration files to latest timestamp (gh-action) 2024-06-26 17:19:42 +00:00
a5bb80adc4 Merge pull request #2020 from Infisical/feat/allow-audit-log-retention-to-be-configurable
feat: enabled customization of project audit logs retention period
2024-06-27 01:19:20 +08:00
0e87dd3996 chore: renamed new migration files to latest timestamp (gh-action) 2024-06-26 17:17:57 +00:00
e1801e9eb4 Merge pull request #2025 from Infisical/feat/added-support-for-configurable-ldap-user-identifier
misc: add support for configuring unique attribute property for ldap users
2024-06-27 01:16:51 +08:00
9daa5badec misc: made reusable helper for login page 2024-06-27 01:15:50 +08:00
e1ed37c713 misc: adjusted OrgSettingsPage and PersonalSettingsPage to include toggle 2024-06-27 01:07:28 +08:00
8eea82a1a0 docs: updated docs on usage of aws sm integration with assume role 2024-06-26 22:35:49 +05:30
694d0e3ed3 feat: updated ui for aws sm assume role integration 2024-06-26 22:35:12 +05:30
58f6c6b409 feat: updated integration api and queue to support aws secret manager assume role feature 2024-06-26 22:33:49 +05:30
f4a33caba6 misc: upgraded doc description of new field 2024-06-27 00:38:17 +08:00
e0a6f09b5e misc: removed max in schema for api layer 2024-06-26 23:17:31 +08:00
98a15a901e feat: allowed toggling login options as admin 2024-06-26 22:45:14 +08:00
1e701687ae misc: adjusted other hook usage to incorporate new properties 2024-06-26 19:04:11 +08:00
15758b91f8 doc: updated ldap documentation 2024-06-26 18:54:34 +08:00
2d3a4a7559 feat: added support for configurable ldap user identifier 2024-06-26 18:36:28 +08:00
a1d01d5cbd misc: display retention settings only for self-hosted/dedicated 2024-06-26 12:44:56 +08:00
2e3aedc62b Merge pull request #2018 from Infisical/feat/add-initial-sync-behavior-azure
feat: added initial sync behavior for azure key integration
2024-06-26 11:43:54 +08:00
e0a5b1444a add step to install docker 2024-06-25 18:17:58 -04:00
1c2698f533 Revert "Revert "Add support for Identity-Based Pricing"" 2024-06-25 18:04:26 -04:00
b50833bded Merge pull request #2023 from Infisical/revert-1995-identity-based-pricing
Revert "Add support for Identity-Based Pricing"
2024-06-25 18:04:14 -04:00
e0c774c045 Revert "Add support for Identity-Based Pricing" 2024-06-25 18:03:07 -04:00
514df55d67 Merge pull request #1995 from Infisical/identity-based-pricing
Add support for Identity-Based Pricing
2024-06-25 14:50:18 -07:00
311b378f3b Merge pull request #1383 from Grraahaam/feat/cli-secret-plain-output
feat(cli): plain secret value output
2024-06-25 23:47:10 +02:00
b01b4323ca Fix merge conflicts 2024-06-25 14:46:58 -07:00
285a01af51 Merge pull request #2010 from Infisical/misc/add-documentation-for-bitbucket-cli-integration
doc: added bitbucket integration with cli
2024-06-25 16:33:07 -04:00
f7e658e62b rename bit bucket options 2024-06-25 16:31:56 -04:00
a8aef2934a Merge pull request #2021 from Infisical/feat/add-option-to-share-secrets-directly
feat: added option to share secret directly from main page
2024-06-25 15:55:04 -04:00
cc30476f79 Merge pull request #2022 from Infisical/misc/add-prompt-for-deleting-aws-sm-integration
misc: added proper prompt for aws secret manager integration deletion
2024-06-25 15:47:55 -04:00
5d59fe8810 fix: resolved rebase issue with knex.d.ts 2024-06-25 22:54:45 +05:30
90eed8d39b docs: updated replica information to the docs 2024-06-25 22:51:38 +05:30
f5974ce9ad feat: resolved some queries giving any[] on db instance modification for replication 2024-06-25 22:51:38 +05:30
c6b51af4b1 feat: removed knex-tables.d.ts 2024-06-25 22:51:37 +05:30
5139bf2385 misc: added delete prompt for aws secret manager integ deletion 2024-06-26 01:21:14 +08:00
c13c37fc77 feat: switched read db operations to replica nodes 2024-06-25 22:50:17 +05:30
259c01c110 feat: added read replica option in config and extended knex to choose 2024-06-25 22:49:28 +05:30
a016d0d33f Merge pull request #1999 from akhilmhdh/feat/ui-permission-check-broken
Terraform identity management apis
2024-06-25 22:46:53 +05:30
663be06d30 feat: added share secret to main page side nav 2024-06-26 00:22:47 +08:00
fa392382da feat: added option to share secret directly from main page 2024-06-25 23:41:17 +08:00
d34b2669c5 misc: finalized success message 2024-06-25 21:32:24 +08:00
11ea5990c9 feat: enabled customization of project audit logs retention period 2024-06-25 21:14:43 +08:00
9a66514178 Merge pull request #2007 from Infisical/feat/project-setting-for-rebuilding-index
feat: added project setting for rebuilding secret indices
2024-06-25 15:25:36 +08:00
d4f9faf24d feat: added initial sync behavior for azure key integration 2024-06-25 13:59:20 +08:00
a3c8d06845 Update overview.mdx 2024-06-24 20:25:53 -07:00
71b7be4057 Merge pull request #2017 from handotdev/patch-2
Update overview.mdx
2024-06-24 20:25:02 -07:00
5079a5889a Update overview.mdx 2024-06-24 17:37:35 -07:00
232b375f46 Merge pull request #2015 from Infisical/create-pull-request/patch-1719267521
GH Action: rename new migration file timestamp
2024-06-24 17:07:41 -07:00
d2acedf79e chore: renamed new migration files to latest timestamp (gh-action) 2024-06-24 22:18:39 +00:00
9d846319b0 Merge pull request #2014 from Infisical/cert-san
Add Certificate Support for Alt Names (SANs)
2024-06-24 15:18:17 -07:00
d69267a3ca remove console.log 2024-06-24 17:27:02 -04:00
051eee8701 Update altName example in docs 2024-06-24 14:14:51 -07:00
b5aa650899 Add cert support for alt names 2024-06-24 14:07:15 -07:00
376e185e2b Merge pull request #2006 from Infisical/daniel/expand-single-secret-ref
feat(api): Expand single secret references
2024-06-24 20:39:54 +02:00
a15a0a257c Update identity-router.ts 2024-06-24 20:38:11 +02:00
6facce220c update select default org login 2024-06-24 14:06:31 -04:00
620a423cee update org selection error message 2024-06-24 13:43:56 -04:00
361496c644 Merge pull request #2012 from Infisical/create-pull-request/patch-1719249628
GH Action: rename new migration file timestamp
2024-06-24 13:20:49 -04:00
e03f77d9cf chore: renamed new migration files to latest timestamp (gh-action) 2024-06-24 17:20:27 +00:00
60cb420242 Merge pull request #2000 from Infisical/daniel/default-org
Feat: Default organization slug for LDAP/SAML
2024-06-24 13:20:02 -04:00
1b8a77f507 Merge pull request #2002 from Infisical/patch-ldap
Patch LDAP undefined userId, email confirmation code sending
2024-06-24 13:19:48 -04:00
745f1c4e12 misc: only display when user is admin 2024-06-24 23:24:07 +08:00
6029eaa9df misc: modified step title 2024-06-24 20:17:31 +08:00
8703314c0c doc: added bitbucket integration with cli 2024-06-24 20:11:53 +08:00
084fc7c99e feat: resolved gcp auth revoke error 2024-06-23 01:25:27 +05:30
b6cc17d62a feat: updated var names and permission, rate limit changes based on comments 2024-06-23 01:21:26 +05:30
bd0d0bd333 feat: added project setting for rebuilding secret indices 2024-06-22 22:42:36 +08:00
c426ba517a Feat: Expand single secret references 2024-06-21 23:12:38 +02:00
91634fbe76 Patch LDAP 2024-06-20 17:49:09 -07:00
4072a40fe9 feat: completed all revoke for other identity auth 2024-06-20 21:18:45 +05:30
0dc132dda3 feat: added universal auth endpoints for revoke and client secret endpoint to fetch details 2024-06-20 21:18:45 +05:30
605ccb13e9 feat: added endpoints and docs for identity get by id and list operation 2024-06-20 21:18:45 +05:30
3f6b84de3b fix(helm-charts): add nodeSelector and tolerations 2024-06-20 16:32:47 +02:00
2160c66e20 doc(cli): improve --plain example + add --silent notice 2024-06-20 09:15:19 +02:00
1c5c7c75c4 Merge remote-tracking branch 'origin/main' into feat/cli-secret-plain-output 2024-06-20 08:50:11 +02:00
24c75c6325 Merge remote-tracking branch 'origin' into identity-based-pricing 2024-06-19 14:18:13 -07:00
0a22a2a9ef Readjustments 2024-06-19 14:16:32 -07:00
d0f1cad98c Add support for identity-based pricing 2024-06-19 13:59:47 -07:00
afee158b95 Start adding identity based pricing logic 2024-06-19 10:31:58 -07:00
f9a9599659 doc(cli): improve --plain example 2024-05-24 17:29:44 +02:00
637b0b955f fix(cli): add backward compatibility for --raw-value 2024-05-24 17:28:52 +02:00
092665737f Merge remote-tracking branch 'origin/main' into feat/cli-secret-plain-output 2024-05-24 17:13:57 +02:00
f83c2215a5 doc(cli): --plain 2024-02-08 17:44:55 +01:00
0f41590d6a feat(cli): --plain, --expand, --include-imports in 'secrets get' subcommand 2024-02-08 17:32:31 +01:00
496 changed files with 26883 additions and 4314 deletions

View File

@@ -67,3 +67,6 @@ CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

99 .github/workflows/build-binaries.yml vendored Normal file
View File

@@ -0,0 +1,99 @@
name: Build Binaries and Deploy
on:
workflow_dispatch:
inputs:
version:
description: "Version number"
required: true
type: string
defaults:
run:
working-directory: ./backend
jobs:
build-and-deploy:
runs-on: ubuntu-20.04
strategy:
matrix:
arch: [x64, arm64]
os: [linux, win]
include:
- os: linux
target: node20-linux
- os: win
target: node20-win
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: 20
- name: Install pkg
run: npm install -g @yao-pkg/pkg
- name: Install dependencies (backend)
run: npm install
- name: Install dependencies (frontend)
run: npm install --prefix ../frontend
- name: Prerequisites for pkg
run: npm run binary:build
- name: Package into node binary
run: |
if [ "${{ matrix.os }}" != "linux" ]; then
pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
else
pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
fi
# Set up .deb package structure (Debian/Ubuntu only)
- name: Set up .deb package structure
if: matrix.os == 'linux'
run: |
mkdir -p infisical-core/DEBIAN
mkdir -p infisical-core/usr/local/bin
cp ./binary/infisical-core infisical-core/usr/local/bin/
chmod +x infisical-core/usr/local/bin/infisical-core
- name: Create control file
if: matrix.os == 'linux'
run: |
cat <<EOF > infisical-core/DEBIAN/control
Package: infisical-core
Version: ${{ github.event.inputs.version }}
Section: base
Priority: optional
Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
Maintainer: Infisical <daniel@infisical.com>
Description: Infisical Core standalone executable (app.infisical.com)
EOF
# Build .deb file (Debian/Ubunutu only)
- name: Build .deb package
if: matrix.os == 'linux'
run: |
dpkg-deb --build infisical-core
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
# Publish .deb file to Cloudsmith (Debian/Ubuntu only)
- name: Publish to Cloudsmith (Debian/Ubuntu)
if: matrix.os == 'linux'
working-directory: ./backend
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
# Publish .exe file to Cloudsmith (Windows only)
- name: Publish to Cloudsmith (Windows)
if: matrix.os == 'win'
working-directory: ./backend
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -56,7 +56,7 @@ jobs:
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_GAMMA_SERVICE_KEY }}
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
@@ -105,6 +105,13 @@ jobs:
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment

View File

@@ -0,0 +1,25 @@
name: Check migration file edited
on:
pull_request:
types: [opened, synchronize]
paths:
- 'backend/src/db/migrations/**'
jobs:
rename:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check any migration files are modified, renamed or duplicated.
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt
if [ -s edited_files.txt ]; then
echo "Exiting migration files cannot be modified."
cat edited_files.txt
exit 1
fi

View File

@@ -1,59 +0,0 @@
name: Rename Migrations
on:
pull_request:
types: [closed]
paths:
- 'backend/src/db/migrations/**'
jobs:
rename:
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get list of newly added files in migration folder
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
if [ ! -s added_files.txt ]; then
echo "No new files added. Skipping"
echo "SKIP_RENAME=true" >> $GITHUB_ENV
fi
- name: Script to rename migrations
if: env.SKIP_RENAME != 'true'
run: python .github/resources/rename_migration_files.py
- name: Commit and push changes
if: env.SKIP_RENAME != 'true'
run: |
git config user.name github-actions
git config user.email github-actions@github.com
git add ./backend/src/db/migrations
rm added_files.txt
git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"
- name: Get PR details
id: pr_details
run: |
PR_NUMBER=${{ github.event.pull_request.number }}
PR_MERGER=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.merged_by.login')
echo "PR Number: $PR_NUMBER"
echo "PR Merger: $PR_MERGER"
echo "pr_merger=$PR_MERGER" >> $GITHUB_OUTPUT
- name: Create Pull Request
if: env.SKIP_RENAME != 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
title: 'GH Action: rename new migration file timestamp'
branch-suffix: timestamp
reviewers: ${{ steps.pr_details.outputs.pr_merger }}

1 .gitignore vendored
View File

@@ -69,3 +69,4 @@ frontend-build
*.tgz
cli/infisical-merge
cli/test/infisical-merge
/backend/binary

View File

@@ -5,3 +5,4 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/M
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651

View File

@@ -0,0 +1,4 @@
{
"presets": ["@babel/preset-env", "@babel/preset-react"],
"plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
}

View File

@@ -3,7 +3,6 @@ import "ts-node/register";
import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";
import { seedData1 } from "@app/db/seed-data";
@@ -15,6 +14,7 @@ import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -23,23 +23,21 @@ export default {
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = knex({
client: "pg",
connection: cfg.DB_CONNECTION_URI,
migrations: {
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
seeds: {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
}
const db = initDbConnection({
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
});
try {
await db.migrate.latest();
await db.seed.run();
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
@@ -74,7 +72,14 @@ export default {
// @ts-expect-error type
delete globalThis.jwtToken;
// called after all tests with this env have been run
await db.migrate.rollback({}, true);
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.destroy();
}
};

6170 backend/package-lock.json generated

File diff suppressed because it is too large.

View File

@@ -3,11 +3,39 @@
"version": "1.0.0",
"description": "",
"main": "./dist/main.mjs",
"bin": "dist/main.js",
"pkg": {
"scripts": [
"dist/**/*.js",
"../frontend/node_modules/next/**/*.js",
"../frontend/.next/*/**/*.js",
"../frontend/node_modules/next/dist/server/**/*.js",
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
],
"assets": [
"dist/**",
"!dist/**/*.js",
"node_modules/**",
"../frontend/node_modules/**",
"../frontend/.next/**",
"!../frontend/node_modules/next/dist/server/**/*.js",
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
"../frontend/public/**"
],
"outputPath": "binary"
},
"scripts": {
"binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
"binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
"binary:babel-backend": " babel ./dist -d ./dist",
"binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
@@ -31,6 +59,11 @@
"author": "",
"license": "ISC",
"devDependencies": {
"@babel/cli": "^7.18.10",
"@babel/core": "^7.18.10",
"@babel/plugin-syntax-import-attributes": "^7.24.7",
"@babel/preset-env": "^7.18.10",
"@babel/preset-react": "^7.24.7",
"@types/bcrypt": "^5.0.2",
"@types/jmespath": "^0.15.2",
"@types/jsonwebtoken": "^9.0.5",
@@ -48,6 +81,8 @@
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"@yao-pkg/pkg": "^5.12.0",
"babel-plugin-transform-import-meta": "^2.2.1",
"eslint": "^8.56.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.1.0",
@@ -60,7 +95,7 @@
"pino-pretty": "^10.2.3",
"prompt-sync": "^4.2.0",
"rimraf": "^5.0.5",
"ts-node": "^10.9.1",
"ts-node": "^10.9.2",
"tsc-alias": "^1.8.8",
"tsconfig-paths": "^4.2.0",
"tsup": "^8.0.1",
@@ -71,7 +106,9 @@
},
"dependencies": {
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
@@ -89,7 +126,8 @@
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@sindresorhus/slugify": "1.1.0",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@@ -111,13 +149,14 @@
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
"jwks-rsa": "^3.1.0",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^5.0.4",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
@@ -133,6 +172,7 @@
"posthog-node": "^3.6.2",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",

View File

@@ -2,13 +2,14 @@
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"
const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for migration: ");
// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = migrationName.replace(/\s+/g, "-");
const formattedMigrationName = slugify(migrationName);
execSync(
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,

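A quick illustration of the renaming change above, assuming the pinned @sindresorhus/slugify behavior (the sample migration name is made up):

import slugify from "@sindresorhus/slugify";

// Old behavior: only whitespace was replaced, so casing and punctuation leaked into filenames
"Add KMS Keys Table!".replace(/\s+/g, "-"); // "Add-KMS-Keys-Table!"

// New behavior: lowercased, punctuation stripped, hyphen-separated
slugify("Add KMS Keys Table!"); // "add-kms-keys-table"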
View File

@@ -0,0 +1,27 @@
/* eslint-disable @typescript-eslint/no-shadow */
import fs from "node:fs";
import path from "node:path";
function replaceMjsOccurrences(directory: string) {
fs.readdir(directory, (err, files) => {
if (err) throw err;
files.forEach((file) => {
const filePath = path.join(directory, file);
if (fs.statSync(filePath).isDirectory()) {
replaceMjsOccurrences(filePath);
} else {
fs.readFile(filePath, "utf8", (err, data) => {
if (err) throw err;
const result = data.replace(/\.mjs/g, ".js");
fs.writeFile(filePath, result, "utf8", (err) => {
if (err) throw err;
// eslint-disable-next-line no-console
console.log(`Updated: ${filePath}`);
});
});
}
});
});
}
replaceMjsOccurrences("dist");

View File

@@ -9,6 +9,7 @@ import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
@@ -41,7 +42,9 @@ import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
@ -65,6 +68,7 @@ import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
declare module "fastify" {
@ -127,11 +131,13 @@ declare module "fastify" {
identity: TIdentityServiceFactory;
identityAccessToken: TIdentityAccessTokenServiceFactory;
identityProject: TIdentityProjectServiceFactory;
identityTokenAuth: TIdentityTokenAuthServiceFactory;
identityUa: TIdentityUaServiceFactory;
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@ -157,6 +163,8 @@ declare module "fastify" {
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
};
// this is exclusively for use by middlewares in which we need to inject data
// everywhere else, access it through the service layer

View File

@ -1,4 +1,4 @@
import { Knex } from "knex";
import { Knex as KnexOriginal } from "knex";
import {
TableName,
@ -59,6 +59,9 @@ import {
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate,
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
@ -92,6 +95,9 @@ import {
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate,
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate,
@ -104,6 +110,9 @@ import {
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate,
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate,
@ -119,6 +128,9 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TInternalKms,
TInternalKmsInsert,
TInternalKmsUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
@ -280,318 +292,383 @@ import {
TWebhooksUpdate
} from "@app/db/schemas";
declare module "knex" {
namespace Knex {
interface QueryInterface {
primaryNode(): KnexOriginal;
replicaNode(): KnexOriginal;
}
}
}
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.CertificateAuthority]: Knex.CompositeTableType<
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate
>;
[TableName.CertificateAuthorityCert]: Knex.CompositeTableType<
[TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate
>;
[TableName.CertificateAuthoritySecret]: Knex.CompositeTableType<
[TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate
>;
[TableName.CertificateAuthorityCrl]: Knex.CompositeTableType<
[TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: Knex.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateBody]: Knex.CompositeTableType<
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate
>;
[TableName.CertificateSecret]: Knex.CompositeTableType<
[TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate
>;
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
[TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate
>;
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
[TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate
>;
[TableName.AuthTokens]: Knex.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
[TableName.AuthTokenSession]: Knex.CompositeTableType<
[TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
[TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
TAuthTokenSessions,
TAuthTokenSessionsInsert,
TAuthTokenSessionsUpdate
>;
[TableName.BackupPrivateKey]: Knex.CompositeTableType<
[TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate
>;
[TableName.Organization]: Knex.CompositeTableType<TOrganizations, TOrganizationsInsert, TOrganizationsUpdate>;
[TableName.OrgMembership]: Knex.CompositeTableType<TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate>;
[TableName.OrgRoles]: Knex.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: Knex.CompositeTableType<
[TableName.Organization]: KnexOriginal.CompositeTableType<
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate
>;
[TableName.OrgMembership]: KnexOriginal.CompositeTableType<
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate
>;
[TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: KnexOriginal.CompositeTableType<
TIncidentContacts,
TIncidentContactsInsert,
TIncidentContactsUpdate
>;
[TableName.UserAction]: Knex.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
[TableName.SuperAdmin]: Knex.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: Knex.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: Knex.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: Knex.CompositeTableType<
[TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,
TProjectMembershipsUpdate
>;
[TableName.Environment]: Knex.CompositeTableType<
[TableName.Environment]: KnexOriginal.CompositeTableType<
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate
>;
[TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
[TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate
>;
[TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
[TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate
>;
[TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretReference]: Knex.CompositeTableType<
[TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretReference]: KnexOriginal.CompositeTableType<
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate
>;
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
[TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
TSecretBlindIndexesUpdate
>;
[TableName.SecretVersion]: Knex.CompositeTableType<TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate>;
[TableName.SecretFolder]: Knex.CompositeTableType<TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate>;
[TableName.SecretFolderVersion]: Knex.CompositeTableType<
[TableName.SecretVersion]: KnexOriginal.CompositeTableType<
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate
>;
[TableName.SecretFolder]: KnexOriginal.CompositeTableType<
TSecretFolders,
TSecretFoldersInsert,
TSecretFoldersUpdate
>;
[TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
TSecretFolderVersions,
TSecretFolderVersionsInsert,
TSecretFolderVersionsUpdate
>;
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
[TableName.RateLimit]: Knex.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
[TableName.Webhook]: Knex.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: Knex.CompositeTableType<TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate>;
[TableName.IntegrationAuth]: Knex.CompositeTableType<
[TableName.SecretSharing]: KnexOriginal.CompositeTableType<
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate
>;
[TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: KnexOriginal.CompositeTableType<
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate
>;
[TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
[TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: KnexOriginal.CompositeTableType<
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate
>;
[TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
TIntegrationAuths,
TIntegrationAuthsInsert,
TIntegrationAuthsUpdate
>;
[TableName.Identity]: Knex.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
[TableName.IdentityUniversalAuth]: Knex.CompositeTableType<
[TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
[TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType<
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate
>;
[TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
TIdentityUniversalAuths,
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate
>;
[TableName.IdentityGcpAuth]: Knex.CompositeTableType<
[TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAwsAuth]: Knex.CompositeTableType<
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate
>;
[TableName.IdentityAzureAuth]: Knex.CompositeTableType<
[TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
TIdentityAzureAuths,
TIdentityAzureAuthsInsert,
TIdentityAzureAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
[TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate
>;
[TableName.IdentityAccessToken]: Knex.CompositeTableType<
[TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate
>;
[TableName.IdentityOrgMembership]: Knex.CompositeTableType<
[TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate
>;
[TableName.IdentityProjectMembership]: Knex.CompositeTableType<
[TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
[TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate
>;
[TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
[TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate
>;
[TableName.AccessApprovalPolicy]: Knex.CompositeTableType<
[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
TAccessApprovalPolicies,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate
>;
[TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType<
[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate
>;
[TableName.AccessApprovalRequest]: Knex.CompositeTableType<
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
TAccessApprovalRequestsUpdate
>;
[TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType<
[TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
TAccessApprovalRequestsReviewers,
TAccessApprovalRequestsReviewersInsert,
TAccessApprovalRequestsReviewersUpdate
>;
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
[TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
TSecretApprovalPolicies,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate
>;
[TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType<
[TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate
>;
[TableName.SecretApprovalRequest]: Knex.CompositeTableType<
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
TSecretApprovalRequests,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsUpdate
>;
[TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType<
[TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
TSecretApprovalRequestsReviewersUpdate
>;
[TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType<
[TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate
>;
[TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType<
[TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretRotation]: Knex.CompositeTableType<
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate
>;
[TableName.SecretRotationOutput]: Knex.CompositeTableType<
[TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate
>;
[TableName.Snapshot]: Knex.CompositeTableType<TSecretSnapshots, TSecretSnapshotsInsert, TSecretSnapshotsUpdate>;
[TableName.SnapshotSecret]: Knex.CompositeTableType<
[TableName.Snapshot]: KnexOriginal.CompositeTableType<
TSecretSnapshots,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate
>;
[TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate
>;
[TableName.SnapshotFolder]: Knex.CompositeTableType<
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
TSecretSnapshotFolders,
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate
>;
[TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
[TableName.DynamicSecretLease]: Knex.CompositeTableType<
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate
>;
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.OidcConfig]: Knex.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: Knex.CompositeTableType<
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate
>;
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
TAuditLogStreams,
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate
>;
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate
>;
[TableName.GitAppOrg]: Knex.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
[TableName.SecretScanningGitRisk]: Knex.CompositeTableType<
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate
>;
[TableName.TrustedIps]: Knex.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
// Junction tables
[TableName.JnSecretTag]: Knex.CompositeTableType<
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate
>;
[TableName.SecretVersionTag]: Knex.CompositeTableType<
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
// KMS service
[TableName.KmsServerRootConfig]: Knex.CompositeTableType<
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate
>;
[TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
[TableName.InternalKms]: KnexOriginal.CompositeTableType<TInternalKms, TInternalKmsInsert, TInternalKmsUpdate>;
[TableName.ExternalKms]: KnexOriginal.CompositeTableType<TExternalKms, TExternalKmsInsert, TExternalKmsUpdate>;
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate
>;
}
}
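For context, a minimal hypothetical sketch (not part of this diff) of what the CompositeTableType registrations above provide at call sites: knex infers distinct row, insert, and update shapes per table, so query results come back typed without casts. The helper name and the db parameter are illustrative only.
import { Knex } from "knex";
import { TableName, TUsers } from "@app/db/schemas";
// `db(TableName.Users)` resolves to the CompositeTableType registered above,
// so the selected rows are inferred as TUsers without an explicit cast.
const listUsers = async (db: Knex) => db(TableName.Users).select("*");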

View File

@ -1,8 +1,38 @@
import knex from "knex";
import knex, { Knex } from "knex";
export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => {
const db = knex({
export const initDbConnection = ({
dbConnectionUri,
dbRootCert,
readReplicas = []
}: {
dbConnectionUri: string;
dbRootCert?: string;
readReplicas?: {
dbConnectionUri: string;
dbRootCert?: string;
}[];
}) => {
// akhilmhdh: the default Knex type is knex.Knex<any, any[]>, but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
// this was causing issues in files like `snapshot-dal` (`findRecursivelySnapshots`), so the any and unknown[] are stated explicitly here
// eslint-disable-next-line
let db: Knex<any, unknown[]>;
// eslint-disable-next-line
let readReplicaDbs: Knex<any, unknown[]>[];
// @ts-expect-error the QueryBuilder type is expected but our intention is to return a knex instance
knex.QueryBuilder.extend("primaryNode", () => {
return db;
});
// @ts-expect-error the QueryBuilder type is expected but our intention is to return a knex instance
knex.QueryBuilder.extend("replicaNode", () => {
if (!readReplicaDbs.length) return db;
const selectedReplica = readReplicaDbs[Math.floor(Math.random() * readReplicaDbs.length)];
return selectedReplica;
});
db = knex({
client: "pg",
connection: {
connectionString: dbConnectionUri,
@ -22,5 +52,21 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection
}
});
readReplicaDbs = readReplicas.map((el) => {
const replicaDbCertificate = el.dbRootCert || dbRootCert;
return knex({
client: "pg",
connection: {
connectionString: el.dbConnectionUri,
ssl: replicaDbCertificate
? {
rejectUnauthorized: true,
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
}
});
});
return db;
};
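A minimal usage sketch (an assumption, not code from this PR): with the QueryBuilder extension above surfaced on the knex instance via the QueryInterface augmentation earlier in this diff, DALs can route reads to a replica while writes keep going through the primary. The helper name, import paths, table, and filter below are illustrative.
import { TableName } from "@app/db/schemas";
import { TDbClient } from "@app/db";
// Reads can be routed to a randomly selected read replica; when no replicas are configured,
// replicaNode() falls back to the primary, so call sites stay the same either way.
const findActiveMemberships = async (db: TDbClient, orgId: string) =>
  db.replicaNode()(TableName.OrgMembership).where({ orgId, isActive: true }).select("*");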

View File

@ -0,0 +1,24 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
const hasAltNamesColumn = await knex.schema.hasColumn(TableName.Certificate, "altNames");
if (!hasAltNamesColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("altNames").defaultTo("");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "altNames")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("altNames");
});
}
}
}

View File

@ -0,0 +1,35 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"awsAssumeIamRoleArnCipherText"
);
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (!hasAwsAssumeRoleCipherText) t.text("awsAssumeIamRoleArnCipherText");
if (!hasAwsAssumeRoleIV) t.text("awsAssumeIamRoleArnIV");
if (!hasAwsAssumeRoleTag) t.text("awsAssumeIamRoleArnTag");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"awsAssumeIamRoleArnCipherText"
);
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (hasAwsAssumeRoleCipherText) t.dropColumn("awsAssumeIamRoleArnCipherText");
if (hasAwsAssumeRoleIV) t.dropColumn("awsAssumeIamRoleArnIV");
if (hasAwsAssumeRoleTag) t.dropColumn("awsAssumeIamRoleArnTag");
});
}
}

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
tb.specificType("enabledLoginMethods", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("enabledLoginMethods");
});
}
}

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute"))) {
await knex.schema.alterTable(TableName.LdapConfig, (tb) => {
tb.string("uniqueUserAttribute").notNullable().defaultTo("");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute")) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.dropColumn("uniqueUserAttribute");
});
}
}

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays"))) {
await knex.schema.alterTable(TableName.Project, (tb) => {
tb.integer("auditLogsRetentionDays").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays")) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("auditLogsRetentionDays");
});
}
}

View File

@ -0,0 +1,12 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
await createOnUpdateTrigger(knex, TableName.OidcConfig);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.OidcConfig);
}

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites"))) {
await knex.schema.alterTable(TableName.OrgMembership, (tb) => {
tb.specificType("projectFavorites", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites")) {
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.dropColumn("projectFavorites");
});
}
}

View File

@ -0,0 +1,53 @@
import { Knex } from "knex";
import { WebhookType } from "@app/services/webhook/webhook-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
if (await knex.schema.hasTable(TableName.Webhook)) {
await knex.schema.alterTable(TableName.Webhook, (tb) => {
if (!hasUrlCipherText) {
tb.text("urlCipherText");
}
if (!hasUrlIV) {
tb.string("urlIV");
}
if (!hasUrlTag) {
tb.string("urlTag");
}
if (!hasType) {
tb.string("type").defaultTo(WebhookType.GENERAL);
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
if (await knex.schema.hasTable(TableName.Webhook)) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasUrlCipherText) {
t.dropColumn("urlCipherText");
}
if (hasUrlIV) {
t.dropColumn("urlIV");
}
if (hasUrlTag) {
t.dropColumn("urlTag");
}
if (hasType) {
t.dropColumn("type");
}
});
}
}

View File

@ -0,0 +1,188 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// migrate secret approval policy approvers to user id
const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId");
if (!hasApproverUserId) {
// add the new fields
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
// if (hasApproverId) tb.setNullable("approverId");
tb.uuid("approverUserId");
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
});
// convert project membership id => user id
await knex(TableName.SecretApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverId`]))
});
// drop the old field
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
if (hasApproverId) tb.dropColumn("approverId");
tb.uuid("approverUserId").notNullable().alter();
});
}
// migrate secret approval request committer and statusChangeBy to user id
const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest);
const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");
const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId");
const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy");
const hasStatusChangedByUserId = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"statusChangedByUserId"
);
if (hasSecretApprovalRequestTable) {
// new fields
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
// if (hasCommitterId) tb.setNullable("committerId");
if (!hasCommitterUserId) {
tb.uuid("committerUserId");
tb.foreign("committerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
if (!hasStatusChangedByUserId) {
tb.uuid("statusChangedByUserId");
tb.foreign("statusChangedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
// copy the assigned project membership => user id to new fields
await knex(TableName.SecretApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangedByUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangeBy`]))
});
// drop old fields
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
if (hasStatusChangeBy) tb.dropColumn("statusChangeBy");
if (hasCommitterId) tb.dropColumn("committerId");
tb.uuid("committerUserId").notNullable().alter();
});
}
// migrate secret approval request reviewer to user id
const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId");
if (!hasReviewerUserId) {
// new fields
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
// if (hasMemberId) tb.setNullable("member");
tb.uuid("reviewerUserId");
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
});
// copy project membership => user id to new fields
await knex(TableName.SecretApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
reviewerUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.member`]))
});
// drop the old membership column and enforce the new user id column
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
if (hasMemberId) tb.dropColumn("member");
tb.uuid("reviewerUserId").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId");
if (hasApproverUserId) {
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
if (!hasApproverId) {
tb.uuid("approverId");
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasApproverId) {
await knex(TableName.SecretApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
tb.dropColumn("approverUserId");
tb.uuid("approverId").notNullable().alter();
});
}
}
const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest);
const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");
const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId");
const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy");
const hasStatusChangedByUser = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangedByUserId");
if (hasSecretApprovalRequestTable) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
// if (hasCommitterId) tb.uuid("committerId").notNullable().alter();
if (!hasCommitterId) {
tb.uuid("committerId");
tb.foreign("committerId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
if (!hasStatusChangeBy) {
tb.uuid("statusChangeBy");
tb.foreign("statusChangeBy").references("id").inTable(TableName.ProjectMembership).onDelete("SET NULL");
}
});
await knex(TableName.SecretApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangeBy: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
if (hasCommitterUserId) tb.dropColumn("committerUserId");
if (hasStatusChangedByUser) tb.dropColumn("statusChangedByUserId");
if (hasCommitterId) tb.uuid("committerId").notNullable().alter();
});
}
const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId");
if (hasReviewerUserId) {
if (!hasMemberId) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
// if (hasMemberId) tb.uuid("member").notNullable().alter();
tb.uuid("member");
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
});
}
await knex(TableName.SecretApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
tb.uuid("member").notNullable().alter();
tb.dropColumn("reviewerUserId");
});
}
}

View File

@ -0,0 +1,24 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
await knex.schema.createTable(TableName.IdentityTokenAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
});
await createOnUpdateTrigger(knex, TableName.IdentityTokenAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityTokenAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityTokenAuth);
}

View File

@ -0,0 +1,24 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityAccessToken)) {
const hasNameColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "name");
if (!hasNameColumn) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.string("name").nullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityAccessToken)) {
if (await knex.schema.hasColumn(TableName.IdentityAccessToken, "name")) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.dropColumn("name");
});
}
}
}

View File

@ -0,0 +1,256 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TableName } from "../schemas";
const createInternalKmsTableAndBackfillData = async (knex: Knex) => {
const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey);
const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms);
// build the internal kms table and backfill it from the old kms table
if (doesOldKmsKeyTableExist && !doesInternalKmsTableExist) {
await knex.schema.createTable(TableName.InternalKms, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.binary("encryptedKey").notNullable();
tb.string("encryptionAlgorithm").notNullable();
tb.integer("version").defaultTo(1).notNullable();
tb.uuid("kmsKeyId").unique().notNullable();
tb.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
});
// copy the old kms and backfill
const oldKmsKey = await knex(TableName.KmsKey).select("version", "encryptedKey", "encryptionAlgorithm", "id");
if (oldKmsKey.length) {
await knex(TableName.InternalKms).insert(
oldKmsKey.map((el) => ({
encryptionAlgorithm: el.encryptionAlgorithm,
encryptedKey: el.encryptedKey,
kmsKeyId: el.id,
version: el.version
}))
);
}
}
};
const renameKmsKeyVersionTableAsInternalKmsKeyVersion = async (knex: Knex) => {
const doesOldKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.KmsKeyVersion);
const doesNewKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.InternalKmsKeyVersion);
if (doesOldKmsKeyVersionTableExist && !doesNewKmsKeyVersionTableExist) {
// safe to rename because kms versioning hasn't been used yet, so no data exists
await knex.schema.renameTable(TableName.KmsKeyVersion, TableName.InternalKmsKeyVersion);
const hasKmsKeyIdColumn = await knex.schema.hasColumn(TableName.InternalKmsKeyVersion, "kmsKeyId");
const hasInternalKmsIdColumn = await knex.schema.hasColumn(TableName.InternalKmsKeyVersion, "internalKmsId");
await knex.schema.alterTable(TableName.InternalKmsKeyVersion, (tb) => {
if (hasKmsKeyIdColumn) tb.dropColumn("kmsKeyId");
if (!hasInternalKmsIdColumn) {
tb.uuid("internalKmsId").notNullable();
tb.foreign("internalKmsId").references("id").inTable(TableName.InternalKms).onDelete("CASCADE");
}
});
}
};
const createExternalKmsKeyTable = async (knex: Knex) => {
const doesExternalKmsServiceExist = await knex.schema.hasTable(TableName.ExternalKms);
if (!doesExternalKmsServiceExist) {
await knex.schema.createTable(TableName.ExternalKms, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("provider").notNullable();
tb.binary("encryptedProviderInputs").notNullable();
tb.string("status");
tb.string("statusDetails");
tb.uuid("kmsKeyId").unique().notNullable();
tb.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
});
}
};
const removeNonRequiredFieldsFromKmsKeyTableAndBackfillRequiredData = async (knex: Knex) => {
const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey);
// trim the old kms key table down to container fields and backfill the required data
if (doesOldKmsKeyTableExist) {
const hasSlugColumn = await knex.schema.hasColumn(TableName.KmsKey, "slug");
const hasEncryptedKeyColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptedKey");
const hasEncryptionAlgorithmColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptionAlgorithm");
const hasVersionColumn = await knex.schema.hasColumn(TableName.KmsKey, "version");
const hasTimestamps = await knex.schema.hasColumn(TableName.KmsKey, "createdAt");
const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
await knex.schema.alterTable(TableName.KmsKey, (tb) => {
if (!hasSlugColumn) tb.string("slug", 32);
if (hasEncryptedKeyColumn) tb.dropColumn("encryptedKey");
if (hasEncryptionAlgorithmColumn) tb.dropColumn("encryptionAlgorithm");
if (hasVersionColumn) tb.dropColumn("version");
if (!hasTimestamps) tb.timestamps(true, true, true);
});
// backfill orgId on all kms keys because it is about to become non-nullable
if (hasProjectId && hasOrgId) {
await knex(TableName.KmsKey)
.whereNull("orgId")
.update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
orgId: knex(TableName.Project)
.select("orgId")
.where("id", knex.raw("??", [`${TableName.KmsKey}.projectId`]))
});
}
// backfill slugs in kms
const missingSlugs = await knex(TableName.KmsKey).whereNull("slug").select("id");
if (missingSlugs.length) {
await knex(TableName.KmsKey)
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
.insert(missingSlugs.map(({ id }) => ({ id, slug: slugify(alphaNumericNanoId(8).toLowerCase()) })))
.onConflict("id")
.merge();
}
await knex.schema.alterTable(TableName.KmsKey, (tb) => {
if (hasOrgId) tb.uuid("orgId").notNullable().alter();
tb.string("slug", 32).notNullable().alter();
if (hasProjectId) tb.dropColumn("projectId");
if (hasOrgId) tb.unique(["orgId", "slug"]);
});
}
};
/*
 * The goal of this migration is to split the existing kms key into three tables:
 * the kms-key table becomes a container table that is referenced by
 * the internal kms key table and the external kms table.
 */
export async function up(knex: Knex): Promise<void> {
await createInternalKmsTableAndBackfillData(knex);
await renameKmsKeyVersionTableAsInternalKmsKeyVersion(knex);
await removeNonRequiredFieldsFromKmsKeyTableAndBackfillRequiredData(knex);
await createExternalKmsKeyTable(knex);
const doesOrgKmsKeyExist = await knex.schema.hasColumn(TableName.Organization, "kmsDefaultKeyId");
if (!doesOrgKmsKeyExist) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.uuid("kmsDefaultKeyId").nullable();
tb.foreign("kmsDefaultKeyId").references("id").inTable(TableName.KmsKey);
});
}
const doesProjectKmsSecretManagerKeyExist = await knex.schema.hasColumn(TableName.Project, "kmsSecretManagerKeyId");
if (!doesProjectKmsSecretManagerKeyExist) {
await knex.schema.alterTable(TableName.Project, (tb) => {
tb.uuid("kmsSecretManagerKeyId").nullable();
tb.foreign("kmsSecretManagerKeyId").references("id").inTable(TableName.KmsKey);
});
}
}
const renameInternalKmsKeyVersionBackToKmsKeyVersion = async (knex: Knex) => {
const doesInternalKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.InternalKmsKeyVersion);
const doesKmsKeyVersionTableExist = await knex.schema.hasTable(TableName.KmsKeyVersion);
if (doesInternalKmsKeyVersionTableExist && !doesKmsKeyVersionTableExist) {
// safe to rename because kms versioning hasn't been used yet, so no data exists
await knex.schema.renameTable(TableName.InternalKmsKeyVersion, TableName.KmsKeyVersion);
const hasInternalKmsIdColumn = await knex.schema.hasColumn(TableName.KmsKeyVersion, "internalKmsId");
const hasKmsKeyIdColumn = await knex.schema.hasColumn(TableName.KmsKeyVersion, "kmsKeyId");
await knex.schema.alterTable(TableName.KmsKeyVersion, (tb) => {
if (hasInternalKmsIdColumn) tb.dropColumn("internalKmsId");
if (!hasKmsKeyIdColumn) {
tb.uuid("kmsKeyId").notNullable();
tb.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
}
});
}
};
const bringBackKmsKeyFields = async (knex: Knex) => {
const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey);
const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms);
if (doesOldKmsKeyTableExist && doesInternalKmsTableExist) {
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
const hasEncryptedKeyColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptedKey");
const hasEncryptionAlgorithmColumn = await knex.schema.hasColumn(TableName.KmsKey, "encryptionAlgorithm");
const hasVersionColumn = await knex.schema.hasColumn(TableName.KmsKey, "version");
const hasNullableOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasProjectIdColumn = await knex.schema.hasColumn(TableName.KmsKey, "projectId");
await knex.schema.alterTable(TableName.KmsKey, (tb) => {
if (!hasEncryptedKeyColumn) tb.binary("encryptedKey");
if (!hasEncryptionAlgorithmColumn) tb.string("encryptionAlgorithm");
if (!hasVersionColumn) tb.integer("version").defaultTo(1);
if (hasNullableOrgId) tb.uuid("orgId").nullable().alter();
if (!hasProjectIdColumn) {
tb.string("projectId");
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
}
if (hasSlug) tb.dropColumn("slug");
});
}
};
const backfillKmsKeyFromInternalKmsTable = async (knex: Knex) => {
const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey);
const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms);
if (doesInternalKmsTableExist && doesOldKmsKeyTableExist) {
// backfill kms key with internal kms data
await knex(TableName.KmsKey).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
encryptedKey: knex(TableName.InternalKms)
.select("encryptedKey")
.where("kmsKeyId", knex.raw("??", [`${TableName.KmsKey}.id`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
encryptionAlgorithm: knex(TableName.InternalKms)
.select("encryptionAlgorithm")
.where("kmsKeyId", knex.raw("??", [`${TableName.KmsKey}.id`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectId: knex(TableName.Project)
.select("id")
.where("kmsCertificateKeyId", knex.raw("??", [`${TableName.KmsKey}.id`]))
});
}
};
export async function down(knex: Knex): Promise<void> {
const doesOrgKmsKeyExist = await knex.schema.hasColumn(TableName.Organization, "kmsDefaultKeyId");
if (doesOrgKmsKeyExist) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.dropColumn("kmsDefaultKeyId");
});
}
const doesProjectKmsSecretManagerKeyExist = await knex.schema.hasColumn(TableName.Project, "kmsSecretManagerKeyId");
if (doesProjectKmsSecretManagerKeyExist) {
await knex.schema.alterTable(TableName.Project, (tb) => {
tb.dropColumn("kmsSecretManagerKeyId");
});
}
await renameInternalKmsKeyVersionBackToKmsKeyVersion(knex);
await bringBackKmsKeyFields(knex);
await backfillKmsKeyFromInternalKmsTable(knex);
const doesOldKmsKeyTableExist = await knex.schema.hasTable(TableName.KmsKey);
if (doesOldKmsKeyTableExist) {
await knex.schema.alterTable(TableName.KmsKey, (tb) => {
tb.binary("encryptedKey").notNullable().alter();
tb.string("encryptionAlgorithm").notNullable().alter();
});
}
const doesInternalKmsTableExist = await knex.schema.hasTable(TableName.InternalKms);
if (doesInternalKmsTableExist) await knex.schema.dropTable(TableName.InternalKms);
const doesExternalKmsServiceExist = await knex.schema.hasTable(TableName.ExternalKms);
if (doesExternalKmsServiceExist) await knex.schema.dropTable(TableName.ExternalKms);
}
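A hypothetical read-side sketch of the layout this migration produces (helper name and import path are illustrative, not code from this PR): the kms-key row acts as the container, and whichever backing row exists carries either the internal key material or the external provider inputs.
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
// Resolve an org's KMS key by slug together with its internal or external backing row, if any.
const findKmsKeyBySlug = (db: Knex, orgId: string, slug: string) =>
  db(TableName.KmsKey)
    .where({ [`${TableName.KmsKey}.orgId`]: orgId, [`${TableName.KmsKey}.slug`]: slug })
    .leftJoin(TableName.InternalKms, `${TableName.InternalKms}.kmsKeyId`, `${TableName.KmsKey}.id`)
    .leftJoin(TableName.ExternalKms, `${TableName.ExternalKms}.kmsKeyId`, `${TableName.KmsKey}.id`)
    .first();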

View File

@ -0,0 +1,34 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityOidcAuth))) {
await knex.schema.createTable(TableName.IdentityOidcAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("oidcDiscoveryUrl").notNullable();
t.text("encryptedCaCert").notNullable();
t.string("caCertIV").notNullable();
t.string("caCertTag").notNullable();
t.string("boundIssuer").notNullable();
t.string("boundAudiences").notNullable();
t.jsonb("boundClaims").notNullable();
t.string("boundSubject");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.IdentityOidcAuth);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityOidcAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityOidcAuth);
}

View File

@ -0,0 +1,25 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.OrgMembership)) {
const doesUserIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "userId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "orgId");
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.boolean("isActive").notNullable().defaultTo(true);
if (doesUserIdExist && doesOrgIdExist) t.index(["userId", "orgId"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.OrgMembership)) {
const doesUserIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "userId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.OrgMembership, "orgId");
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.dropColumn("isActive");
if (doesUserIdExist && doesOrgIdExist) t.dropIndex(["userId", "orgId"]);
});
}
}

View File

@ -19,7 +19,8 @@ export const CertificatesSchema = z.object({
notBefore: z.date(),
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional()
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

View File

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const ExternalKmsSchema = z.object({
id: z.string().uuid(),
provider: z.string(),
encryptedProviderInputs: zodBuffer,
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
kmsKeyId: z.string().uuid()
});
export type TExternalKms = z.infer<typeof ExternalKmsSchema>;
export type TExternalKmsInsert = Omit<z.input<typeof ExternalKmsSchema>, TImmutableDBKeys>;
export type TExternalKmsUpdate = Partial<Omit<z.input<typeof ExternalKmsSchema>, TImmutableDBKeys>>;

View File

@ -19,7 +19,8 @@ export const IdentityAccessTokensSchema = z.object({
identityUAClientSecretId: z.string().nullable().optional(),
identityId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
name: z.string().nullable().optional()
});
export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;

View File

@ -0,0 +1,31 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityOidcAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
oidcDiscoveryUrl: z.string(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
export type TIdentityOidcAuthsInsert = Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>;
export type TIdentityOidcAuthsUpdate = Partial<Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>>;
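Because the token fields use z.coerce.number() with defaults, numeric strings coming out of the driver parse cleanly and omitted TTLs fall back to 7200. A small illustrative parse (all values below are made up):
import { IdentityOidcAuthsSchema } from "@app/db/schemas";
// Illustrative only: accessTokenTTL is coerced from a string, and the omitted
// accessTokenMaxTTL / accessTokenNumUsesLimit pick up their defaults.
const row = IdentityOidcAuthsSchema.parse({
  id: "0b9f6a6e-1111-4222-8333-444455556666",
  accessTokenTTL: "3600",
  accessTokenTrustedIps: [{ ipAddress: "0.0.0.0/0" }],
  identityId: "1c2d3e4f-1111-4222-8333-444455556666",
  oidcDiscoveryUrl: "https://token.actions.githubusercontent.com",
  encryptedCaCert: "",
  caCertIV: "",
  caCertTag: "",
  boundIssuer: "https://token.actions.githubusercontent.com",
  boundAudiences: "infisical",
  boundClaims: {},
  createdAt: new Date(),
  updatedAt: new Date()
});
// row.accessTokenTTL === 3600, row.accessTokenMaxTTL === 7200 (default applied)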

View File

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityTokenAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid()
});
export type TIdentityTokenAuths = z.infer<typeof IdentityTokenAuthsSchema>;
export type TIdentityTokenAuthsInsert = Omit<z.input<typeof IdentityTokenAuthsSchema>, TImmutableDBKeys>;
export type TIdentityTokenAuthsUpdate = Partial<Omit<z.input<typeof IdentityTokenAuthsSchema>, TImmutableDBKeys>>;

View File

@ -17,6 +17,7 @@ export * from "./certificate-secrets";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./external-kms";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
@ -28,15 +29,18 @@ export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-oidc-auths";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
export * from "./identity-project-membership-role";
export * from "./identity-project-memberships";
export * from "./identity-token-auths";
export * from "./identity-ua-client-secrets";
export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./internal-kms";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";

View File

@ -29,7 +29,10 @@ export const IntegrationAuthsSchema = z.object({
keyEncoding: z.string(),
projectId: z.string(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
awsAssumeIamRoleArnCipherText: z.string().nullable().optional(),
awsAssumeIamRoleArnIV: z.string().nullable().optional(),
awsAssumeIamRoleArnTag: z.string().nullable().optional()
});
export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;

View File

@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const InternalKmsKeyVersionSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
version: z.number(),
internalKmsId: z.string().uuid()
});
export type TInternalKmsKeyVersion = z.infer<typeof InternalKmsKeyVersionSchema>;
export type TInternalKmsKeyVersionInsert = Omit<z.input<typeof InternalKmsKeyVersionSchema>, TImmutableDBKeys>;
export type TInternalKmsKeyVersionUpdate = Partial<Omit<z.input<typeof InternalKmsKeyVersionSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,22 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const InternalKmsSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
encryptionAlgorithm: z.string(),
version: z.number().default(1),
kmsKeyId: z.string().uuid()
});
export type TInternalKms = z.infer<typeof InternalKmsSchema>;
export type TInternalKmsInsert = Omit<z.input<typeof InternalKmsSchema>, TImmutableDBKeys>;
export type TInternalKmsUpdate = Partial<Omit<z.input<typeof InternalKmsSchema>, TImmutableDBKeys>>;

View File

@ -5,20 +5,17 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeysSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
encryptionAlgorithm: z.string(),
version: z.number().default(1),
description: z.string().nullable().optional(),
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
projectId: z.string().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
slug: z.string()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

View File

@ -26,7 +26,8 @@ export const LdapConfigsSchema = z.object({
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default("")
searchFilter: z.string().default(""),
uniqueUserAttribute: z.string().default("")
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

View File

@ -53,12 +53,14 @@ export enum TableName {
Webhook = "webhooks",
Identity = "identities",
IdentityAccessToken = "identity_access_tokens",
IdentityTokenAuth = "identity_token_auths",
IdentityUniversalAuth = "identity_universal_auths",
IdentityKubernetesAuth = "identity_kubernetes_auths",
IdentityGcpAuth = "identity_gcp_auths",
IdentityAzureAuth = "identity_azure_auths",
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAwsAuth = "identity_aws_auths",
IdentityOidcAuth = "identity_oidc_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
@ -94,6 +96,10 @@ export enum TableName {
// KMS Service
KmsServerRootConfig = "kms_root_config",
KmsKey = "kms_keys",
ExternalKms = "external_kms",
InternalKms = "internal_kms",
InternalKmsKeyVersion = "internal_kms_key_version",
// @deprecated
KmsKeyVersion = "kms_key_versions"
}
@ -161,9 +167,11 @@ export enum ProjectUpgradeStatus {
}
export enum IdentityAuthMethod {
TOKEN_AUTH = "token-auth",
Univeral = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth"
AZURE_AUTH = "azure-auth",
OIDC_AUTH = "oidc-auth"
}

View File

@ -16,7 +16,9 @@ export const OrgMembershipsSchema = z.object({
updatedAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid(),
roleId: z.string().uuid().nullable().optional()
roleId: z.string().uuid().nullable().optional(),
projectFavorites: z.string().array().nullable().optional(),
isActive: z.boolean()
});
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;

View File

@ -15,7 +15,8 @@ export const OrganizationsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
authEnforced: z.boolean().default(false).nullable().optional(),
scimEnabled: z.boolean().default(false).nullable().optional()
scimEnabled: z.boolean().default(false).nullable().optional(),
kmsDefaultKeyId: z.string().uuid().nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

View File

@ -17,8 +17,10 @@ export const ProjectsSchema = z.object({
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional(),
pitVersionLimit: z.number().default(10),
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
pitVersionLimit: z.number().default(10)
auditLogsRetentionDays: z.number().nullable().optional(),
kmsSecretManagerKeyId: z.string().uuid().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -9,10 +9,10 @@ import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesApproversSchema = z.object({
id: z.string().uuid(),
approverId: z.string().uuid(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
approverUserId: z.string().uuid()
});
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;

View File

@ -9,11 +9,11 @@ import { TImmutableDBKeys } from "./models";
export const SecretApprovalRequestsReviewersSchema = z.object({
id: z.string().uuid(),
member: z.string().uuid(),
status: z.string(),
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;

View File

@ -15,11 +15,11 @@ export const SecretApprovalRequestsSchema = z.object({
conflicts: z.unknown().nullable().optional(),
slug: z.string(),
folderId: z.string().uuid(),
statusChangeBy: z.string().uuid().nullable().optional(),
committerId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
isReplicated: z.boolean().nullable().optional()
isReplicated: z.boolean().nullable().optional(),
committerUserId: z.string().uuid(),
statusChangedByUserId: z.string().uuid().nullable().optional()
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;

View File

@ -18,7 +18,8 @@ export const SuperAdminSchema = z.object({
trustSamlEmails: z.boolean().default(false).nullable().optional(),
trustLdapEmails: z.boolean().default(false).nullable().optional(),
trustOidcEmails: z.boolean().default(false).nullable().optional(),
defaultAuthOrgId: z.string().uuid().nullable().optional()
defaultAuthOrgId: z.string().uuid().nullable().optional(),
enabledLoginMethods: z.string().array().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -21,7 +21,11 @@ export const WebhooksSchema = z.object({
keyEncoding: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid()
envId: z.string().uuid(),
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional()
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;

View File

@ -29,7 +29,8 @@ export async function seed(knex: Knex): Promise<void> {
role: OrgMembershipRole.Admin,
orgId: org.id,
status: OrgMembershipStatus.Accepted,
userId: user.id
userId: user.id,
isActive: true
}
]);
}

View File

@ -0,0 +1,190 @@
import { z } from "zod";
import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import {
ExternalKmsAwsSchema,
ExternalKmsInputSchema,
ExternalKmsInputUpdateSchema
} from "@app/ee/services/external-kms/providers/model";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const sanitizedExternalSchema = KmsKeysSchema.extend({
external: ExternalKmsSchema.pick({
id: true,
status: true,
statusDetails: true,
provider: true
})
});
const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
external: ExternalKmsSchema.pick({
id: true,
status: true,
statusDetails: true,
provider: true
}).extend({
providerInput: ExternalKmsAwsSchema
})
});
export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
slug: z.string().min(1).trim().toLowerCase().optional(),
description: z.string().min(1).trim().optional(),
provider: ExternalKmsInputSchema
}),
response: {
200: z.object({
externalKms: sanitizedExternalSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
slug: req.body.slug,
provider: req.body.provider,
description: req.body.description
});
return { externalKms };
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string().trim().min(1)
}),
body: z.object({
slug: z.string().min(1).trim().toLowerCase().optional(),
description: z.string().min(1).trim().optional(),
provider: ExternalKmsInputUpdateSchema
}),
response: {
200: z.object({
externalKms: sanitizedExternalSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.updateById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
slug: req.body.slug,
provider: req.body.provider,
description: req.body.description,
id: req.params.id
});
return { externalKms };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string().trim().min(1)
}),
response: {
200: z.object({
externalKms: sanitizedExternalSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.deleteById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
return { externalKms };
}
});
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string().trim().min(1)
}),
response: {
200: z.object({
externalKms: sanitizedExternalSchemaForGetById
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
return { externalKms };
}
});
server.route({
method: "GET",
url: "/slug/:slug",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
slug: z.string().trim().min(1)
}),
response: {
200: z.object({
externalKms: sanitizedExternalSchemaForGetById
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.findBySlug({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
slug: req.params.slug
});
return { externalKms };
}
});
};
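Worth noting: sanitizedExternalSchema picks only non-sensitive columns, so encryptedProviderInputs never appears in responses, and only the GET-by-id and GET-by-slug routes attach the decrypted providerInput. A hedged sketch of mounting the router (the prefix is an assumption; it is not shown in this diff):
// Hypothetical registration; the real route prefix is defined elsewhere in the server setup.
await server.register(registerExternalKmsRouter, { prefix: "/api/v1/external-kms" });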

View File

@ -70,10 +70,13 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
}
const externalId = ldapConfig.uniqueUserAttribute ? user[ldapConfig.uniqueUserAttribute] : user.uidNumber;
const username = ldapConfig.uniqueUserAttribute ? externalId : user.uid;
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
externalId,
username,
ldapConfigId: ldapConfig.id,
externalId: user.uidNumber,
username: user.uid,
firstName: user.givenName ?? user.cn ?? "",
lastName: user.sn ?? "",
email: user.mail,
@ -138,6 +141,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
url: z.string(),
bindDN: z.string(),
bindPass: z.string(),
uniqueUserAttribute: z.string(),
searchBase: z.string(),
searchFilter: z.string(),
groupSearchBase: z.string(),
@ -172,6 +176,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim().default("uidNumber"),
searchBase: z.string().trim(),
searchFilter: z.string().trim().default("(uid={{username}})"),
groupSearchBase: z.string().trim(),
@ -213,6 +218,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim(),
searchBase: z.string().trim(),
searchFilter: z.string().trim(),
groupSearchBase: z.string().trim(),

View File

@ -186,7 +186,13 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
})
),
displayName: z.string().trim(),
active: z.boolean()
active: z.boolean(),
groups: z.array(
z.object({
value: z.string().trim(),
display: z.string().trim()
})
)
})
}
},
@ -475,10 +481,13 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
}),
z.object({
op: z.literal("add"),
value: z.object({
value: z.string().trim(),
display: z.string().trim().optional()
})
path: z.string().trim(),
value: z.array(
z.object({
value: z.string().trim(),
display: z.string().trim().optional()
})
)
})
])
)
@ -569,7 +578,13 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
})
),
displayName: z.string().trim(),
active: z.boolean()
active: z.boolean(),
groups: z.array(
z.object({
value: z.string().trim(),
display: z.string().trim()
})
)
})
}
},
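With this change, SCIM user payloads now carry a groups array, and group PATCH "add" operations must specify an explicit path plus an array of members, which matches how providers such as Okta and Azure AD emit SCIM group updates. An illustrative operation object accepted by the updated union (the request envelope around the operations array is not shown in this hunk; IDs are made up):
// Illustrative "add members" operation.
const addMembersOp = {
  op: "add" as const,
  path: "members",
  value: [
    { value: "scim-user-123", display: "jane@example.com" },
    { value: "scim-user-456" } // display is optional
  ]
};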

View File

@ -25,10 +25,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val)),
approvers: z.string().array().min(1),
approverUserIds: z.string().array().min(1),
approvals: z.number().min(1).default(1)
})
.refine((data) => data.approvals <= data.approvers.length, {
.refine((data) => data.approvals <= data.approverUserIds.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
@ -66,7 +66,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
body: z
.object({
name: z.string().optional(),
approvers: z.string().array().min(1),
approverUserIds: z.string().array().min(1),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
@ -74,7 +74,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
})
.refine((data) => data.approvals <= data.approvers.length, {
.refine((data) => data.approvals <= data.approverUserIds.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
@ -139,7 +139,15 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
}),
response: {
200: z.object({
approvals: sapPubSchema.merge(z.object({ approvers: z.string().array() })).array()
approvals: sapPubSchema
.extend({
userApprovers: z
.object({
userId: z.string()
})
.array()
})
.array()
})
}
},
@ -170,7 +178,11 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
}),
response: {
200: z.object({
policy: sapPubSchema.merge(z.object({ approvers: z.string().array() })).optional()
policy: sapPubSchema
.extend({
userApprovers: z.object({ userId: z.string() }).array()
})
.optional()
})
}
},
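The rename from approvers to approverUserIds means clients now submit user IDs directly, and the refine() still rejects policies that ask for more approvals than there are approvers. An illustrative body fragment (only fields visible in this hunk; anything else the route requires is omitted):
// Illustrative only.
const policyBody = {
  secretPath: "/production/", // trailing slash is stripped by the transform above
  approverUserIds: ["user-id-1", "user-id-2", "user-id-3"],
  approvals: 2 // must be <= approverUserIds.length or the refine() fails
};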

View File

@ -6,7 +6,8 @@ import {
SecretApprovalRequestsSecretsSchema,
SecretsSchema,
SecretTagsSchema,
SecretVersionsSchema
SecretVersionsSchema,
UsersSchema
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
@ -14,6 +15,15 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const approvalRequestUser = z.object({ userId: z.string() }).merge(
UsersSchema.pick({
email: true,
firstName: true,
lastName: true,
username: true
})
);
export const registerSecretApprovalRequestRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
@ -41,9 +51,10 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
reviewers: z.object({ userId: z.string(), status: z.string() }).array(),
approvers: z.string().array()
}).array()
})
@ -195,7 +206,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
type: isClosing ? EventType.SECRET_APPROVAL_CLOSED : EventType.SECRET_APPROVAL_REOPENED,
// eslint-disable-next-line
metadata: {
[isClosing ? ("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangeBy as string,
[isClosing ? ("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangedByUserId as string,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug
// eslint-disable-next-line
@ -216,6 +227,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
})
.array()
.optional();
server.route({
method: "GET",
url: "/:id",
@ -235,12 +247,13 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable()
}),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
secretPath: z.string(),
commits: SecretApprovalRequestsSecretsSchema.omit({ secretBlindIndex: true })
.merge(

View File

@ -32,7 +32,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await accessApprovalPolicyFindQuery(tx || db, {
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.AccessApprovalPolicy}.id` as "id"]: id
});
const formatedDoc = mergeOneToManyRelation(
@ -54,7 +54,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
try {
const docs = await accessApprovalPolicyFindQuery(tx || db, filter);
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
const formatedDoc = mergeOneToManyRelation(
docs,
"id",

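The recurring change across these DAL files routes read-only queries to a read replica whenever the caller did not pass a transaction; anything inside a transaction stays on the primary so it sees its own writes. The pattern in isolation (the table name is chosen only for illustration):
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
// Sketch of the read-routing pattern used throughout this diff.
const findPolicyById = async (db: TDbClient, id: string, tx?: Knex) =>
  (tx || db.replicaNode())(TableName.AccessApprovalPolicy).where({ id }).first();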
View File

@ -14,7 +14,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => {
try {
const docs = await db(TableName.AccessApprovalRequest)
const docs = await db
.replicaNode()(TableName.AccessApprovalRequest)
.whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
.leftJoin(
@ -170,7 +171,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db);
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
const docs = await sql;
const formatedDoc = sqlNestRelationships({
data: docs,
@ -207,7 +208,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
const getCount = async ({ projectId }: { projectId: string }) => {
try {
const accessRequests = await db(TableName.AccessApprovalRequest)
const accessRequests = await db
.replicaNode()(TableName.AccessApprovalRequest)
.leftJoin(
TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalRequest}.policyId`,

View File

@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
@ -27,7 +28,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
tx?: Knex
) => {
try {
const sqlQuery = (tx || db)(TableName.AuditLog)
const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
.where(
stripUndefinedInWhere({
projectId,
@ -55,13 +56,34 @@ export const auditLogDALFactory = (db: TDbClient) => {
// delete all audit log that have expired
const pruneAuditLog = async (tx?: Knex) => {
try {
const today = new Date();
const docs = await (tx || db)(TableName.AuditLog).where("expiresAt", "<", today).del();
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "PruneAuditLog" });
}
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
do {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
.where("expiresAt", "<", today)
.select("id")
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 100); // time to breathe for db
});
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
}
} while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};
return { ...auditLogOrm, pruneAuditLog, find };
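pruneAuditLog now deletes expired rows in bounded batches of 10,000, retries a failed batch up to three times, and pauses 100 ms between batches so the cleanup never holds one long lock over the whole table. The delete-by-subquery core, shown on its own (a sketch, not the exact production code):
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
// Sketch: remove one batch of expired audit logs and return the deleted ids.
const deleteExpiredAuditLogBatch = async (db: Knex, batchSize = 10000) => {
  const expiredIds = db(TableName.AuditLog)
    .where("expiresAt", "<", new Date())
    .select("id")
    .limit(batchSize);
  return db(TableName.AuditLog).whereIn("id", expiredIds).del().returning("id");
};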

View File

@ -45,18 +45,29 @@ export const auditLogQueueServiceFactory = ({
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
const MS_IN_DAY = 24 * 60 * 60 * 1000;
let project;
if (!orgId) {
// orgId and projectId will never both be undefined
// TODO(akhilmhdh): use caching here in dal to avoid db calls
const project = await projectDAL.findById(projectId as string);
project = await projectDAL.findById(projectId as string);
orgId = project.orgId;
}
const plan = await licenseService.getPlan(orgId);
const ttl = plan.auditLogsRetentionDays * MS_IN_DAY;
// skip inserting if audit log retention is 0 meaning it's not supported
if (ttl === 0) return;
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0 meaning it's not supported
return;
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,

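The effective retention is the smaller of the project-level setting and the plan's limit, so a project can shorten, but never extend, what the plan allows. The same calculation in isolation:
// Sketch of the retention logic above.
const MS_IN_DAY = 24 * 60 * 60 * 1000;
const effectiveRetentionDays = (planDays: number, projectDays?: number | null) =>
  projectDays && projectDays < planDays ? projectDays : planDays;
const ttl = effectiveRetentionDays(30, 7) * MS_IN_DAY; // 7 days expressed in milliseconds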
View File

@ -45,6 +45,7 @@ export enum EventType {
CREATE_SECRETS = "create-secrets",
UPDATE_SECRET = "update-secret",
UPDATE_SECRETS = "update-secrets",
MOVE_SECRETS = "move-secrets",
DELETE_SECRET = "delete-secret",
DELETE_SECRETS = "delete-secrets",
GET_WORKSPACE_KEY = "get-workspace-key",
@ -65,25 +66,43 @@ export enum EventType {
ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
REVOKE_IDENTITY_UNIVERSAL_AUTH = "revoke-identity-universal-auth",
CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth",
UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth",
GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth",
ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth",
UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth",
GET_IDENTITY_TOKEN_AUTH = "get-identity-token-auth",
REVOKE_IDENTITY_TOKEN_AUTH = "revoke-identity-token-auth",
LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
LOGIN_IDENTITY_OIDC_AUTH = "login-identity-oidc-auth",
ADD_IDENTITY_OIDC_AUTH = "add-identity-oidc-auth",
UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth",
GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth",
CREATE_ENVIRONMENT = "create-environment",
UPDATE_ENVIRONMENT = "update-environment",
DELETE_ENVIRONMENT = "delete-environment",
@ -222,6 +241,17 @@ interface UpdateSecretBatchEvent {
};
}
interface MoveSecretsEvent {
type: EventType.MOVE_SECRETS;
metadata: {
sourceEnvironment: string;
sourceSecretPath: string;
destinationEnvironment: string;
destinationSecretPath: string;
secretIds: string[];
};
}
interface DeleteSecretEvent {
type: EventType.DELETE_SECRET;
metadata: {
@ -434,6 +464,73 @@ interface GetIdentityUniversalAuthEvent {
};
}
interface DeleteIdentityUniversalAuthEvent {
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH;
metadata: {
identityId: string;
};
}
interface CreateTokenIdentityTokenAuthEvent {
type: EventType.CREATE_TOKEN_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
identityAccessTokenId: string;
};
}
interface UpdateTokenIdentityTokenAuthEvent {
type: EventType.UPDATE_TOKEN_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
tokenId: string;
name?: string;
};
}
interface GetTokensIdentityTokenAuthEvent {
type: EventType.GET_TOKENS_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
};
}
interface AddIdentityTokenAuthEvent {
type: EventType.ADD_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityTokenAuthEvent {
type: EventType.UPDATE_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityTokenAuthEvent {
type: EventType.GET_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
};
}
interface DeleteIdentityTokenAuthEvent {
type: EventType.REVOKE_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityKubernetesAuthEvent {
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH;
metadata: {
@ -457,6 +554,13 @@ interface AddIdentityKubernetesAuthEvent {
};
}
interface DeleteIdentityKubernetesAuthEvent {
type: EventType.REVOKE_IDENTITY_KUBERNETES_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityKubernetesAuthEvent {
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH;
metadata: {
@ -493,6 +597,14 @@ interface GetIdentityUniversalAuthClientSecretsEvent {
};
}
interface GetIdentityUniversalAuthClientSecretByIdEvent {
type: EventType.GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID;
metadata: {
identityId: string;
clientSecretId: string;
};
}
interface RevokeIdentityUniversalAuthClientSecretEvent {
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET;
metadata: {
@ -525,6 +637,13 @@ interface AddIdentityGcpAuthEvent {
};
}
interface DeleteIdentityGcpAuthEvent {
type: EventType.REVOKE_IDENTITY_GCP_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityGcpAuthEvent {
type: EventType.UPDATE_IDENTITY_GCP_AUTH;
metadata: {
@ -570,6 +689,13 @@ interface AddIdentityAwsAuthEvent {
};
}
interface DeleteIdentityAwsAuthEvent {
type: EventType.REVOKE_IDENTITY_AWS_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAwsAuthEvent {
type: EventType.UPDATE_IDENTITY_AWS_AUTH;
metadata: {
@ -613,6 +739,13 @@ interface AddIdentityAzureAuthEvent {
};
}
interface DeleteIdentityAzureAuthEvent {
type: EventType.REVOKE_IDENTITY_AZURE_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAzureAuthEvent {
type: EventType.UPDATE_IDENTITY_AZURE_AUTH;
metadata: {
@ -633,6 +766,63 @@ interface GetIdentityAzureAuthEvent {
};
}
interface LoginIdentityOidcAuthEvent {
type: EventType.LOGIN_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
identityOidcAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityOidcAuthEvent {
type: EventType.ADD_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
oidcDiscoveryUrl: string;
caCert: string;
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface DeleteIdentityOidcAuthEvent {
type: EventType.REVOKE_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityOidcAuthEvent {
type: EventType.UPDATE_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
oidcDiscoveryUrl?: string;
caCert?: string;
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityOidcAuthEvent {
type: EventType.GET_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
};
}
interface CreateEnvironmentEvent {
type: EventType.CREATE_ENVIRONMENT;
metadata: {
@ -722,7 +912,6 @@ interface CreateWebhookEvent {
webhookId: string;
environment: string;
secretPath: string;
webhookUrl: string;
isDisabled: boolean;
};
}
@ -733,7 +922,6 @@ interface UpdateWebhookStatusEvent {
webhookId: string;
environment: string;
secretPath: string;
webhookUrl: string;
isDisabled: boolean;
};
}
@ -744,7 +932,6 @@ interface DeleteWebhookEvent {
webhookId: string;
environment: string;
secretPath: string;
webhookUrl: string;
isDisabled: boolean;
};
}
@ -984,6 +1171,7 @@ export type Event =
| CreateSecretBatchEvent
| UpdateSecretEvent
| UpdateSecretBatchEvent
| MoveSecretsEvent
| DeleteSecretEvent
| DeleteSecretBatchEvent
| GetWorkspaceKeyEvent
@ -1003,26 +1191,44 @@ export type Event =
| LoginIdentityUniversalAuthEvent
| AddIdentityUniversalAuthEvent
| UpdateIdentityUniversalAuthEvent
| DeleteIdentityUniversalAuthEvent
| GetIdentityUniversalAuthEvent
| CreateTokenIdentityTokenAuthEvent
| UpdateTokenIdentityTokenAuthEvent
| GetTokensIdentityTokenAuthEvent
| AddIdentityTokenAuthEvent
| UpdateIdentityTokenAuthEvent
| GetIdentityTokenAuthEvent
| DeleteIdentityTokenAuthEvent
| LoginIdentityKubernetesAuthEvent
| DeleteIdentityKubernetesAuthEvent
| AddIdentityKubernetesAuthEvent
| UpdateIdentityKubernetesAuthEvent
| GetIdentityKubernetesAuthEvent
| CreateIdentityUniversalAuthClientSecretEvent
| GetIdentityUniversalAuthClientSecretsEvent
| GetIdentityUniversalAuthClientSecretByIdEvent
| RevokeIdentityUniversalAuthClientSecretEvent
| LoginIdentityGcpAuthEvent
| AddIdentityGcpAuthEvent
| DeleteIdentityGcpAuthEvent
| UpdateIdentityGcpAuthEvent
| GetIdentityGcpAuthEvent
| LoginIdentityAwsAuthEvent
| AddIdentityAwsAuthEvent
| UpdateIdentityAwsAuthEvent
| GetIdentityAwsAuthEvent
| DeleteIdentityAwsAuthEvent
| LoginIdentityAzureAuthEvent
| AddIdentityAzureAuthEvent
| DeleteIdentityAzureAuthEvent
| UpdateIdentityAzureAuthEvent
| GetIdentityAzureAuthEvent
| LoginIdentityOidcAuthEvent
| AddIdentityOidcAuthEvent
| DeleteIdentityOidcAuthEvent
| UpdateIdentityOidcAuthEvent
| GetIdentityOidcAuthEvent
| CreateEnvironmentEvent
| UpdateEnvironmentEvent
| DeleteEnvironmentEvent

View File

@ -17,7 +17,7 @@ type TCertificateAuthorityCrlServiceFactoryDep = {
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decrypt" | "generateKmsKey">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@ -68,11 +68,11 @@ export const certificateAuthorityCrlServiceFactory = ({
kmsService
});
const decryptedCrl = await kmsService.decrypt({
kmsId: keyId,
cipherTextBlob: caCrl.encryptedCrl
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: keyId
});
const decryptedCrl = kmsDecryptor({ cipherTextBlob: caCrl.encryptedCrl });
const crl = new x509.X509Crl(decryptedCrl);
const base64crl = crl.toString("base64");
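decryptWithKmsKey replaces the one-shot decrypt call: it resolves the key once and hands back a synchronous decryptor that can be reused for several blobs. Sketched from the usage above (the second blob is hypothetical):
// Sketch: one decryptor bound to a key, reused for multiple ciphertexts.
const decryptor = await kmsService.decryptWithKmsKey({ kmsId: keyId });
const decryptedCrlBlob = decryptor({ cipherTextBlob: caCrl.encryptedCrl });
const decryptedOtherBlob = decryptor({ cipherTextBlob: someOtherEncryptedBlob }); // hypothetical second blob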

View File

@ -12,7 +12,10 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
const countLeasesForDynamicSecret = async (dynamicSecretId: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.DynamicSecretLease).count("*").where({ dynamicSecretId }).first();
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
.count("*")
.where({ dynamicSecretId })
.first();
return parseInt(doc || "0", 10);
} catch (error) {
throw new DatabaseError({ error, name: "DynamicSecretCountLeases" });
@ -21,7 +24,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.DynamicSecretLease)
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
.where({ [`${TableName.DynamicSecretLease}.id` as "id"]: id })
.first()
.join(

View File

@ -3,7 +3,8 @@ import { z } from "zod";
export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb"
Oracle = "oracledb",
MsSQL = "mssql"
}
export const DynamicSecretSqlDBSchema = z.object({

View File

@ -0,0 +1,47 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TKmsKeys } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
export type TExternalKmsDALFactory = ReturnType<typeof externalKmsDALFactory>;
export const externalKmsDALFactory = (db: TDbClient) => {
const externalKmsOrm = ormify(db, TableName.ExternalKms);
const find = async (filter: Partial<TKmsKeys>, tx?: Knex) => {
try {
const result = await (tx || db.replicaNode())(TableName.ExternalKms)
.join(TableName.KmsKey, `${TableName.KmsKey}.id`, `${TableName.ExternalKms}.kmsKeyId`)
.where(filter)
.select(selectAllTableCols(TableName.KmsKey))
.select(
db.ref("id").withSchema(TableName.ExternalKms).as("externalKmsId"),
db.ref("provider").withSchema(TableName.ExternalKms).as("externalKmsProvider"),
db.ref("encryptedProviderInputs").withSchema(TableName.ExternalKms).as("externalKmsEncryptedProviderInput"),
db.ref("status").withSchema(TableName.ExternalKms).as("externalKmsStatus"),
db.ref("statusDetails").withSchema(TableName.ExternalKms).as("externalKmsStatusDetails")
);
return result.map((el) => ({
id: el.id,
description: el.description,
isDisabled: el.isDisabled,
isReserved: el.isReserved,
orgId: el.orgId,
slug: el.slug,
externalKms: {
id: el.externalKmsId,
provider: el.externalKmsProvider,
status: el.externalKmsStatus,
statusDetails: el.externalKmsStatusDetails
}
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find" });
}
};
return { ...externalKmsOrm, find };
};

View File

@ -0,0 +1,309 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TExternalKmsDALFactory } from "./external-kms-dal";
import {
TCreateExternalKmsDTO,
TDeleteExternalKmsDTO,
TGetExternalKmsByIdDTO,
TGetExternalKmsBySlugDTO,
TListExternalKmsDTO,
TUpdateExternalKmsDTO
} from "./external-kms-types";
import { AwsKmsProviderFactory } from "./providers/aws-kms";
import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";
type TExternalKmsServiceFactoryDep = {
externalKmsDAL: TExternalKmsDALFactory;
kmsService: Pick<TKmsServiceFactory, "getOrgKmsKeyId" | "encryptWithKmsKey" | "decryptWithKmsKey">;
kmsDAL: Pick<TKmsKeyDALFactory, "create" | "updateById" | "findById" | "deleteById" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
};
export type TExternalKmsServiceFactory = ReturnType<typeof externalKmsServiceFactory>;
export const externalKmsServiceFactory = ({
externalKmsDAL,
permissionService,
kmsService,
kmsDAL
}: TExternalKmsServiceFactoryDep) => {
const create = async ({
provider,
description,
actor,
slug,
actorId,
actorOrgId,
actorAuthMethod
}: TCreateExternalKmsDTO) => {
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const kmsSlug = slug ? slugify(slug) : slugify(alphaNumericNanoId(8).toLowerCase());
let sanitizedProviderInput = "";
switch (provider.type) {
case KmsProviders.Aws:
{
const externalKms = await AwsKmsProviderFactory({ inputs: provider.inputs });
await externalKms.validateConnection();
// if no kms key id was provided, generate a new kms key and return the updated provider input
const newProviderInput = await externalKms.generateInputKmsKey();
sanitizedProviderInput = JSON.stringify(newProviderInput);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
const orgKmsKeyId = await kmsService.getOrgKmsKeyId(actorOrgId);
const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: orgKmsKeyId
});
const { cipherTextBlob: encryptedProviderInputs } = kmsEncryptor({
plainText: Buffer.from(sanitizedProviderInput, "utf8")
});
const externalKms = await externalKmsDAL.transaction(async (tx) => {
const kms = await kmsDAL.create(
{
isReserved: false,
description,
slug: kmsSlug,
orgId: actorOrgId
},
tx
);
const externalKmsCfg = await externalKmsDAL.create(
{
provider: provider.type,
encryptedProviderInputs,
kmsKeyId: kms.id
},
tx
);
return { ...kms, external: externalKmsCfg };
});
return externalKms;
};
const updateById = async ({
provider,
description,
actor,
id: kmsId,
slug,
actorId,
actorOrgId,
actorAuthMethod
}: TUpdateExternalKmsDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
kmsDoc.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const kmsSlug = slug ? slugify(slug) : undefined;
const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });
const orgDefaultKmsId = await kmsService.getOrgKmsKeyId(kmsDoc.orgId);
let sanitizedProviderInput = "";
if (provider) {
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: orgDefaultKmsId
});
const decryptedProviderInputBlob = kmsDecryptor({
cipherTextBlob: externalKmsDoc.encryptedProviderInputs
});
switch (provider.type) {
case KmsProviders.Aws:
{
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
await externalKms.validateConnection();
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
}
let encryptedProviderInputs: Buffer | undefined;
if (sanitizedProviderInput) {
const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: orgDefaultKmsId
});
const { cipherTextBlob } = kmsEncryptor({
plainText: Buffer.from(sanitizedProviderInput, "utf8")
});
encryptedProviderInputs = cipherTextBlob;
}
const externalKms = await externalKmsDAL.transaction(async (tx) => {
const kms = await kmsDAL.updateById(
kmsDoc.id,
{
description,
slug: kmsSlug
},
tx
);
if (encryptedProviderInputs) {
const externalKmsCfg = await externalKmsDAL.updateById(
externalKmsDoc.id,
{
encryptedProviderInputs
},
tx
);
return { ...kms, external: externalKmsCfg };
}
return { ...kms, external: externalKmsDoc };
});
return externalKms;
};
const deleteById = async ({ actor, id: kmsId, actorId, actorOrgId, actorAuthMethod }: TDeleteExternalKmsDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
kmsDoc.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });
const externalKms = await externalKmsDAL.transaction(async (tx) => {
const kms = await kmsDAL.deleteById(kmsDoc.id, tx);
return { ...kms, external: externalKmsDoc };
});
return externalKms;
};
const list = async ({ actor, actorId, actorOrgId, actorAuthMethod }: TListExternalKmsDTO) => {
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const externalKmsDocs = await externalKmsDAL.find({ orgId: actorOrgId });
return externalKmsDocs;
};
const findById = async ({ actor, actorId, actorOrgId, actorAuthMethod, id: kmsId }: TGetExternalKmsByIdDTO) => {
const kmsDoc = await kmsDAL.findById(kmsId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
kmsDoc.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });
const orgDefaultKmsId = await kmsService.getOrgKmsKeyId(kmsDoc.orgId);
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: orgDefaultKmsId
});
const decryptedProviderInputBlob = kmsDecryptor({
cipherTextBlob: externalKmsDoc.encryptedProviderInputs
});
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
};
const findBySlug = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
slug: kmsSlug
}: TGetExternalKmsBySlugDTO) => {
const kmsDoc = await kmsDAL.findOne({ slug: kmsSlug, orgId: actorOrgId });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
kmsDoc.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });
const orgDefaultKmsId = await kmsService.getOrgKmsKeyId(kmsDoc.orgId);
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: orgDefaultKmsId
});
const decryptedProviderInputBlob = kmsDecryptor({
cipherTextBlob: externalKmsDoc.encryptedProviderInputs
});
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
};
return {
create,
updateById,
deleteById,
list,
findById,
findBySlug
};
};
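A hedged example of calling the new service to register an AWS KMS for an org; the actor fields mirror what the router passes from req.permission, and the ARN, region, and slug are placeholders:
// Illustrative call into the create() method exposed as server.services.externalKms above.
const externalKms = await server.services.externalKms.create({
  actor: req.permission.type,
  actorId: req.permission.id,
  actorAuthMethod: req.permission.authMethod,
  actorOrgId: req.permission.orgId,
  slug: "aws-prod-kms",
  description: "Primary AWS KMS for this org",
  provider: {
    type: KmsProviders.Aws,
    inputs: {
      awsRegion: "us-east-1",
      credential: {
        type: KmsAwsCredentialType.AssumeRole,
        data: { assumeRoleArn: "arn:aws:iam::123456789012:role/infisical-kms" }
      }
      // kmsKeyId omitted: create() will generate one via generateInputKmsKey()
    }
  }
});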

View File

@ -0,0 +1,30 @@
import { TOrgPermission } from "@app/lib/types";
import { TExternalKmsInputSchema, TExternalKmsInputUpdateSchema } from "./providers/model";
export type TCreateExternalKmsDTO = {
slug?: string;
description?: string;
provider: TExternalKmsInputSchema;
} & Omit<TOrgPermission, "orgId">;
export type TUpdateExternalKmsDTO = {
id: string;
slug?: string;
description?: string;
provider?: TExternalKmsInputUpdateSchema;
} & Omit<TOrgPermission, "orgId">;
export type TDeleteExternalKmsDTO = {
id: string;
} & Omit<TOrgPermission, "orgId">;
export type TListExternalKmsDTO = Omit<TOrgPermission, "orgId">;
export type TGetExternalKmsByIdDTO = {
id: string;
} & Omit<TOrgPermission, "orgId">;
export type TGetExternalKmsBySlugDTO = {
slug: string;
} & Omit<TOrgPermission, "orgId">;

View File

@ -0,0 +1,102 @@
import { CreateKeyCommand, DecryptCommand, DescribeKeyCommand, EncryptCommand, KMSClient } from "@aws-sdk/client-kms";
import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
import { randomUUID } from "crypto";
import { ExternalKmsAwsSchema, KmsAwsCredentialType, TExternalKmsAwsSchema, TExternalKmsProviderFns } from "./model";
const getAwsKmsClient = async (providerInputs: TExternalKmsAwsSchema) => {
if (providerInputs.credential.type === KmsAwsCredentialType.AssumeRole) {
const awsCredential = providerInputs.credential.data;
const stsClient = new STSClient({
region: providerInputs.awsRegion
});
const command = new AssumeRoleCommand({
RoleArn: awsCredential.assumeRoleArn,
RoleSessionName: `infisical-kms-${randomUUID()}`,
DurationSeconds: 900, // 15mins
ExternalId: awsCredential.externalId
});
const response = await stsClient.send(command);
if (!response.Credentials?.AccessKeyId || !response.Credentials?.SecretAccessKey)
throw new Error("Failed to assume role");
const kmsClient = new KMSClient({
region: providerInputs.awsRegion,
credentials: {
accessKeyId: response.Credentials.AccessKeyId,
secretAccessKey: response.Credentials.SecretAccessKey,
sessionToken: response.Credentials.SessionToken,
expiration: response.Credentials.Expiration
}
});
return kmsClient;
}
const awsCredential = providerInputs.credential.data;
const kmsClient = new KMSClient({
region: providerInputs.awsRegion,
credentials: {
accessKeyId: awsCredential.accessKey,
secretAccessKey: awsCredential.secretKey
}
});
return kmsClient;
};
type AwsKmsProviderArgs = {
inputs: unknown;
};
type TAwsKmsProviderFactoryReturn = TExternalKmsProviderFns & {
generateInputKmsKey: () => Promise<TExternalKmsAwsSchema>;
};
export const AwsKmsProviderFactory = async ({ inputs }: AwsKmsProviderArgs): Promise<TAwsKmsProviderFactoryReturn> => {
const providerInputs = await ExternalKmsAwsSchema.parseAsync(inputs);
const awsClient = await getAwsKmsClient(providerInputs);
const generateInputKmsKey = async () => {
if (providerInputs.kmsKeyId) return providerInputs;
const command = new CreateKeyCommand({ Tags: [{ TagKey: "author", TagValue: "infisical" }] });
const kmsKey = await awsClient.send(command);
if (!kmsKey.KeyMetadata?.KeyId) throw new Error("Failed to generate kms key");
return { ...providerInputs, kmsKeyId: kmsKey.KeyMetadata?.KeyId };
};
const validateConnection = async () => {
const command = new DescribeKeyCommand({
KeyId: providerInputs.kmsKeyId
});
const isConnected = await awsClient.send(command).then(() => true);
return isConnected;
};
const encrypt = async (data: Buffer) => {
const command = new EncryptCommand({
KeyId: providerInputs.kmsKeyId,
Plaintext: data
});
const encryptionCommand = await awsClient.send(command);
if (!encryptionCommand.CiphertextBlob) throw new Error("encryption failed");
return { encryptedBlob: Buffer.from(encryptionCommand.CiphertextBlob) };
};
const decrypt = async (encryptedBlob: Buffer) => {
const command = new DecryptCommand({
KeyId: providerInputs.kmsKeyId,
CiphertextBlob: encryptedBlob
});
const decryptionCommand = await awsClient.send(command);
if (!decryptionCommand.Plaintext) throw new Error("decryption failed");
return { data: Buffer.from(decryptionCommand.Plaintext) };
};
return {
generateInputKmsKey,
validateConnection,
encrypt,
decrypt
};
};
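The factory validates connectivity with DescribeKey and then exposes encrypt/decrypt that round-trip Buffers through AWS KMS. A short usage sketch (the credentials and key id are placeholders):
// Illustrative round-trip through the AWS KMS provider.
const awsKms = await AwsKmsProviderFactory({
  inputs: {
    awsRegion: "us-east-1",
    credential: { type: "access-key", data: { accessKey: "AKIA-placeholder", secretKey: "placeholder" } },
    kmsKeyId: "11111111-2222-3333-4444-555555555555"
  }
});
await awsKms.validateConnection();
const { encryptedBlob } = await awsKms.encrypt(Buffer.from("hello"));
const { data } = await awsKms.decrypt(encryptedBlob);
// data.toString("utf8") === "hello"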

View File

@ -0,0 +1,61 @@
import { z } from "zod";
export enum KmsProviders {
Aws = "aws"
}
export enum KmsAwsCredentialType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
export const ExternalKmsAwsSchema = z.object({
credential: z
.discriminatedUnion("type", [
z.object({
type: z.literal(KmsAwsCredentialType.AccessKey),
data: z.object({
accessKey: z.string().trim().min(1).describe("AWS user account access key"),
secretKey: z.string().trim().min(1).describe("AWS user account secret key")
})
}),
z.object({
type: z.literal(KmsAwsCredentialType.AssumeRole),
data: z.object({
assumeRoleArn: z.string().trim().min(1).describe("AWS user role to be assumed by infisical"),
externalId: z
.string()
.trim()
.min(1)
.optional()
.describe("AWS assume role external id for further security in authentication")
})
})
])
.describe("AWS credential information to connect"),
awsRegion: z.string().min(1).trim().describe("AWS region to connect"),
kmsKeyId: z
.string()
.trim()
.optional()
.describe("A pre-existing AWS KMS key id to be used for encryption. If not provided, a kms key will be generated.")
});
export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;
// The root schema of the JSON
export const ExternalKmsInputSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema })
]);
export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>;
export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() })
]);
export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>;
// generic function shared by all provider
export type TExternalKmsProviderFns = {
validateConnection: () => Promise<boolean>;
encrypt: (data: Buffer) => Promise<{ encryptedBlob: Buffer }>;
decrypt: (encryptedBlob: Buffer) => Promise<{ data: Buffer }>;
};
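ExternalKmsInputSchema is a discriminated union keyed on type, so supporting another provider later means adding one more literal branch and its input schema. Parsing an AWS input, for illustration (the ARN and external id are placeholders):
// Illustrative parse; the "type" discriminator narrows "inputs" to the AWS shape.
const input = ExternalKmsInputSchema.parse({
  type: KmsProviders.Aws,
  inputs: {
    awsRegion: "eu-central-1",
    credential: {
      type: KmsAwsCredentialType.AssumeRole,
      data: { assumeRoleArn: "arn:aws:iam::123456789012:role/infisical-kms", externalId: "org-1234" }
    }
  }
});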

View File

@ -12,7 +12,7 @@ export const groupDALFactory = (db: TDbClient) => {
const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => {
try {
const query = (tx || db)(TableName.Groups)
const query = (tx || db.replicaNode())(TableName.Groups)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.select(selectAllTableCols(TableName.Groups));
@ -32,7 +32,7 @@ export const groupDALFactory = (db: TDbClient) => {
const findByOrgId = async (orgId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.Groups)
const docs = await (tx || db.replicaNode())(TableName.Groups)
.where(`${TableName.Groups}.orgId`, orgId)
.leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
.select(selectAllTableCols(TableName.Groups))
@ -74,11 +74,12 @@ export const groupDALFactory = (db: TDbClient) => {
username?: string;
}) => {
try {
let query = db(TableName.OrgMembership)
let query = db
.replicaNode()(TableName.OrgMembership)
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserGroupMembership, function () {
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
.leftJoin(TableName.UserGroupMembership, (bd) => {
bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
`${TableName.UserGroupMembership}.groupId`,
"=",
db.raw("?", [groupId])
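
The recurring change from (tx || db) to (tx || db.replicaNode()) in this diff routes read-only queries to a read replica whenever the caller is not inside an explicit transaction. The actual replicaNode implementation is not part of this diff; the sketch below only illustrates the assumed shape of such a helper.

// Illustrative sketch only; Infisical's real TDbClient and replicaNode are not shown in this PR.
import type { Knex } from "knex";

type TDbClientSketch = Knex & { replicaNode: () => Knex };

const withReplicas = (primary: Knex, replicas: Knex[]): TDbClientSketch => {
  const client = primary as TDbClientSketch;
  // Pick a replica for reads, falling back to the primary when none are configured.
  client.replicaNode = () =>
    replicas.length ? replicas[Math.floor(Math.random() * replicas.length)] : primary;
  return client;
};

// Writes and transactional reads stay on the primary; plain reads go to a replica.
const findGroupsByOrg = (db: TDbClientSketch, orgId: string, tx?: Knex) =>
  (tx || db.replicaNode())("groups").where({ orgId }).select("*");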

View File

@ -18,7 +18,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
*/
const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[], tx?: Knex) => {
try {
const userProjectMemberships: string[] = await (tx || db)(TableName.ProjectMembership)
const userProjectMemberships: string[] = await (tx || db.replicaNode())(TableName.ProjectMembership)
.where(`${TableName.ProjectMembership}.userId`, userId)
.whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
.pluck(`${TableName.ProjectMembership}.projectId`);
@ -43,7 +43,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
// special query
const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
try {
const usernameDocs: string[] = await db(TableName.UserGroupMembership)
const usernameDocs: string[] = await db
.replicaNode()(TableName.UserGroupMembership)
.join(
TableName.GroupProjectMembership,
`${TableName.UserGroupMembership}.groupId`,
@ -73,7 +74,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
try {
// get list of groups in the project with id [projectId]
// that are not the group with id [groupId]
const groups: string[] = await (tx || db)(TableName.GroupProjectMembership)
const groups: string[] = await (tx || db.replicaNode())(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);
@ -83,8 +84,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
.where(`${TableName.UserGroupMembership}.groupId`, groupId)
.where(`${TableName.UserGroupMembership}.isPending`, false)
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.ProjectMembership, function () {
this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
.leftJoin(TableName.ProjectMembership, (bd) => {
bd.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
`${TableName.ProjectMembership}.projectId`,
"=",
db.raw("?", [projectId])
@ -107,9 +108,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER
.whereNotIn(`${TableName.UserGroupMembership}.userId`, function () {
.whereNotIn(`${TableName.UserGroupMembership}.userId`, (bd) => {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.select(`${TableName.UserGroupMembership}.userId`)
bd.select(`${TableName.UserGroupMembership}.userId`)
.from(TableName.UserGroupMembership)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups);
});
@ -161,11 +162,26 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
}
};
const findUserGroupMembershipsInOrg = async (userId: string, orgId: string) => {
try {
const docs = await db
.replicaNode()(TableName.UserGroupMembership)
.join(TableName.Groups, `${TableName.UserGroupMembership}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.where(`${TableName.Groups}.orgId`, orgId);
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "findTest" });
}
};
return {
...userGroupMembershipOrm,
filterProjectsByUserMembership,
findUserGroupMembershipsInProject,
findGroupMembersNotInProject,
deletePendingUserGroupMembershipsByUserIds
deletePendingUserGroupMembershipsByUserIds,
findUserGroupMembershipsInOrg
};
};
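
Several hunks in these DAL files also swap Knex's function-style join callback (which relies on this binding) for an arrow callback that receives the join builder as an argument; both forms are supported by Knex and produce the same SQL. A small side-by-side sketch with placeholder table and column names:

// Placeholder tables/columns; shows the two equivalent Knex join-callback styles used in this diff.
import type { Knex } from "knex";

const joinWithFunctionThis = (db: Knex, projectId: string) =>
  db("users").leftJoin("project_memberships", function () {
    this.on("users.id", "=", "project_memberships.userId").andOn(
      "project_memberships.projectId",
      "=",
      db.raw("?", [projectId])
    );
  });

const joinWithArrowBuilder = (db: Knex, projectId: string) =>
  db("users").leftJoin("project_memberships", (bd) => {
    bd.on("users.id", "=", "project_memberships.userId").andOn(
      "project_memberships.projectId",
      "=",
      db.raw("?", [projectId])
    );
  });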

View File

@ -23,6 +23,8 @@ import {
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -30,7 +32,9 @@ import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membe
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@ -50,7 +54,7 @@ import {
TTestLdapConnectionDTO,
TUpdateLdapCfgDTO
} from "./ldap-config-types";
import { testLDAPConfig } from "./ldap-fns";
import { searchGroups, testLDAPConfig } from "./ldap-fns";
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
type TLdapConfigServiceFactoryDep = {
@ -84,6 +88,8 @@ type TLdapConfigServiceFactoryDep = {
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
};
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@ -103,7 +109,9 @@ export const ldapConfigServiceFactory = ({
userDAL,
userAliasDAL,
permissionService,
licenseService
licenseService,
tokenService,
smtpService
}: TLdapConfigServiceFactoryDep) => {
const createLdapCfg = async ({
actor,
@ -115,6 +123,7 @@ export const ldapConfigServiceFactory = ({
url,
bindDN,
bindPass,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@ -193,6 +202,7 @@ export const ldapConfigServiceFactory = ({
encryptedBindPass,
bindPassIV,
bindPassTag,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@ -215,6 +225,7 @@ export const ldapConfigServiceFactory = ({
url,
bindDN,
bindPass,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@ -237,7 +248,8 @@ export const ldapConfigServiceFactory = ({
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter
groupSearchFilter,
uniqueUserAttribute
};
const orgBot = await orgBotDAL.findOne({ orgId });
@ -275,7 +287,7 @@ export const ldapConfigServiceFactory = ({
return ldapConfig;
};
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean }) => {
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
const ldapConfig = await ldapConfigDAL.findOne(filter);
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
@ -338,6 +350,7 @@ export const ldapConfigServiceFactory = ({
url: ldapConfig.url,
bindDN,
bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
@ -374,6 +387,7 @@ export const ldapConfigServiceFactory = ({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter || "(uid={{username}})",
// searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
@ -404,6 +418,13 @@ export const ldapConfigServiceFactory = ({
}: TLdapLoginDTO) => {
const appCfg = getConfig();
const serverCfg = await getServerCfg();
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.LDAP)) {
throw new BadRequestError({
message: "Login with LDAP is disabled by administrator."
});
}
let userAlias = await userAliasDAL.findOne({
externalId,
orgId,
@ -428,7 +449,8 @@ export const ldapConfigServiceFactory = ({
userId: userAlias.userId,
orgId,
role: OrgMembershipRole.Member,
status: OrgMembershipStatus.Accepted
status: OrgMembershipStatus.Accepted,
isActive: true
},
tx
);
@ -443,9 +465,24 @@ export const ldapConfigServiceFactory = ({
}
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// the plan imposes a member limit and the number of members used has reached it
throw new BadRequestError({
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// the plan imposes an identity limit and the number of identities used has reached it
throw new BadRequestError({
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
});
}
userAlias = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
if (serverCfg.trustLdapEmails) {
newUser = await userDAL.findOne(
{
email,
@ -494,11 +531,12 @@ export const ldapConfigServiceFactory = ({
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: userAlias.userId,
userId: newUser.id,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
isActive: true
},
tx
);
@ -627,6 +665,22 @@ export const ldapConfigServiceFactory = ({
}
);
if (user.email && !user.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});
await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
}
return { isUserCompleted, providerAuthToken };
};
@ -672,11 +726,25 @@ export const ldapConfigServiceFactory = ({
message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map."
});
const ldapConfig = await ldapConfigDAL.findOne({
id: ldapConfigId,
orgId
const ldapConfig = await getLdapCfg({
orgId,
id: ldapConfigId
});
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
if (!ldapConfig.groupSearchBase) {
throw new BadRequestError({
message: "Configure a group search base in your LDAP configuration in order to proceed."
});
}
const groupSearchFilter = `(cn=${ldapGroupCN})`;
const groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
if (!groups.some((g) => g.cn === ldapGroupCN)) {
throw new BadRequestError({
message: "Failed to find LDAP Group CN"
});
}
const group = await groupDAL.findOne({ slug: groupSlug, orgId });
if (!group) throw new BadRequestError({ message: "Failed to find group" });

View File

@ -7,6 +7,7 @@ export type TLDAPConfig = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
groupSearchBase: string;
groupSearchFilter: string;
@ -19,6 +20,7 @@ export type TCreateLdapCfgDTO = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;
@ -33,6 +35,7 @@ export type TUpdateLdapCfgDTO = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;

View File

@ -10,7 +10,8 @@ export const ldapGroupMapDALFactory = (db: TDbClient) => {
const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => {
try {
const docs = await db(TableName.LdapGroupMap)
const docs = await db
.replicaNode()(TableName.LdapGroupMap)
.where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId)
.join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`)
.select(selectAllTableCols(TableName.LdapGroupMap))

View File

@ -7,6 +7,8 @@ export const getDefaultOnPremFeatures = () => {
workspacesUsed: 0,
memberLimit: null,
membersUsed: 0,
identityLimit: null,
identitiesUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
secretVersioning: true,

View File

@ -15,6 +15,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
membersUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
identityLimit: null,
identitiesUsed: 0,
dynamicSecret: false,
secretVersioning: true,
pitRecovery: false,
@ -36,7 +38,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
caCrl: false,
instanceUserManagement: false
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {

View File

@ -9,7 +9,7 @@ export type TLicenseDALFactory = ReturnType<typeof licenseDALFactory>;
export const licenseDALFactory = (db: TDbClient) => {
const countOfOrgMembers = async (orgId: string | null, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.OrgMembership)
const doc = await (tx || db.replicaNode())(TableName.OrgMembership)
.where({ status: OrgMembershipStatus.Accepted })
.andWhere((bd) => {
if (orgId) {
@ -19,11 +19,44 @@ export const licenseDALFactory = (db: TDbClient) => {
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
return doc?.[0].count;
return Number(doc?.[0].count);
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Members" });
}
};
return { countOfOrgMembers };
const countOrgUsersAndIdentities = async (orgId: string | null, tx?: Knex) => {
try {
// count org users
const userDoc = await (tx || db)(TableName.OrgMembership)
.where({ status: OrgMembershipStatus.Accepted })
.andWhere((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
const userCount = Number(userDoc?.[0].count);
// count org identities
const identityDoc = await (tx || db)(TableName.IdentityOrgMembership)
.where((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.count();
const identityCount = Number(identityDoc?.[0].count);
return userCount + identityCount;
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Users + Identities" });
}
};
return { countOfOrgMembers, countOrgUsersAndIdentities };
};
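
The Number(...) casts above matter because, on PostgreSQL, Knex returns count() results as strings (bigint); without the cast, adding the user and identity counts would concatenate strings instead of summing numbers. A tiny sketch with a placeholder table name:

// Placeholder table name; demonstrates why the counts above are wrapped in Number().
import type { Knex } from "knex";

const countRows = async (db: Knex, table: string): Promise<number> => {
  const rows = (await db(table).count()) as { count?: string | number }[]; // e.g. [{ count: "42" }] on pg
  return Number(rows?.[0]?.count ?? 0);
};

// Without the cast: "42" + "7" === "427"; with it: 42 + 7 === 49.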

View File

@ -5,6 +5,7 @@
// TODO(akhilmhdh): work out the API structure with Tony and fill it in here
import { ForbiddenError } from "@casl/ability";
import { Knex } from "knex";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
@ -155,6 +156,7 @@ export const licenseServiceFactory = ({
LICENSE_SERVER_CLOUD_PLAN_TTL,
JSON.stringify(currentPlan)
);
return currentPlan;
}
} catch (error) {
@ -199,21 +201,29 @@ export const licenseServiceFactory = ({
await licenseServerCloudApi.request.delete(`/api/license-server/v1/customers/${customerId}`);
};
const updateSubscriptionOrgMemberCount = async (orgId: string) => {
const updateSubscriptionOrgMemberCount = async (orgId: string, tx?: Knex) => {
if (instanceType === InstanceType.Cloud) {
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new BadRequestError({ message: "Org not found" });
const count = await licenseDAL.countOfOrgMembers(orgId);
const quantity = await licenseDAL.countOfOrgMembers(orgId, tx);
const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId, tx);
if (org?.customerId) {
await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
quantity: count
quantity,
quantityIdentities
});
}
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
} else if (instanceType === InstanceType.EnterpriseOnPrem) {
const usedSeats = await licenseDAL.countOfOrgMembers(null);
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats });
const usedSeats = await licenseDAL.countOfOrgMembers(null, tx);
const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null, tx);
onPremFeatures.membersUsed = usedSeats;
onPremFeatures.identitiesUsed = usedIdentitySeats;
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, {
usedSeats,
usedIdentitySeats
});
}
await refreshPlan(orgId);
};
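
With the new optional Knex transaction parameter, callers can count seats inside the same transaction that creates a membership, so the quantity reported to the license server matches what was just written. A hedged sketch of such a caller; the DAL and service shapes are assumptions based on the factory above.

// Hypothetical caller; orgMembershipDAL and licenseService shapes are assumed for illustration.
import type { Knex } from "knex";

type OrgMembershipDALSketch = {
  transaction: <T>(cb: (tx: Knex) => Promise<T>) => Promise<T>;
  create: (data: Record<string, unknown>, tx: Knex) => Promise<unknown>;
};
type LicenseServiceSketch = {
  updateSubscriptionOrgMemberCount: (orgId: string, tx?: Knex) => Promise<void>;
};

const addMemberAndSyncSeats = (
  orgId: string,
  userId: string,
  orgMembershipDAL: OrgMembershipDALSketch,
  licenseService: LicenseServiceSketch
) =>
  orgMembershipDAL.transaction(async (tx) => {
    await orgMembershipDAL.create({ orgId, userId, role: "member", status: "accepted", isActive: true }, tx);
    // Counting in the same transaction keeps billed seats consistent with the membership just inserted.
    await licenseService.updateSubscriptionOrgMemberCount(orgId, tx);
  });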

View File

@ -30,7 +30,9 @@ export type TFeatureSet = {
workspacesUsed: 0;
dynamicSecret: false;
memberLimit: null;
membersUsed: 0;
membersUsed: number;
identityLimit: null;
identitiesUsed: number;
environmentLimit: null;
environmentsUsed: 0;
secretVersioning: true;
@ -54,6 +56,7 @@ export type TFeatureSet = {
secretApproval: false;
secretRotation: true;
caCrl: false;
instanceUserManagement: false;
};
export type TOrgPlansTableDTO = {

View File

@ -26,6 +26,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@ -157,6 +158,13 @@ export const oidcConfigServiceFactory = ({
const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
const serverCfg = await getServerCfg();
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {
throw new BadRequestError({
message: "Login with OIDC is disabled by administrator."
});
}
const appCfg = getConfig();
const userAlias = await userAliasDAL.findOne({
externalId,
@ -185,7 +193,8 @@ export const oidcConfigServiceFactory = ({
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
isActive: true
},
tx
);
@ -258,7 +267,8 @@ export const oidcConfigServiceFactory = ({
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
isActive: true
},
tx
);

View File

@ -10,7 +10,8 @@ export type TPermissionDALFactory = ReturnType<typeof permissionDALFactory>;
export const permissionDALFactory = (db: TDbClient) => {
const getOrgPermission = async (userId: string, orgId: string) => {
try {
const membership = await db(TableName.OrgMembership)
const membership = await db
.replicaNode()(TableName.OrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.OrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("userId", userId)
@ -28,7 +29,8 @@ export const permissionDALFactory = (db: TDbClient) => {
const getOrgIdentityPermission = async (identityId: string, orgId: string) => {
try {
const membership = await db(TableName.IdentityOrgMembership)
const membership = await db
.replicaNode()(TableName.IdentityOrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("identityId", identityId)
@ -45,11 +47,13 @@ export const permissionDALFactory = (db: TDbClient) => {
const getProjectPermission = async (userId: string, projectId: string) => {
try {
const groups: string[] = await db(TableName.GroupProjectMembership)
const groups: string[] = await db
.replicaNode()(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);
const groupDocs = await db(TableName.UserGroupMembership)
const groupDocs = await db
.replicaNode()(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups)
.join(
@ -231,7 +235,8 @@ export const permissionDALFactory = (db: TDbClient) => {
const getProjectIdentityPermission = async (identityId: string, projectId: string) => {
try {
const docs = await db(TableName.IdentityProjectMembership)
const docs = await db
.replicaNode()(TableName.IdentityProjectMembership)
.join(
TableName.IdentityProjectMembershipRole,
`${TableName.IdentityProjectMembershipRole}.projectMembershipId`,

View File

@ -109,6 +109,9 @@ export const permissionServiceFactory = ({
authMethod: ActorAuthMethod,
userOrgId?: string
) => {
// when the token is org-scoped, ensure the passed org id matches the user's org id
if (userOrgId && userOrgId !== orgId)
throw new BadRequestError({ message: "Invalid user token. Scoped to different organization." });
const membership = await permissionDAL.getOrgPermission(userId, orgId);
if (!membership) throw new UnauthorizedError({ name: "User not in org" });
if (membership.role === OrgMembershipRole.Custom && !membership.permissions) {

View File

@ -10,7 +10,8 @@ export const samlConfigDALFactory = (db: TDbClient) => {
const findEnforceableSamlCfg = async (orgId: string) => {
try {
const samlCfg = await db(TableName.SamlConfig)
const samlCfg = await db
.replicaNode()(TableName.SamlConfig)
.where({
orgId,
isActive: true

View File

@ -28,6 +28,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@ -335,6 +336,13 @@ export const samlConfigServiceFactory = ({
}: TSamlLoginDTO) => {
const appCfg = getConfig();
const serverCfg = await getServerCfg();
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.SAML)) {
throw new BadRequestError({
message: "Login with SAML is disabled by administrator."
});
}
const userAlias = await userAliasDAL.findOne({
externalId,
orgId,
@ -362,7 +370,8 @@ export const samlConfigServiceFactory = ({
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
isActive: true
},
tx
);
@ -380,6 +389,21 @@ export const samlConfigServiceFactory = ({
return foundUser;
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// the plan imposes a member limit and the number of members used has reached it
throw new BadRequestError({
message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// the plan imposes an identity limit and the number of identities used has reached it
throw new BadRequestError({
message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
});
}
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
@ -434,7 +458,8 @@ export const samlConfigServiceFactory = ({
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
isActive: true
},
tx
);

View File

@ -32,12 +32,19 @@ export const parseScimFilter = (filterToParse: string | undefined) => {
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
export function extractScimValueFromPath(path: string): string | null {
const regex = /members\[value eq "([^"]+)"\]/;
const match = path.match(regex);
return match ? match[1] : null;
}
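
For reference, the helper above targets the SCIM PATCH path form used by the remove operation handled later in this diff; the sample membership id below is made up.

// Usage of extractScimValueFromPath (defined above); the membership id is a placeholder.
const samplePath = 'members[value eq "c4a7f1d2-example-org-membership-id"]';
extractScimValueFromPath(samplePath); // "c4a7f1d2-example-org-membership-id"
extractScimValueFromPath("displayName"); // null — path does not match the members[value eq "..."] form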
export const buildScimUser = ({
orgMembershipId,
username,
email,
firstName,
lastName,
groups = [],
active
}: {
orgMembershipId: string;
@ -45,6 +52,10 @@ export const buildScimUser = ({
email?: string | null;
firstName: string;
lastName: string;
groups?: {
value: string;
display: string;
}[];
active: boolean;
}): TScimUser => {
const scimUser = {
@ -67,7 +78,7 @@ export const buildScimUser = ({
]
: [],
active,
groups: [],
groups,
meta: {
resourceType: "User",
location: null

View File

@ -2,7 +2,7 @@ import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import jwt from "jsonwebtoken";
import { OrgMembershipRole, OrgMembershipStatus, TableName, TGroups, TOrgMemberships, TUsers } from "@app/db/schemas";
import { OrgMembershipRole, OrgMembershipStatus, TableName, TOrgMemberships, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
@ -30,7 +30,14 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
import {
buildScimGroup,
buildScimGroupList,
buildScimUser,
buildScimUserList,
extractScimValueFromPath,
parseScimFilter
} from "./scim-fns";
import {
TCreateScimGroupDTO,
TCreateScimTokenDTO,
@ -61,17 +68,22 @@ type TScimServiceFactoryDep = {
TOrgDALFactory,
"createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction" | "updateMembershipById"
>;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "findOne" | "create" | "updateById">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "findOne" | "create" | "updateById" | "findById">;
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete" | "findProjectMembershipsByUserId">;
groupDAL: Pick<
TGroupDALFactory,
"create" | "findOne" | "findAllGroupMembers" | "update" | "delete" | "findGroups" | "transaction"
"create" | "findOne" | "findAllGroupMembers" | "delete" | "findGroups" | "transaction" | "updateById" | "update"
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,
"find" | "transaction" | "insertMany" | "filterProjectsByUserMembership" | "delete"
| "find"
| "transaction"
| "insertMany"
| "filterProjectsByUserMembership"
| "delete"
| "findUserGroupMembershipsInOrg"
>;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
@ -197,14 +209,14 @@ export const scimServiceFactory = ({
findOpts
);
const scimUsers = users.map(({ id, externalId, username, firstName, lastName, email }) =>
const scimUsers = users.map(({ id, externalId, username, firstName, lastName, email, isActive }) =>
buildScimUser({
orgMembershipId: id ?? "",
username: externalId ?? username,
firstName: firstName ?? "",
lastName: lastName ?? "",
email,
active: true
active: isActive
})
);
@ -240,13 +252,19 @@ export const scimServiceFactory = ({
status: 403
});
const groupMembershipsInOrg = await userGroupMembershipDAL.findUserGroupMembershipsInOrg(membership.userId, orgId);
return buildScimUser({
orgMembershipId: membership.id,
username: membership.externalId ?? membership.username,
email: membership.email ?? "",
firstName: membership.firstName as string,
lastName: membership.lastName as string,
active: true
active: membership.isActive,
groups: groupMembershipsInOrg.map((group) => ({
value: group.groupId,
display: group.name
}))
});
};
@ -296,7 +314,8 @@ export const scimServiceFactory = ({
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
isActive: true
},
tx
);
@ -364,7 +383,8 @@ export const scimServiceFactory = ({
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited, // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
isActive: true
},
tx
);
@ -401,7 +421,7 @@ export const scimServiceFactory = ({
firstName: createdUser.firstName as string,
lastName: createdUser.lastName as string,
email: createdUser.email ?? "",
active: true
active: createdOrgMembership.isActive
});
};
@ -445,14 +465,8 @@ export const scimServiceFactory = ({
});
if (!active) {
await deleteOrgMembershipFn({
orgMembershipId: membership.id,
orgId: membership.orgId,
orgDAL,
projectMembershipDAL,
projectKeyDAL,
userAliasDAL,
licenseService
await orgMembershipDAL.updateById(membership.id, {
isActive: false
});
}
@ -491,17 +505,11 @@ export const scimServiceFactory = ({
status: 403
});
if (!active) {
await deleteOrgMembershipFn({
orgMembershipId: membership.id,
orgId: membership.orgId,
orgDAL,
projectMembershipDAL,
projectKeyDAL,
userAliasDAL,
licenseService
});
}
await orgMembershipDAL.updateById(membership.id, {
isActive: active
});
const groupMembershipsInOrg = await userGroupMembershipDAL.findUserGroupMembershipsInOrg(membership.userId, orgId);
return buildScimUser({
orgMembershipId: membership.id,
@ -509,7 +517,11 @@ export const scimServiceFactory = ({
email: membership.email,
firstName: membership.firstName as string,
lastName: membership.lastName as string,
active
active,
groups: groupMembershipsInOrg.map((group) => ({
value: group.groupId,
display: group.name
}))
});
};
@ -817,7 +829,6 @@ export const scimServiceFactory = ({
});
};
// TODO: add support for add/remove op
const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
@ -840,27 +851,60 @@ export const scimServiceFactory = ({
status: 403
});
let group: TGroups | undefined;
let group = await groupDAL.findOne({
id: groupId,
orgId
});
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
for await (const operation of operations) {
switch (operation.op) {
case "replace": {
await groupDAL.update(
{
id: groupId,
orgId
},
{
name: operation.value.displayName
}
);
group = await groupDAL.updateById(group.id, {
name: operation.value.displayName
});
break;
}
case "add": {
// TODO
const orgMemberships = await orgMembershipDAL.find({
$in: {
id: operation.value.map((member) => member.value)
}
});
await addUsersToGroupByUserIds({
group,
userIds: orgMemberships.map((membership) => membership.userId as string),
userDAL,
userGroupMembershipDAL,
orgDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL
});
break;
}
case "remove": {
// TODO
const orgMembershipId = extractScimValueFromPath(operation.path);
if (!orgMembershipId) throw new ScimRequestError({ detail: "Invalid path value", status: 400 });
const orgMembership = await orgMembershipDAL.findById(orgMembershipId);
if (!orgMembership) throw new ScimRequestError({ detail: "Org Membership Not Found", status: 400 });
await removeUsersFromGroupByUserIds({
group,
userIds: [orgMembership.userId as string],
userDAL,
userGroupMembershipDAL,
groupProjectDAL,
projectKeyDAL
});
break;
}
default: {
@ -872,13 +916,6 @@ export const scimServiceFactory = ({
}
}
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
return buildScimGroup({
groupId: group.id,
name: group.name,

View File

@ -125,10 +125,11 @@ type TRemoveOp = {
type TAddOp = {
op: "add";
path: string;
value: {
value: string;
display?: string;
};
}[];
};
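
Combining the operation types above with the patch handler earlier in the diff, the group PATCH payloads look roughly as follows; all ids and names are placeholders, and the replace value is assumed to carry at least displayName.

// Placeholder payloads for the add / remove / replace branches handled by updateScimGroupNamePatch.
const replaceOp = { op: "replace" as const, value: { displayName: "Platform Team" } };

const addOp = {
  op: "add" as const,
  path: "members",
  value: [{ value: "org-membership-id", display: "jane@example.com" }]
};

const removeOp = {
  op: "remove" as const,
  path: 'members[value eq "org-membership-id"]'
};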
export type TDeleteScimGroupDTO = {
@ -157,7 +158,10 @@ export type TScimUser = {
type: string;
}[];
active: boolean;
groups: string[];
groups: {
value: string;
display: string;
}[];
meta: {
resourceType: string;
location: null;

View File

@ -1,49 +1,59 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TSecretApprovalPolicies } from "@app/db/schemas";
import { SecretApprovalPoliciesSchema, TableName, TSecretApprovalPolicies } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, mergeOneToManyRelation, ormify, selectAllTableCols, TFindFilter } from "@app/lib/knex";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
export type TSecretApprovalPolicyDALFactory = ReturnType<typeof secretApprovalPolicyDALFactory>;
export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
const secretApprovalPolicyOrm = ormify(db, TableName.SecretApprovalPolicy);
const sapFindQuery = (tx: Knex, filter: TFindFilter<TSecretApprovalPolicies>) =>
const secretApprovalPolicyFindQuery = (tx: Knex, filter: TFindFilter<TSecretApprovalPolicies>) =>
tx(TableName.SecretApprovalPolicy)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
.leftJoin(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.select(tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
.select(tx.ref("projectId").withSchema(TableName.Environment))
.select(tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover))
.select(
tx.ref("name").withSchema(TableName.Environment).as("envName"),
tx.ref("slug").withSchema(TableName.Environment).as("envSlug"),
tx.ref("id").withSchema(TableName.Environment).as("envId"),
tx.ref("projectId").withSchema(TableName.Environment)
)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.orderBy("createdAt", "asc");
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await sapFindQuery(tx || db, {
const doc = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.SecretApprovalPolicy}.id` as "id"]: id
});
const formatedDoc = mergeOneToManyRelation(
doc,
"id",
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
...el,
envId,
environment: { id: envId, name, slug }
const formatedDoc = sqlNestRelationships({
data: doc,
key: "id",
parentMapper: (data) => ({
environment: { id: data.envId, name: data.envName, slug: data.envSlug },
projectId: data.projectId,
...SecretApprovalPoliciesSchema.parse(data)
}),
({ approverId }) => approverId,
"approvers"
);
childrenMapper: [
{
key: "approverUserId",
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
});
return formatedDoc?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "FindById" });
@ -52,18 +62,25 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
const find = async (filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>, tx?: Knex) => {
try {
const docs = await sapFindQuery(tx || db, filter);
const formatedDoc = mergeOneToManyRelation(
docs,
"id",
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
...el,
envId,
environment: { id: envId, name, slug }
const docs = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
const formatedDoc = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
environment: { id: data.envId, name: data.envName, slug: data.envSlug },
projectId: data.projectId,
...SecretApprovalPoliciesSchema.parse(data)
}),
({ approverId }) => approverId,
"approvers"
);
childrenMapper: [
{
key: "approverUserId",
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
});
return formatedDoc;
} catch (error) {
throw new DatabaseError({ error, name: "Find" });
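
sqlNestRelationships is an internal helper whose implementation is not part of this diff; as a rough illustration of the idea it replaces mergeOneToManyRelation with, flat join rows keyed by the policy id are folded into one parent object with child arrays (here only userApprovers):

// Rough illustration of the nesting idea; not the actual sqlNestRelationships implementation.
type FlatPolicyRow = { id: string; name: string; approverUserId: string | null };

const nestApprovers = (rows: FlatPolicyRow[]) => {
  const byId = new Map<string, { id: string; name: string; userApprovers: { userId: string }[] }>();
  for (const row of rows) {
    const parent = byId.get(row.id) ?? { id: row.id, name: row.name, userApprovers: [] };
    if (row.approverUserId) parent.userApprovers.push({ userId: row.approverUserId });
    byId.set(row.id, parent);
  }
  return [...byId.values()];
};

// nestApprovers([{ id: "p1", name: "policy", approverUserId: "u1" }, { id: "p1", name: "policy", approverUserId: "u2" }])
//   -> [{ id: "p1", name: "policy", userApprovers: [{ userId: "u1" }, { userId: "u2" }] }]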

View File

@ -7,7 +7,6 @@ import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { containsGlobPatterns } from "@app/lib/picomatch";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretApprovalPolicyApproverDALFactory } from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
@ -29,7 +28,6 @@ type TSecretApprovalPolicyServiceFactoryDep = {
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
};
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@ -38,8 +36,7 @@ export const secretApprovalPolicyServiceFactory = ({
secretApprovalPolicyDAL,
permissionService,
secretApprovalPolicyApproverDAL,
projectEnvDAL,
projectMembershipDAL
projectEnvDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createSecretApprovalPolicy = async ({
name,
@ -48,12 +45,12 @@ export const secretApprovalPolicyServiceFactory = ({
actorOrgId,
actorAuthMethod,
approvals,
approvers,
approverUserIds,
projectId,
secretPath,
environment
}: TCreateSapDTO) => {
if (approvals > approvers.length)
if (approvals > approverUserIds.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission } = await permissionService.getProjectPermission(
@ -70,13 +67,6 @@ export const secretApprovalPolicyServiceFactory = ({
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) throw new BadRequestError({ message: "Environment not found" });
const secretApprovers = await projectMembershipDAL.find({
projectId,
$in: { id: approvers }
});
if (secretApprovers.length !== approvers.length)
throw new BadRequestError({ message: "Approver not found in project" });
const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => {
const doc = await secretApprovalPolicyDAL.create(
{
@ -88,8 +78,8 @@ export const secretApprovalPolicyServiceFactory = ({
tx
);
await secretApprovalPolicyApproverDAL.insertMany(
secretApprovers.map(({ id }) => ({
approverId: id,
approverUserIds.map((approverUserId) => ({
approverUserId,
policyId: doc.id
})),
tx
@ -100,7 +90,7 @@ export const secretApprovalPolicyServiceFactory = ({
};
const updateSecretApprovalPolicy = async ({
approvers,
approverUserIds,
secretPath,
name,
actorId,
@ -132,22 +122,11 @@ export const secretApprovalPolicyServiceFactory = ({
},
tx
);
if (approvers) {
const secretApprovers = await projectMembershipDAL.find(
{
projectId: secretApprovalPolicy.projectId,
$in: { id: approvers }
},
{ tx }
);
if (secretApprovers.length !== approvers.length)
throw new BadRequestError({ message: "Approver not found in project" });
if (doc.approvals > secretApprovers.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
if (approverUserIds) {
await secretApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
await secretApprovalPolicyApproverDAL.insertMany(
secretApprovers.map(({ id }) => ({
approverId: id,
approverUserIds.map((approverUserId) => ({
approverUserId,
policyId: doc.id
})),
tx

View File

@ -4,7 +4,7 @@ export type TCreateSapDTO = {
approvals: number;
secretPath?: string | null;
environment: string;
approvers: string[];
approverUserIds: string[];
projectId: string;
name: string;
} & Omit<TProjectPermission, "projectId">;
@ -13,7 +13,7 @@ export type TUpdateSapDTO = {
secretPolicyId: string;
approvals?: number;
secretPath?: string | null;
approvers: string[];
approverUserIds: string[];
name?: string;
} & Omit<TProjectPermission, "projectId">;

View File

@ -5,7 +5,8 @@ import {
SecretApprovalRequestsSchema,
TableName,
TSecretApprovalRequests,
TSecretApprovalRequestsSecrets
TSecretApprovalRequestsSecrets,
TUsers
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships, stripUndefinedInWhere, TFindFilter } from "@app/lib/knex";
@ -16,7 +17,7 @@ export type TSecretApprovalRequestDALFactory = ReturnType<typeof secretApprovalR
type TFindQueryFilter = {
projectId: string;
membershipId: string;
userId: string;
status?: RequestState;
environment?: string;
committer?: string;
@ -37,32 +38,68 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.leftJoin<TUsers>(
db(TableName.Users).as("statusChangedByUser"),
`${TableName.SecretApprovalRequest}.statusChangedByUserId`,
`statusChangedByUser.id`
)
.join<TUsers>(
db(TableName.Users).as("committerUser"),
`${TableName.SecretApprovalRequest}.committerUserId`,
`committerUser.id`
)
.join(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join<TUsers>(
db(TableName.Users).as("secretApprovalPolicyApproverUser"),
`${TableName.SecretApprovalPolicyApprover}.approverUserId`,
"secretApprovalPolicyApproverUser.id"
)
.leftJoin(
TableName.SecretApprovalRequestReviewer,
`${TableName.SecretApprovalRequest}.id`,
`${TableName.SecretApprovalRequestReviewer}.requestId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("secretApprovalReviewerUser"),
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
`secretApprovalReviewerUser.id`
)
.select(selectAllTableCols(TableName.SecretApprovalRequest))
.select(
tx.ref("member").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"),
tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"),
tx.ref("firstName").withSchema("secretApprovalPolicyApproverUser").as("approverFirstName"),
tx.ref("lastName").withSchema("secretApprovalPolicyApproverUser").as("approverLastName"),
tx.ref("email").withSchema("statusChangedByUser").as("statusChangedByUserEmail"),
tx.ref("username").withSchema("statusChangedByUser").as("statusChangedByUserUsername"),
tx.ref("firstName").withSchema("statusChangedByUser").as("statusChangedByUserFirstName"),
tx.ref("lastName").withSchema("statusChangedByUser").as("statusChangedByUserLastName"),
tx.ref("email").withSchema("committerUser").as("committerUserEmail"),
tx.ref("username").withSchema("committerUser").as("committerUserUsername"),
tx.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"),
tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
tx.ref("projectId").withSchema(TableName.Environment),
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover)
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals")
);
const findById = async (id: string, tx?: Knex) => {
try {
const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db);
const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
const docs = await sql;
const formatedDoc = sqlNestRelationships({
data: docs,
@ -71,6 +108,22 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
...SecretApprovalRequestsSchema.parse(el),
projectId: el.projectId,
environment: el.environment,
statusChangedByUser: el.statusChangedByUserId
? {
userId: el.statusChangedByUserId,
email: el.statusChangedByUserEmail,
firstName: el.statusChangedByUserFirstName,
lastName: el.statusChangedByUserLastName,
username: el.statusChangedByUserUsername
}
: undefined,
committerUser: {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
},
policy: {
id: el.policyId,
name: el.policyName,
@ -80,11 +133,34 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
mapper: ({
reviewerUserId: userId,
reviewerStatus: status,
reviewerEmail: email,
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
},
{ key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId }
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({
approverUserId,
approverEmail: email,
approverUsername: username,
approverLastName: lastName,
approverFirstName: firstName
}) => ({
userId: approverUserId,
email,
firstName,
lastName,
username
})
}
]
});
if (!formatedDoc?.[0]) return;
@ -97,12 +173,12 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
}
};
const findProjectRequestCount = async (projectId: string, membershipId: string, tx?: Knex) => {
const findProjectRequestCount = async (projectId: string, userId: string, tx?: Knex) => {
try {
const docs = await (tx || db)
.with(
"temp",
(tx || db)(TableName.SecretApprovalRequest)
(tx || db.replicaNode())(TableName.SecretApprovalRequest)
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(
@ -114,8 +190,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
.andWhere(
(bd) =>
void bd
.where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId)
.orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId)
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
)
.select("status", `${TableName.SecretApprovalRequest}.id`)
.groupBy(`${TableName.SecretApprovalRequest}.id`, "status")
@ -142,13 +218,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
};
const findByProjectId = async (
{ status, limit = 20, offset = 0, projectId, committer, environment, membershipId }: TFindQueryFilter,
{ status, limit = 20, offset = 0, projectId, committer, environment, userId }: TFindQueryFilter,
tx?: Knex
) => {
try {
// akhilmhdh: if you ever need a one-to-many relationship combined with pagination,
// this is the place to look.
const query = (tx || db)(TableName.SecretApprovalRequest)
const query = (tx || db.replicaNode())(TableName.SecretApprovalRequest)
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(
@ -161,6 +237,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join<TUsers>(
db(TableName.Users).as("committerUser"),
`${TableName.SecretApprovalRequest}.committerUserId`,
`committerUser.id`
)
.leftJoin(
TableName.SecretApprovalRequestReviewer,
`${TableName.SecretApprovalRequest}.id`,
@ -176,20 +257,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
projectId,
[`${TableName.Environment}.slug` as "slug"]: environment,
[`${TableName.SecretApprovalRequest}.status`]: status,
committerId: committer
committerUserId: committer
})
)
.andWhere(
(bd) =>
void bd
.where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId)
.orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId)
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
)
.select(selectAllTableCols(TableName.SecretApprovalRequest))
.select(
db.ref("projectId").withSchema(TableName.Environment),
db.ref("slug").withSchema(TableName.Environment).as("environment"),
db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerId"),
db.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
db.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
db.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
db.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
@ -201,7 +283,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover)
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
db.ref("email").withSchema("committerUser").as("committerUserEmail"),
db.ref("username").withSchema("committerUser").as("committerUserUsername"),
db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
db.ref("lastName").withSchema("committerUser").as("committerUserLastName")
)
.orderBy("createdAt", "desc");
@ -223,18 +309,26 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath
},
committerUser: {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
}
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: s }) => (member ? { member, status: s } : undefined)
mapper: ({ reviewerUserId, reviewerStatus: s }) =>
reviewerUserId ? { userId: reviewerUserId, status: s } : undefined
},
{
key: "approverId",
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverId }) => approverId
mapper: ({ approverUserId }) => approverUserId
},
{
key: "commitId",

View File

@ -47,7 +47,7 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
const findByRequestId = async (requestId: string, tx?: Knex) => {
try {
const doc = await (tx || db)({
const doc = await (tx || db.replicaNode())({
secVerTag: TableName.SecretTag
})
.from(TableName.SecretApprovalRequestSecret)

View File

@ -87,7 +87,7 @@ export const secretApprovalRequestServiceFactory = ({
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const { membership } = await permissionService.getProjectPermission(
await permissionService.getProjectPermission(
actor as ActorType.USER,
actorId,
projectId,
@ -95,7 +95,7 @@ export const secretApprovalRequestServiceFactory = ({
actorOrgId
);
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, membership.id);
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, actorId);
return count;
};
@ -113,19 +113,13 @@ export const secretApprovalRequestServiceFactory = ({
}: TListApprovalsDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const { membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId);
const approvals = await secretApprovalRequestDAL.findByProjectId({
projectId,
committer,
environment,
status,
membershipId: membership.id,
userId: actorId,
limit,
offset
});
@ -145,7 +139,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });
const { policy } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
actor,
actorId,
secretApprovalRequest.projectId,
@ -154,8 +148,8 @@ export const secretApprovalRequestServiceFactory = ({
);
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
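This authorization block recurs in several hunks below, and after this change it is keyed entirely by user IDs. A condensed sketch of the rule as a hypothetical helper (not code from this PR):

// Admins, the request's committer, and any approver listed on the policy may act on
// the request; everyone else is rejected with UnauthorizedError in the code above.
const canActOnRequest = (
  actorUserId: string,
  committerUserId: string,
  approvers: { userId: string }[],
  isAdmin: boolean
): boolean =>
  isAdmin ||
  committerUserId === actorUserId ||
  approvers.some(({ userId }) => userId === actorUserId);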
@ -180,7 +174,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const { policy } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
secretApprovalRequest.projectId,
@ -189,8 +183,8 @@ export const secretApprovalRequestServiceFactory = ({
);
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
@ -198,7 +192,7 @@ export const secretApprovalRequestServiceFactory = ({
const review = await secretApprovalRequestReviewerDAL.findOne(
{
requestId: secretApprovalRequest.id,
member: membership.id
reviewerUserId: actorId
},
tx
);
@ -207,7 +201,7 @@ export const secretApprovalRequestServiceFactory = ({
{
status,
requestId: secretApprovalRequest.id,
member: membership.id
reviewerUserId: actorId
},
tx
);
@ -230,7 +224,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const { policy } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
secretApprovalRequest.projectId,
@ -239,8 +233,8 @@ export const secretApprovalRequestServiceFactory = ({
);
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
@ -253,7 +247,7 @@ export const secretApprovalRequestServiceFactory = ({
const updatedRequest = await secretApprovalRequestDAL.updateById(secretApprovalRequest.id, {
status,
statusChangeBy: membership.id
statusChangedByUserId: actorId
});
return { ...secretApprovalRequest, ...updatedRequest };
};
@ -270,7 +264,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const { policy, folderId, projectId } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
projectId,
@ -280,19 +274,19 @@ export const secretApprovalRequestServiceFactory = ({
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
const reviewers = secretApprovalRequest.reviewers.reduce<Record<string, ApprovalStatus>>(
(prev, curr) => ({ ...prev, [curr.member.toString()]: curr.status as ApprovalStatus }),
(prev, curr) => ({ ...prev, [curr.userId.toString()]: curr.status as ApprovalStatus }),
{}
);
const hasMinApproval =
secretApprovalRequest.policy.approvals <=
secretApprovalRequest.policy.approvers.filter(
(approverId) => reviewers[approverId.toString()] === ApprovalStatus.APPROVED
({ userId: approverId }) => reviewers[approverId.toString()] === ApprovalStatus.APPROVED
).length;
if (!hasMinApproval) throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
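A small worked example of the quorum check above, with illustrative values (statuses are plain strings here, where the code uses the ApprovalStatus enum):

// One review per user, keyed by user ID; this mirrors the reduce() above.
const reviews: Record<string, string> = { "user-a": "approved", "user-b": "rejected" };
// Approvers configured on the policy.
const approvers = [{ userId: "user-a" }, { userId: "user-b" }, { userId: "user-c" }];
const requiredApprovals = 1; // policy.approvals

const approvedCount = approvers.filter(({ userId }) => reviews[userId] === "approved").length; // 1
const hasMinApproval = requiredApprovals <= approvedCount; // true, so the merge may proceed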
@ -472,7 +466,7 @@ export const secretApprovalRequestServiceFactory = ({
conflicts: JSON.stringify(conflicts),
hasMerged: true,
status: RequestState.Closed,
statusChangeBy: membership.id
statusChangedByUserId: actorId
},
tx
);
@ -509,7 +503,7 @@ export const secretApprovalRequestServiceFactory = ({
}: TGenerateSecretApprovalRequestDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const { permission, membership } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
@ -663,7 +657,7 @@ export const secretApprovalRequestServiceFactory = ({
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id
committerUserId: actorId
},
tx
);


@ -11,7 +11,6 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
@ -46,7 +45,6 @@ type TSecretReplicationServiceFactoryDep = {
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
secretApprovalRequestSecretDAL: Pick<
TSecretApprovalRequestSecretDALFactory,
"insertMany" | "insertApprovalSecretTags"
@ -92,7 +90,6 @@ export const secretReplicationServiceFactory = ({
secretApprovalRequestSecretDAL,
secretApprovalRequestDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
}: TSecretReplicationServiceFactoryDep) => {
const getReplicatedSecrets = (
@ -297,12 +294,6 @@ export const secretReplicationServiceFactory = ({
);
// this means it should be an approval request rather than direct replication
if (policy && actor === ActorType.USER) {
const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
if (!membership) {
logger.error("Project membership not found in %s for user %s", projectId, actorId);
return;
}
const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
destinationReplicationFolderId,
@ -316,7 +307,7 @@ export const secretReplicationServiceFactory = ({
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id,
committerUserId: actorId,
isReplicated: true
},
tx

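With the membership lookup removed, the replication path only needs the acting user's ID when it decides to open an approval request instead of writing directly. A condensed sketch of that gate, reduced from the hunk above (the helper itself is hypothetical):

// Replication writes secrets directly unless the destination has an approval policy
// and the change came from a human user; in that case an approval request is created
// with committerUserId set to the acting user's ID.
const shouldOpenApprovalRequest = (policy: { id: string } | undefined, actorType: string): boolean =>
  Boolean(policy) && actorType === "user"; // ActorType.USER in the codebase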

@ -41,7 +41,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {
const find = async (filter: TFindFilter<TSecretRotations & { projectId: string }>, tx?: Knex) => {
try {
const data = await findQuery(filter, tx || db);
const data = await findQuery(filter, tx || db.replicaNode());
return sqlNestRelationships({
data,
key: "id",
@ -93,7 +93,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.SecretRotation)
const doc = await (tx || db.replicaNode())(TableName.SecretRotation)
.join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`)
.where({ [`${TableName.SecretRotation}.id` as "id"]: id })
.select(selectAllTableCols(TableName.SecretRotation))


@ -331,7 +331,7 @@ export const secretRotationQueueFactory = ({
logger.info("Finished rotating: rotation id: ", rotationId);
} catch (error) {
logger.error(error);
logger.error(error, "Failed to execute secret rotation");
if (error instanceof DisableRotationErrors) {
if (job.id) {
await queue.stopRepeatableJobByJobId(QueueName.SecretRotation, job.id);

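The logging change above switches to the (error object, message) argument order. Assuming a pino-style logger, which that order implies, the error is serialized under err while the message stays constant and searchable. A minimal sketch:

import pino from "pino";

const logger = pino();

try {
  throw new Error("connection refused"); // stand-in for a failed rotation attempt
} catch (error) {
  // Error object first, static message second: the error details are serialized
  // alongside the message instead of replacing it.
  logger.error(error, "Failed to execute secret rotation");
}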

@ -133,7 +133,7 @@ export const secretRotationServiceFactory = ({
creds: []
};
const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
const secretRotation = secretRotationDAL.transaction(async (tx) => {
const secretRotation = await secretRotationDAL.transaction(async (tx) => {
const doc = await secretRotationDAL.create(
{
provider,
@ -148,13 +148,13 @@ export const secretRotationServiceFactory = ({
},
tx
);
await secretRotationQueue.addToQueue(doc.id, doc.interval);
const outputSecretMapping = await secretRotationDAL.secretOutputInsertMany(
Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })),
tx
);
return { ...doc, outputs: outputSecretMapping, environment: folder.environment };
});
await secretRotationQueue.addToQueue(secretRotation.id, secretRotation.interval);
return secretRotation;
};
@ -212,9 +212,9 @@ export const secretRotationServiceFactory = ({
);
const deletedDoc = await secretRotationDAL.transaction(async (tx) => {
const strat = await secretRotationDAL.deleteById(rotationId, tx);
await secretRotationQueue.removeFromQueue(strat.id, strat.interval);
return strat;
});
await secretRotationQueue.removeFromQueue(deletedDoc.id, deletedDoc.interval);
return { ...doc, ...deletedDoc };
};

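Both rotation fixes above follow the same rule: await the transaction so failures propagate, and only touch the queue after the database work has committed, so the worker never sees a job for a row that could still be rolled back. A standalone sketch of the create path; the DAL and queue shapes below are assumptions, not the project's interfaces:

type RotationRow = { id: string; interval: number };

async function createRotation(
  dal: {
    transaction<T>(fn: (tx: unknown) => Promise<T>): Promise<T>;
    create(data: Record<string, unknown>, tx: unknown): Promise<RotationRow>;
  },
  queue: { addToQueue(id: string, interval: number): Promise<void> },
  data: Record<string, unknown>
): Promise<RotationRow> {
  // All DB writes happen (and commit) inside the awaited transaction.
  const doc = await dal.transaction(async (tx) => dal.create(data, tx));
  // Only now is it safe to schedule the job: the row is visible to the worker and
  // cannot be rolled back out from under it.
  await queue.addToQueue(doc.id, doc.interval);
  return doc;
}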

@ -21,7 +21,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const data = await (tx || db)(TableName.Snapshot)
const data = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.id`, id)
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.Snapshot))
@ -43,7 +43,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
const countOfSnapshotsByFolderId = async (folderId: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.Snapshot)
const doc = await (tx || db.replicaNode())(TableName.Snapshot)
.where({ folderId })
.groupBy(["folderId"])
.count("folderId")
@ -56,7 +56,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
const findSecretSnapshotDataById = async (snapshotId: string, tx?: Knex) => {
try {
const data = await (tx || db)(TableName.Snapshot)
const data = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.id`, snapshotId)
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
.leftJoin(TableName.SnapshotSecret, `${TableName.Snapshot}.id`, `${TableName.SnapshotSecret}.snapshotId`)
@ -309,7 +309,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
// when we need to rollback we will pull from these snapshots
const findLatestSnapshotByFolderId = async (folderId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.Snapshot)
const docs = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.folderId`, folderId)
.join<TSecretSnapshots>(
(tx || db)(TableName.Snapshot).groupBy("folderId").max("createdAt").select("folderId").as("latestVersion"),


@ -42,6 +42,13 @@ export const IDENTITIES = {
},
DELETE: {
identityId: "The ID of the identity to delete."
},
GET_BY_ID: {
identityId: "The ID of the identity to get details.",
orgId: "The ID of the org of the identity"
},
LIST: {
orgId: "The ID of the organization to list identities."
}
} as const;
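These constant maps hold the human-readable descriptions for API parameters. The sketch below shows one likely consumption pattern; the assumption (not shown in this diff) is that route schemas feed them to zod's .describe() so the generated API reference picks them up, and the schema name here is illustrative only:

import { z } from "zod";

// Hypothetical params schema for the new "get identity by ID" endpoint.
const GetIdentityByIdParams = z.object({
  identityId: z.string().trim().describe(IDENTITIES.GET_BY_ID.identityId)
});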
@ -63,10 +70,13 @@ export const UNIVERSAL_AUTH = {
"The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve."
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
},
UPDATE: {
identityId: "The ID of the identity to update.",
identityId: "The ID of the identity to update the auth method for.",
clientSecretTrustedIps: "The new list of IPs or CIDR ranges that the Client Secret can be used from.",
accessTokenTrustedIps: "The new list of IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
@ -83,6 +93,10 @@ export const UNIVERSAL_AUTH = {
LIST_CLIENT_SECRETS: {
identityId: "The ID of the identity to list client secrets for."
},
GET_CLIENT_SECRET: {
identityId: "The ID of the identity to get the client secret from.",
clientSecretId: "The ID of the client secret to get details."
},
REVOKE_CLIENT_SECRET: {
identityId: "The ID of the identity to revoke the client secret from.",
clientSecretId: "The ID of the client secret to revoke."
@ -104,6 +118,229 @@ export const AWS_AUTH = {
iamRequestBody:
"The base64-encoded body of the signed request. Most likely, the base64-encoding of Action=GetCallerIdentity&Version=2011-06-15.",
iamRequestHeaders: "The base64-encoded headers of the sts:GetCallerIdentity signed request."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
allowedPrincipalArns:
"The comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.",
allowedAccountIds:
"The comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
stsEndpoint: "The endpoint URL for the AWS STS API.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
allowedPrincipalArns:
"The new comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.",
allowedAccountIds:
"The new comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
stsEndpoint: "The new endpoint URL for the AWS STS API.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
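// A sketch (not part of this constants file) of what the iamHttpRequestMethod /
// iamRequestBody / iamRequestHeaders fields above carry on login: a SigV4-signed
// sts:GetCallerIdentity request, serialized and base64-encoded. The signing step is
// elided here and the header object is a placeholder.
const exampleIamHttpRequestMethod = "POST";
const exampleIamRequestBody = Buffer.from("Action=GetCallerIdentity&Version=2011-06-15").toString("base64");
const signedHeaders: Record<string, string> = {}; // would come from a SigV4 signer (Authorization, X-Amz-Date, ...)
const exampleIamRequestHeaders = Buffer.from(JSON.stringify(signedHeaders)).toString("base64");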
export const AZURE_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
tenantId: "The tenant ID for the Azure AD organization.",
resource: "The resource URL for the application registered in Azure AD.",
allowedServicePrincipalIds:
"The comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
tenantId: "The new tenant ID for the Azure AD organization.",
resource: "The new resource URL for the application registered in Azure AD.",
allowedServicePrincipalIds:
"The new comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
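// For context on the `resource` field above, a rough sketch (not Infisical's SDK and
// not part of this file) of how a workload on an Azure VM obtains the token it later
// submits to the Azure auth login endpoint: it asks the instance metadata service for
// a managed-identity token scoped to that resource, and the token's `aud` claim must
// match the configured value.
const resource = "https://management.azure.com/"; // illustrative value only
const imdsUrl =
  "http://169.254.169.254/metadata/identity/oauth2/token" +
  `?api-version=2018-02-01&resource=${encodeURIComponent(resource)}`;
const imdsRes = await fetch(imdsUrl, { headers: { Metadata: "true" } });
const { access_token: azureJwt } = (await imdsRes.json()) as { access_token: string };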
export const GCP_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
allowedServiceAccounts:
"The comma-separated list of trusted service account emails corresponding to the GCE resource(s) allowed to authenticate with Infisical.",
allowedProjects:
"The comma-separated list of trusted GCP projects that the GCE instance must belong to authenticate with Infisical.",
allowedZones:
"The comma-separated list of trusted zones that the GCE instances must belong to authenticate with Infisical.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
allowedServiceAccounts:
"The new comma-separated list of trusted service account emails corresponding to the GCE resource(s) allowed to authenticate with Infisical.",
allowedProjects:
"The new comma-separated list of trusted GCP projects that the GCE instance must belong to authenticate with Infisical.",
allowedZones:
"The new comma-separated list of trusted zones that the GCE instances must belong to authenticate with Infisical.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const KUBERNETES_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
allowedAudience:
"The optional audience claim that the service account JWT token must have to authenticate with Infisical.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
allowedAudience:
"The new optional audience claim that the service account JWT token must have to authenticate with Infisical.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const TOKEN_AUTH = {
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
},
GET_TOKENS: {
identityId: "The ID of the identity to list token metadata for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th token.",
limit: "The number of tokens to return"
},
CREATE_TOKEN: {
identityId: "The ID of the identity to create the token for.",
name: "The name of the token to create"
},
UPDATE_TOKEN: {
tokenId: "The ID of the token to update metadata for",
name: "The name of the token to update to"
},
REVOKE_TOKEN: {
tokenId: "The ID of the token to revoke"
}
} as const;
export const OIDC_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
oidcDiscoveryUrl: "The URL used to retrieve the OpenID Connect configuration from the identity provider.",
caCert: "The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.",
boundIssuer: "The unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The list of intended recipients.",
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
boundSubject: "The expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
oidcDiscoveryUrl: "The new URL used to retrieve the OpenID Connect configuration from the identity provider.",
caCert: "The new PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.",
boundIssuer: "The new unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The new list of intended recipients.",
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
boundSubject: "The new expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
@ -111,10 +348,15 @@ export const ORGANIZATIONS = {
LIST_USER_MEMBERSHIPS: {
organizationId: "The ID of the organization to get memberships from."
},
GET_USER_MEMBERSHIP: {
organizationId: "The ID of the organization to get the membership for.",
membershipId: "The ID of the membership to get."
},
UPDATE_USER_MEMBERSHIP: {
organizationId: "The ID of the organization to update the membership for.",
membershipId: "The ID of the membership to update.",
role: "The new role of the membership."
role: "The new role of the membership.",
isActive: "The active status of the membership"
},
DELETE_USER_MEMBERSHIP: {
organizationId: "The ID of the organization to delete the membership from.",
@ -278,6 +520,9 @@ export const FOLDERS = {
path: "The path to list folders from.",
directory: "The directory to list folders from. (Deprecated in favor of path)"
},
GET_BY_ID: {
folderId: "The id of the folder to get details."
},
CREATE: {
workspaceId: "The ID of the project to create the folder in.",
environment: "The slug of the environment to create the folder in.",
@ -347,6 +592,7 @@ export const RAW_SECRETS = {
tagIds: "The ID of the tags to be attached to the created secret."
},
GET: {
expand: "Whether or not to expand secret references",
secretName: "The name of the secret to get.",
workspaceId: "The ID of the project to get the secret from.",
workspaceSlug: "The slug of the project to get the secret from.",
@ -656,6 +902,7 @@ export const INTEGRATION_AUTH = {
integration: "The slug of integration for the auth object.",
accessId: "The unique authorized access id of the external integration provider.",
accessToken: "The unique authorized access token of the external integration provider.",
awsAssumeIamRoleArn: "The AWS IAM Role to be assumed by Infisical",
url: "",
namespace: "",
refreshToken: "The refresh token for integration authorization."
@ -804,6 +1051,8 @@ export const CERTIFICATE_AUTHORITIES = {
caId: "The ID of the CA to issue the certificate from",
friendlyName: "A friendly name for the certificate",
commonName: "The common name (CN) for the certificate",
altNames:
"A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.",
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
