mirror of https://github.com/Infisical/infisical.git synced 2025-03-20 22:32:28 +00:00

Compare commits

567 Commits

Author SHA1 Message Date
9267f881d6 misc: displayed project name in slack webhook 2024-07-15 16:49:06 +08:00
c90ecd336c Merge pull request from Infisical/fix-scim-groups
Fix SCIM PATCH /Groups Fn
2024-07-15 00:57:46 +07:00
d8b1da3ddd Fix SCIM patch group fn 2024-07-15 00:29:18 +07:00
58e86382fe Merge pull request from Infisical/update-used-count-on-prem
Update dynamic seat count on prem
2024-07-14 20:13:42 +07:00
2080c4419e Update dynamic seat count on prem 2024-07-14 14:25:32 +07:00
b582a4a06d Merge pull request from DDDASHXD/sebastian/ui-fix
Fix: Features card overflow
2024-07-12 21:05:56 +02:00
a5c6a864de Fix: Features card overflow 2024-07-12 18:57:08 +00:00
5082c1ba3b Merge pull request from Infisical/misc/soft-delete-shared-secrets
misc: soft delete shared secrets upon expiry
2024-07-12 12:56:26 -04:00
cceb08b1b5 Merge pull request from Infisical/misc/address-ws-vulnerability-via-package-update
misc: addressed ws vulnerability via package update
2024-07-12 12:20:15 -04:00
4c34e58945 Merge pull request from Infisical/dependabot/npm_and_yarn/frontend/axios-0.28.0
build(deps): bump axios from 0.27.2 to 0.28.0 in /frontend
2024-07-12 19:17:08 +05:30
72de1901a1 build(deps): bump axios from 0.27.2 to 0.28.0 in /frontend
Bumps [axios](https://github.com/axios/axios) from 0.27.2 to 0.28.0.
- [Release notes](https://github.com/axios/axios/releases)
- [Changelog](https://github.com/axios/axios/blob/v0.28.0/CHANGELOG.md)
- [Commits](https://github.com/axios/axios/compare/v0.27.2...v0.28.0)

---
updated-dependencies:
- dependency-name: axios
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 13:37:13 +00:00
65fefcdd87 Merge pull request from Infisical/dependabot/npm_and_yarn/frontend/postcss-8.4.39
build(deps): bump postcss from 8.4.14 to 8.4.39 in /frontend
2024-07-12 19:03:59 +05:30
8e753eda72 misc: soft delete shared secrets 2024-07-12 21:29:17 +08:00
7137c94fa2 build(deps): bump postcss from 8.4.14 to 8.4.39 in /frontend
Bumps [postcss](https://github.com/postcss/postcss) from 8.4.14 to 8.4.39.
- [Release notes](https://github.com/postcss/postcss/releases)
- [Changelog](https://github.com/postcss/postcss/blob/main/CHANGELOG.md)
- [Commits](https://github.com/postcss/postcss/compare/8.4.14...8.4.39)

---
updated-dependencies:
- dependency-name: postcss
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 13:24:54 +00:00
52ea7dfa61 Merge pull request from Infisical/dependabot/go_modules/cli/github.com/rs/cors-1.11.0
build(deps): bump github.com/rs/cors from 1.9.0 to 1.11.0 in /cli
2024-07-12 18:52:25 +05:30
093925ed0e build(deps): bump github.com/rs/cors from 1.9.0 to 1.11.0 in /cli
Bumps [github.com/rs/cors](https://github.com/rs/cors) from 1.9.0 to 1.11.0.
- [Commits](https://github.com/rs/cors/compare/v1.9.0...v1.11.0)

---
updated-dependencies:
- dependency-name: github.com/rs/cors
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 13:20:45 +00:00
4491f2d8f1 Merge pull request from Infisical/dependabot/go_modules/cli/github.com/dvsekhvalnov/jose2go-1.6.0
build(deps): bump github.com/dvsekhvalnov/jose2go from 1.5.0 to 1.6.0 in /cli
2024-07-12 14:44:32 +05:30
4a401957c7 build(deps): bump github.com/dvsekhvalnov/jose2go in /cli
Bumps [github.com/dvsekhvalnov/jose2go](https://github.com/dvsekhvalnov/jose2go) from 1.5.0 to 1.6.0.
- [Commits](https://github.com/dvsekhvalnov/jose2go/compare/v1.5...v1.6.0)

---
updated-dependencies:
- dependency-name: github.com/dvsekhvalnov/jose2go
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-12 09:11:42 +00:00
0c673f6cca misc: addressed ws vulnerability via package update 2024-07-12 15:36:49 +08:00
10f4cbf11f Merge pull request from Infisical/feat/add-share-secret-hide-unhide
feat: add share-secret hide and unhide
2024-07-12 14:57:02 +08:00
a6a8c32326 Merge pull request from Infisical/docs/identity-github-oidc-auth
doc: added docs for connecting github to infisical via oidc auth
2024-07-12 10:26:08 +07:00
99a474dba7 Merge pull request from Infisical/misc/moved-admin-user-deletion-to-pro
misc: moved admin user deletion to pro plan
2024-07-11 13:28:33 -04:00
e439f4e5aa Update UserPanel.tsx 2024-07-11 13:25:48 -04:00
ae2ecf1540 Merge pull request from Infisical/misc/add-ttl-max-value-for-identities
misc: add max checks for TTL values of identities
2024-07-11 13:21:53 -04:00
10214ea5dc misc: renamed button label 2024-07-12 00:45:16 +08:00
918cd414a8 feat: add share-secret hide and unhide 2024-07-12 00:42:26 +08:00
f9a125acee misc: updated limit to 10 years 2024-07-11 23:40:45 +08:00
52415ea83e misc: removed redundant text 2024-07-11 23:30:55 +08:00
c5ca2b6796 doc: added docs for connecting github to infisical via oidc auth 2024-07-11 23:00:45 +08:00
ef5bcac925 Merge pull request from Infisical/move-groups
Consolidate People and Groups Tabs to shared User Tab at Org / Project Level
2024-07-11 18:58:12 +07:00
6cbeb29b4e Merge remote-tracking branch 'origin/main' into misc/add-ttl-max-value-for-identities 2024-07-11 19:17:25 +08:00
fbe344c0df Fix token auth ref 2024-07-11 14:56:57 +07:00
5821f65a63 Fix token auth ref 2024-07-11 14:56:08 +07:00
3af510d487 Merge pull request from Infisical/fix-token-auth-ref
Fix Token Auth Ref in Access Token DAL
2024-07-11 14:54:35 +07:00
c15adc7df9 Fix token auth ref 2024-07-11 14:49:22 +07:00
93af7573ac Consolidate people and groups tabs to user / user groups shared tab 2024-07-11 13:35:11 +07:00
cddda1148e misc: added max ttl checks for native auths 2024-07-11 14:05:50 +08:00
9c37eeeda6 misc: finalize form validation for universal auth ttl 2024-07-11 13:48:18 +08:00
eadf5bef77 misc: add TTL max values for universal auth 2024-07-11 13:35:58 +08:00
5dff46ee3a Add missing token auth to access token findOne fn 2024-07-11 10:59:08 +07:00
8b202c2a79 Merge pull request from Infisical/identity-improvements
Identity Workflow Improvements (Table Menu Opts, Error Handling)
2024-07-11 10:45:20 +07:00
4574519a76 Update identity table opts, identity project table error handling 2024-07-11 10:36:00 +07:00
82ee77bc05 Merge pull request from Infisical/doc/add-native-auth-to-docs
doc: added native auths to api reference
2024-07-11 09:46:26 +07:00
9a861499df Merge pull request from Infisical/secret-sharing-ui-update
update phrasing
2024-07-10 18:38:32 -04:00
17c7207f9d doc: added oidc auth api reference 2024-07-11 02:04:44 +08:00
d1f3c98f21 fix posthog cross origin calls 2024-07-10 13:46:22 -04:00
d248a6166c Merge remote-tracking branch 'origin/main' into doc/add-native-auth-to-docs 2024-07-11 01:31:50 +08:00
8fdd82a335 Add token auth to api reference 2024-07-10 23:37:36 +07:00
c501c85eb8 misc: renamed to more generic label 2024-07-11 00:14:34 +08:00
eac621db73 Merge pull request from Infisical/identity-project-provisioning
Identities Page - Project Provisioning / De-provisioning
2024-07-10 23:08:21 +07:00
ab7983973e update phrasing 2024-07-10 08:06:06 -07:00
ff43773f37 Merge pull request from Infisical/feat/move-secrets
feat: move secrets
2024-07-10 21:48:57 +08:00
68574be05b Fix merge conflicts 2024-07-10 18:18:00 +07:00
1d9966af76 Add admin to display identity table 2024-07-10 18:15:39 +07:00
4dddf764bd Finish identity page project provisioning/deprovisioning 2024-07-10 18:14:34 +07:00
2d9435457d misc: addressed typo 2024-07-10 18:58:42 +08:00
8b06215366 Merge pull request from Infisical/feat/oidc-identity
feat: oidc machine identity auth method
2024-07-10 17:29:56 +08:00
4fab746b95 misc: added description to native auth properties 2024-07-10 15:17:23 +08:00
179edd98bf misc: rolled back frontend package-lock 2024-07-10 13:45:35 +08:00
dc05b34fb1 misc: rolled back package locks 2024-07-10 13:41:33 +08:00
899757ab7c doc: added native auth to api reference 2024-07-10 13:30:06 +08:00
20f6dbfbd1 Update oidc docs image 2024-07-10 12:09:24 +07:00
8ff524a037 Move migration file to front 2024-07-10 11:51:53 +07:00
3913e2f462 Fix merge conflicts, bring oidc auth up to speed with identity ui changes 2024-07-10 10:31:48 +07:00
9832915eba add ?. in case adminUserDeletion is empty 2024-07-09 21:09:55 -04:00
ebbccdb857 add better label for identity id 2024-07-09 18:13:06 -04:00
b98c8629e5 misc: moved admin user deletion to pro 2024-07-09 23:51:09 +08:00
28723e9a4e misc: updated toast 2024-07-09 23:32:26 +08:00
079e005f49 misc: added audit log and overwrite feature 2024-07-09 21:46:12 +08:00
df90e4e6f0 Update go version in k8s dockerfile 2024-07-09 09:44:52 -04:00
6e9a624697 Merge pull request from Infisical/daniel/operator-bump-sdk
fix(operator): azure auth
2024-07-09 09:32:39 -04:00
94b0cb4697 Bump helm 2024-07-09 15:31:47 +02:00
5a5226c82f Update values.yaml 2024-07-09 15:27:11 +02:00
09cfaec175 Update infisicalsecret_controller.go 2024-07-09 15:27:11 +02:00
40abc184f2 Fix: Bump SDK version 2024-07-09 15:27:11 +02:00
3879edfab7 Merge pull request from Infisical/docs-token-auth
Update identity docs for auth token and newer identity flow
2024-07-09 16:51:44 +07:00
d20ae39f32 feat: initial move secret integration 2024-07-09 17:48:39 +08:00
53c875424e Update identity docs for auth token and newer identity flow 2024-07-09 16:47:24 +07:00
05bf2e4696 made move operation transactional 2024-07-09 16:03:50 +08:00
a06dee66f8 feat: initial logic for moving secrets 2024-07-09 15:20:58 +08:00
0eab9233bb Merge pull request from Infisical/misc/redesigned-org-security-settings
misc: redesigned org security settings page
2024-07-09 15:05:34 +08:00
9bf358a57d Merge pull request from Infisical/token-auth
Token Authentication Method + Revamped Identity (Page) Workflow
2024-07-09 11:55:56 +07:00
93926cc6b7 Merge remote-tracking branch 'origin' into token-auth 2024-07-09 11:52:14 +07:00
59ccabec69 Make fixes based on review 2024-07-09 11:51:21 +07:00
8b0678cfa1 Merge pull request from aheruz/patch-1
doc: Update how-to-create-a-feature.mdx
2024-07-08 22:53:36 -04:00
3004de459f Merge pull request from rtrompier/feat/helm
fix(helm-charts): add nodeSelector and tolerations
2024-07-08 21:41:08 -04:00
7d4e531e5f Merge branch 'main' into feat/helm 2024-07-08 21:40:45 -04:00
f66ef8b066 Update Chart.yaml 2024-07-08 21:39:16 -04:00
a116233979 add nodeSelector and tolerations to manager 2024-07-08 21:22:46 -04:00
454c0b62b9 Merge pull request from Infisical/feat/allow-admins-to-delete-users
feat: allow admins to delete users
2024-07-09 02:10:46 +08:00
2c6decaf6e misc: addressed comments 2024-07-09 01:11:24 +08:00
d0f0dca3a3 misc: added sort by 2024-07-09 00:57:23 +08:00
9efbffe5d2 misc: renamed mutation function 2024-07-09 00:42:50 +08:00
c1b242db67 misc: added pagination and moved to admin route 2024-07-09 00:34:07 +08:00
845f71e8ed Merge pull request from Infisical/vmatsiiako-patch-docs-3
Update secret-sharing.mdx
2024-07-08 09:44:59 -04:00
653fc367ac Merge pull request from akhilmhdh/feat/fly-io-banner
feat: banner on warning secret deletion in fly.io integration
2024-07-08 09:33:46 -04:00
9f0867559a update banner text 2024-07-08 09:32:47 -04:00
a37987b508 misc: added proper error handling 2024-07-08 21:31:11 +08:00
96e485910c Merge remote-tracking branch 'origin' into token-auth 2024-07-08 17:14:55 +07:00
b81f7d8350 Finish new identity page 2024-07-08 17:12:49 +07:00
eeb2e89d1a feat: banner on warning secret deletion in fly.io integration 2024-07-08 13:04:30 +05:30
f3a8fda254 misc: resolved conflict with existing method 2024-07-08 15:16:10 +08:00
ccf0c3cd35 misc: modified member to user 2024-07-08 15:09:32 +08:00
6e15979672 feat: allow admins to delete users 2024-07-08 15:04:08 +08:00
4e724d15f6 Update secret-sharing.mdx 2024-07-07 21:41:15 -07:00
5eba61b647 Merge pull request from Infisical/secret-sharing-ui-update 2024-07-07 11:53:19 -04:00
98ef1614c6 update mobile view 2024-07-07 08:52:02 -07:00
f591f6d428 update mobile view 2024-07-07 08:49:05 -07:00
795b533fce Merge pull request from Infisical/secret-sharing-ui-update 2024-07-07 07:55:29 -04:00
35be8e1912 fix ui secret sharing 2024-07-06 23:26:26 -07:00
da70f23bf6 Merge pull request from Infisical/maidul-2323e23
Fix posthog events on app.infisical frontend
2024-07-06 17:49:31 -04:00
3ba90cc42d fix posthog on app.infisical frontend 2024-07-06 17:44:53 -04:00
131ec81744 Merge pull request from Infisical/vmatsiiako-patch-docs-2 2024-07-06 13:41:14 -04:00
c84262b182 Update vercel.mdx 2024-07-06 10:38:19 -07:00
1ee9994df6 Merge pull request from Infisical/vmatsiiako-patch-docs-1 2024-07-06 13:37:59 -04:00
a3356b4bad Update azure-key-vault.mdx 2024-07-06 10:31:39 -07:00
f95092e083 doc: Update how-to-create-a-feature.mdx
`npm generate:component` should be `npm run generate:component`
2024-07-06 13:42:38 +02:00
982c51bdc7 Merge pull request from Infisical/daniel/rename-standalone-to-core
chore(binary): rename `infisical-standalone` to `infisical-core`
2024-07-05 20:04:14 -04:00
9e7ec88d57 Update build-binaries.yml 2024-07-06 02:02:04 +02:00
ce304b26d8 Merge pull request from Infisical/daniel/infisical-binary
feat: Infisical Binary
2024-07-05 19:43:15 -04:00
8deff5adfb Update package-lock.json 2024-07-06 01:34:00 +02:00
1f8b3b6779 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
a87bc66b05 Cleanup 2024-07-06 01:32:18 +02:00
de57e1af35 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
09d8822816 Update argv.ts 2024-07-06 01:32:18 +02:00
13aaef4212 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
5e9193adda Update build-binaries.yml 2024-07-06 01:32:18 +02:00
ec3e886624 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
36d30566fe Debian 2024-07-06 01:32:18 +02:00
dfbeac3dfe Update build-binaries.yml 2024-07-06 01:32:18 +02:00
87e52ddd06 Attempt .deb package 2024-07-06 01:32:18 +02:00
a62fbf088f Fix push 2024-07-06 01:32:18 +02:00
f186cb4d7b Alpine and migration mode 2024-07-06 01:32:18 +02:00
2ee123c9f6 Exit codes 2024-07-06 01:32:18 +02:00
18b6c4f73e chore: testing, hardcoded version 2024-07-06 01:32:18 +02:00
50409f0c48 Feat: Standalone migration mode 2024-07-06 01:32:18 +02:00
54e5166bb6 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b9b880d310 Trigger workflow 2024-07-06 01:32:18 +02:00
085d1d5a5e Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b02c37028b Update build-binaries.yml 2024-07-06 01:32:18 +02:00
49248ee13f Rollback 2024-07-06 01:32:18 +02:00
bafc6ee129 Fixes 2024-07-06 01:32:18 +02:00
eb6dca425c Update build-binaries.yml 2024-07-06 01:32:18 +02:00
99c1259f15 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b4770116a8 Requested changes 2024-07-06 01:32:18 +02:00
eb90f503a9 Fix: Re-add compression 2024-07-06 01:32:18 +02:00
e419983249 Update external-nextjs.ts 2024-07-06 01:32:18 +02:00
b030fe2e69 Update package-lock.json 2024-07-06 01:32:18 +02:00
eff0604e9d Revert "Update package-lock.json"
This reverts commit ae39b80f12a73fae65036f6a3af4624a5798b2bb.
2024-07-06 01:32:18 +02:00
e90f3af4ce Update package-lock.json 2024-07-06 01:32:18 +02:00
baf2763287 Update package-lock.json 2024-07-06 01:32:18 +02:00
d708a3f566 Update package-lock.json 2024-07-06 01:32:18 +02:00
5b52c33f5f Fix: Add cloud smith api key 2024-07-06 01:32:18 +02:00
a116fc2bf3 Update package.json 2024-07-06 01:32:18 +02:00
39d09eea3d Update build-binaries.yml 2024-07-06 01:32:18 +02:00
f7d071e398 Fix: Compress binaries 2024-07-06 01:32:18 +02:00
0d4dd5a6fa Fix: e2e tests 2024-07-06 01:32:18 +02:00
b4de012047 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
b3720cdbfc Fix Windows executable upload 2024-07-06 01:32:18 +02:00
0dc85dff33 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
a6e4e3c69a Trying something new 2024-07-06 01:32:18 +02:00
be9de82ef5 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
2566f4dc9e Update build-binaries.yml 2024-07-06 01:32:18 +02:00
934bfbb624 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
509037e6d0 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
f041aa7557 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
266e2856e8 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
7109d2f785 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
2134d2e118 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
c2abc383d5 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
3a2336da44 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
1266949fb1 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
62d287f8a6 Try node16 2024-07-06 01:32:18 +02:00
0b4e7f0096 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
7dda2937ba Update build-binaries.yml 2024-07-06 01:32:18 +02:00
91d81bd20c Update build-binaries.yml 2024-07-06 01:32:18 +02:00
f329a79771 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
31a31f556c Update build-binaries.yml 2024-07-06 01:32:18 +02:00
1be2f806d9 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
38a6785ca4 Update build-binaries.yml 2024-07-06 01:32:18 +02:00
377eb4cfd3 Create build-binaries.yml 2024-07-06 01:32:18 +02:00
8df7401e06 Remove compression and separate packaging 2024-07-06 01:32:18 +02:00
0c79303582 Update env.ts 2024-07-06 01:32:18 +02:00
e6edde57ba Create babel.config.json 2024-07-06 01:32:18 +02:00
6634675b2a Update .gitignore 2024-07-06 01:32:18 +02:00
50840ce26b Feat: Infisical Binary 2024-07-06 01:32:18 +02:00
4c2f7fff5c Fix: .mjs imports not being updated by babel 2024-07-06 01:30:27 +02:00
f0a3792a64 Create process.d.ts 2024-07-06 01:30:27 +02:00
70da6878c1 Fix: Enable production & standalone when packaged 2024-07-06 01:30:27 +02:00
754404d905 Fix: Serve frontend with binary 2024-07-06 01:30:27 +02:00
85cfac512c Fix: Serve frontend with binary 2024-07-06 01:30:27 +02:00
d40b907308 Merge pull request from Infisical/misc/update-make-a-wish-ui
misc: update make-a-wish UI design
2024-07-05 14:42:05 -04:00
a5b18cbb72 misc/make-a-wish-ui-improvement 2024-07-06 01:02:48 +08:00
d4a2f4590b misc: redesigned org security settings page 2024-07-06 00:25:27 +08:00
7add57ae78 Merge pull request from Infisical/fix/addressed-ldap-trust-email-issue
fix: addressed ldap trust email issue during login
2024-07-05 12:12:40 -04:00
e5879df7c7 Merge pull request from Infisical/feat/make-a-wish-feature
feat: make a wish feature
2024-07-05 12:09:53 -04:00
04298bb1a7 fix: addressed ldap trust email issue during login 2024-07-05 20:26:02 +08:00
1a6a5280a0 misc: display wish feature only on cloud 2024-07-05 19:42:38 +08:00
da0d8fdbfc feat: finished up wish integration 2024-07-05 15:19:43 +08:00
d2759ea378 patch npm ip package 2024-07-04 19:08:07 -04:00
c4385af352 Delete .github/workflows/update-be-new-migration-latest-timestamp.yml 2024-07-04 16:21:35 -04:00
bbe2d2e053 Merge pull request from akhilmhdh/feat/secret-approval-grouo
Secret approval with groups
2024-07-04 16:18:56 -04:00
2c9fdb7fad feat: initial make a wish UI 2024-07-05 00:30:22 +08:00
38eee5490e Merge pull request from Infisical/maidul-212313
Main
2024-07-04 11:58:56 -04:00
0aa7337ff4 Merge pull request from Infisical/misc/removed-webhook-url-from-audit-logs-table
misc: removed webhook url from audit logs table
2024-07-04 20:53:51 +05:30
98371f99e7 misc: removed webhook url from audit logs table 2024-07-04 23:16:02 +08:00
ddfc645cdd Merge pull request from akhilmhdh/feat/audit-log-batching
Changed audit log deletion to batched process
2024-07-04 10:54:54 -04:00
8bc6edd165 doc: added general docs for oidc auth 2024-07-04 22:31:03 +08:00
f4d9c61404 feat: added a pause in between as breather for db delete 2024-07-04 13:59:15 +05:30
5342c85696 feat: changed audit log deletion to batched process 2024-07-04 13:26:11 +05:30
2497aada8a misc: added oidc auth to access token trusted Ips 2024-07-04 15:54:37 +08:00
b05f3e0f1f Merge pull request from Infisical/feat/native-slack-webhook
feat: added native slack webhook type
2024-07-04 14:50:58 +08:00
9a2645b511 Merge pull request from akhilmhdh/fix/provider-not-found
Fix provider not found error for secret rotation
2024-07-04 12:08:55 +05:30
cb664bb042 misc: addressed review comments 2024-07-04 13:33:32 +08:00
5921f349a8 misc: removed comment 2024-07-04 12:42:36 +08:00
07db1d826b Merge pull request from Infisical/fix-license-seats-invite-propagation
Fix license seat count upon complete account invite with tx
2024-07-03 13:43:00 -07:00
74db1b75b4 Add tx support for seat count in license invitation update 2024-07-03 13:33:40 -07:00
b5166f1d39 Identity redesign modal opt 2024-07-03 13:29:31 -07:00
4927cc804a feat: added endpoint for oidc auth revocation 2024-07-04 00:53:24 +08:00
2153dd94eb feat: finished up login with identity oidc 2024-07-03 23:48:31 +08:00
d7023881e5 fix: resolving provider not found error for secret rotation 2024-07-03 20:39:02 +05:30
ef3cdd11ac feat: ui changes for secret approval group 2024-07-03 20:17:16 +05:30
612cf4f968 feat: server logic for secret approval group 2024-07-03 20:17:16 +05:30
b6a9dc7f53 feat: completed migration for secret approval group 2024-07-03 20:17:16 +05:30
08322f46f9 misc: setup audit logs for oidc identity 2024-07-03 21:38:13 +08:00
fc9326272a feat: finished up oidc auth management 2024-07-03 21:18:50 +08:00
b74595cf35 Merge pull request from Infisical/fix/addressed-main-page-ui-ux-reports
fix: addressed main page ui/ux concerns
2024-07-03 08:40:40 -04:00
a45453629c misc: addressed main page ui/ux concerns 2024-07-03 18:32:21 +08:00
f7626d03bf misc: documentation 2024-07-03 12:26:42 +08:00
bc14153bb3 Merge pull request from akhilmhdh/dynamic-secret/mssql
Dynamic secret MS SQL
2024-07-02 21:22:34 -04:00
4cfe564f3d Fix lint issues 2024-07-02 15:15:45 -07:00
93be4095c0 Finish preliminary token auth method 2024-07-02 15:05:57 -07:00
8915b4055b address security 2024-07-02 15:52:25 -04:00
c90e423e4a feat: initial setup 2024-07-03 02:06:02 +08:00
935a3cb036 Merge pull request from Infisical/feat/allow-toggling-login-options-as-admin
feat: allowed toggling login options as admin
2024-07-02 14:03:11 -04:00
148a29db19 Merge branch 'feat/allow-toggling-login-options-as-admin' of https://github.com/Infisical/infisical into feat/allow-toggling-login-options-as-admin 2024-07-03 01:58:04 +08:00
b12de3e4f5 misc: removed usecallback 2024-07-03 01:57:24 +08:00
661e5ec462 Merge pull request from Infisical/maidul-2132
Main
2024-07-02 20:29:43 +05:30
5cca51d711 access prod db in ci 2024-07-02 10:57:05 -04:00
9e9b9a7b94 update self lock out msg 2024-07-02 10:53:36 -04:00
df1ffcf934 Merge pull request from Infisical/misc/add-config-to-redacted-keys
misc: add config to redacted keys
2024-07-02 10:47:20 -04:00
0ef7eacd0e misc: add config to redacted keys 2024-07-02 22:34:40 +08:00
776822d7d5 misc: updated secret path component 2024-07-02 20:54:27 +08:00
fe9af20d8c fix: addressed type issue 2024-07-02 20:28:03 +08:00
398a8f363d misc: cleanup of form display structure 2024-07-02 20:20:25 +08:00
ce5dbca6e2 misc: added placeholder for incoming webhook url 2024-07-02 20:04:55 +08:00
ed5a7d72ab feat: added native slack webhook type 2024-07-02 19:57:58 +08:00
3ac6b7be65 Merge pull request from Infisical/misc/add-check-for-ldap-group
misc: added backend check for ldap group config
2024-07-02 12:59:03 +08:00
10601b5afd Merge pull request from akhilmhdh/feat/migration-file-checks
feat: added slugify migration file creator name and additional check to ensure migration files are not edited in PR
2024-07-01 21:01:47 -04:00
8eec08356b update error message 2024-07-01 20:59:56 -04:00
0b4d4c008a docs: dynamic secret mssql 2024-07-02 00:18:56 +05:30
ae953add3d feat: dynamic secret for mssql completed 2024-07-02 00:12:38 +05:30
5960a899ba Merge pull request from Infisical/create-pull-request/patch-1719844740
GH Action: rename new migration file timestamp
2024-07-02 01:25:54 +08:00
ea98a0096d chore: renamed new migration files to latest timestamp (gh-action) 2024-07-01 14:38:59 +00:00
b8f65fc91a Merge pull request from Infisical/feat/mark-projects-as-favourite
feat: allow org members to mark projects as favorites
2024-07-01 22:38:36 +08:00
06a4e68ac1 misc: more improvements 2024-07-01 22:33:01 +08:00
9cbf9a675a misc: simplified update project favorites logic 2024-07-01 22:22:44 +08:00
178ddf1fb9 Merge pull request from akhilmhdh/fix/role-bug
Resolved identity roleId not setting null for predefined role selection
2024-07-01 19:42:17 +05:30
030d4fe152 misc: added handling of empty groups and default value 2024-07-01 21:10:27 +08:00
46abda9041 misc: add org scoping to mutation 2024-07-01 20:22:59 +08:00
c976a5ccba misc: add scoping to org-level 2024-07-01 20:20:15 +08:00
1eb9ea9c74 misc: implemented more review comments 2024-07-01 20:10:41 +08:00
7d7612aaf4 misc: removed use memo 2024-07-01 18:29:56 +08:00
f570b3b2ee misc: combined into one list 2024-07-01 18:23:38 +08:00
0b8f6878fe misc: added check for ldap group 2024-07-01 18:12:16 +08:00
758a9211ab misc: addressed pr comments 2024-07-01 13:11:47 +08:00
0bb2b2887b updated handbook 2024-06-30 10:02:51 -07:00
eeb0111bbe updated handbook style 2024-06-30 02:03:59 -07:00
d12c538511 updated handbook 2024-06-30 02:01:40 -07:00
6f67346b2a Merge pull request from Infisical/daniel/fix-k8-managed-secret-crash
fix(k8-operator): crash on predefined managed secret
2024-06-28 17:01:37 -04:00
a93db44bbd Helm 2024-06-28 21:34:59 +02:00
1ddacfda62 Fix: Annotations map nil sometimes nil when pre-created by the user 2024-06-28 21:29:33 +02:00
5a1e43be44 misc: only display recover when email login is enabled 2024-06-29 02:12:09 +08:00
04f54479cd misc: implemented review comments 2024-06-29 01:58:27 +08:00
351d0d0662 Merge pull request from Infisical/misc/added-secret-name-trim
misc: added secret name trimming
2024-06-29 01:14:23 +08:00
5a01edae7a misc: added favorites to app layout selection 2024-06-29 01:02:28 +08:00
506e86d666 feat: added slugify migration file creator name and additional check to ensure migration files are not edited in PR 2024-06-28 20:33:56 +05:30
11d9166684 misc: initial project favorite in grid view 2024-06-28 17:40:34 +08:00
1859557f90 Merge pull request from akhilmhdh/feat/secret-manager-integration-auth
AWS Secret Manager assume role based integration
2024-06-27 23:32:25 -04:00
59fc34412d small nits for admin login toggle pr 2024-06-27 20:35:15 -04:00
1b2a1f2339 Merge pull request from akhilmhdh/feat/read-replica
Postgres read replica support
2024-06-27 19:36:44 -04:00
15b4c397ab Merge pull request from Infisical/revert-2023-revert-1995-identity-based-pricing
Add support for Identity-Based Pricing"
2024-06-27 15:56:44 -07:00
fc27ad4575 Merge pull request from Infisical/create-pull-request/patch-1719509560
GH Action: rename new migration file timestamp
2024-06-27 23:07:16 +05:30
b7467a83ab chore: renamed new migration files to latest timestamp (gh-action) 2024-06-27 17:32:39 +00:00
3baf434230 Merge pull request from Infisical/misc/add-on-update-trigger-oidc
misc: add onUpdate trigger to oidc config
2024-06-27 23:02:14 +05:30
e28471a9f4 misc: add onUpdate trigger to oidc config 2024-06-27 19:55:01 +08:00
b2d6563994 misc: added secret name trimming 2024-06-27 19:41:00 +08:00
cfba8f53e3 fix: resolved identity roleId not setting null for predefined role switch 2024-06-27 15:06:00 +05:30
3537a5eb9b feat: switch to tabs instead of separate pages for aws secret manager assume and access key 2024-06-27 13:06:04 +05:30
d5b17a8f24 feat: removed explicit check for aws access key credential allowing to pick it automatically 2024-06-27 13:05:30 +05:30
d6881e2e68 misc: added signup option filtering 2024-06-27 13:53:12 +08:00
92a663a17d misc: design change to finalize scim section in org settings 2024-06-27 13:24:26 +08:00
b3463e0d0f misc: added explicit comment of intent 2024-06-27 12:55:39 +08:00
c460f22665 misc: added backend disable checks 2024-06-27 12:40:56 +08:00
7cdc47cd3a Update build-staging-and-deploy-aws.yml 2024-06-26 18:55:13 -04:00
d666d60f9f Merge pull request from Infisical/doc/added-guide-for-configuring-certs
doc: added guide for configuring certs
2024-06-26 15:30:15 -04:00
491c4259ca small rephrase for gitlab cert docs 2024-06-26 15:29:44 -04:00
cff20eb621 doc: added guide for configuring certs 2024-06-27 03:00:15 +08:00
db39d03713 misc: added check to backend 2024-06-27 01:59:02 +08:00
84d8879177 Merge pull request from Infisical/create-pull-request/patch-1719422383
GH Action: rename new migration file timestamp
2024-06-27 01:20:12 +08:00
aa4f2adbb6 Merge pull request from Infisical/create-pull-request/patch-1719422278
GH Action: rename new migration file timestamp
2024-06-27 01:19:50 +08:00
86ed3ef6d6 chore: renamed new migration files to latest timestamp (gh-action) 2024-06-26 17:19:42 +00:00
a5bb80adc4 Merge pull request from Infisical/feat/allow-audit-log-retention-to-be-configurable
feat: enabled customization of project audit logs retention period
2024-06-27 01:19:20 +08:00
0e87dd3996 chore: renamed new migration files to latest timestamp (gh-action) 2024-06-26 17:17:57 +00:00
e1801e9eb4 Merge pull request from Infisical/feat/added-support-for-configurable-ldap-user-identifier
misc: add support for configuring unique attribute property for ldap users
2024-06-27 01:16:51 +08:00
9daa5badec misc: made reusable helper for login page 2024-06-27 01:15:50 +08:00
e1ed37c713 misc: adjusted OrgSettingsPage and PersonalSettingsPage to include toggle 2024-06-27 01:07:28 +08:00
8eea82a1a0 docs: updated docs on usage of aws sm integration with assume role 2024-06-26 22:35:49 +05:30
694d0e3ed3 feat: updated ui for aws sm assume role integration 2024-06-26 22:35:12 +05:30
58f6c6b409 feat: updated integration api and queue to support aws secret manager assume role feature 2024-06-26 22:33:49 +05:30
f4a33caba6 misc: upgraded doc description of new field 2024-06-27 00:38:17 +08:00
e0a6f09b5e misc: removed max in schema for api layer 2024-06-26 23:17:31 +08:00
98a15a901e feat: allowed toggling login options as admin 2024-06-26 22:45:14 +08:00
1e701687ae misc: adjusted other hook usage to incorporate new properties 2024-06-26 19:04:11 +08:00
15758b91f8 doc: updated ldap documentation 2024-06-26 18:54:34 +08:00
2d3a4a7559 feat: added support for configurable ldap user identifier 2024-06-26 18:36:28 +08:00
a1d01d5cbd misc: display retention settings only for self-hosted/dedicated 2024-06-26 12:44:56 +08:00
2e3aedc62b Merge pull request from Infisical/feat/add-initial-sync-behavior-azure
feat: added initial sync behavior for azure key integration
2024-06-26 11:43:54 +08:00
e0a5b1444a add step to install docker 2024-06-25 18:17:58 -04:00
1c2698f533 Revert "Revert "Add support for Identity-Based Pricing"" 2024-06-25 18:04:26 -04:00
b50833bded Merge pull request from Infisical/revert-1995-identity-based-pricing
Revert "Add support for Identity-Based Pricing"
2024-06-25 18:04:14 -04:00
e0c774c045 Revert "Add support for Identity-Based Pricing" 2024-06-25 18:03:07 -04:00
514df55d67 Merge pull request from Infisical/identity-based-pricing
Add support for Identity-Based Pricing
2024-06-25 14:50:18 -07:00
311b378f3b Merge pull request from Grraahaam/feat/cli-secret-plain-output
feat(cli): plain secret value output
2024-06-25 23:47:10 +02:00
b01b4323ca Fix merge conflicts 2024-06-25 14:46:58 -07:00
285a01af51 Merge pull request from Infisical/misc/add-documentation-for-bitbucket-cli-integration
doc: added bitbucket integration with cli
2024-06-25 16:33:07 -04:00
f7e658e62b rename bit bucket options 2024-06-25 16:31:56 -04:00
a8aef2934a Merge pull request from Infisical/feat/add-option-to-share-secrets-directly
feat: added option to share secret directly from main page
2024-06-25 15:55:04 -04:00
cc30476f79 Merge pull request from Infisical/misc/add-prompt-for-deleting-aws-sm-integration
misc: added proper prompt for aws secret manager integration deletion
2024-06-25 15:47:55 -04:00
5d59fe8810 fix: resolved rebase issue with knex.d.ts 2024-06-25 22:54:45 +05:30
90eed8d39b docs: updated replica information to the docs 2024-06-25 22:51:38 +05:30
f5974ce9ad feat: resolved some queries giving any[] on db instance modification for replication 2024-06-25 22:51:38 +05:30
c6b51af4b1 feat: removed knex-tables.d.ts 2024-06-25 22:51:37 +05:30
5139bf2385 misc: added delete prompt for aws secret manager integ deletion 2024-06-26 01:21:14 +08:00
c13c37fc77 feat: switched read db operations to replica nodes 2024-06-25 22:50:17 +05:30
259c01c110 feat: added read replica option in config and extended knex to choose 2024-06-25 22:49:28 +05:30
a016d0d33f Merge pull request from akhilmhdh/feat/ui-permission-check-broken
Terraform identity management apis
2024-06-25 22:46:53 +05:30
663be06d30 feat: added share secret to main page side nav 2024-06-26 00:22:47 +08:00
fa392382da feat: added option to share secret directly from main page 2024-06-25 23:41:17 +08:00
d34b2669c5 misc: finalized success message 2024-06-25 21:32:24 +08:00
11ea5990c9 feat: enabled customization of project audit logs retention period 2024-06-25 21:14:43 +08:00
9a66514178 Merge pull request from Infisical/feat/project-setting-for-rebuilding-index
feat: added project setting for rebuilding secret indices
2024-06-25 15:25:36 +08:00
d4f9faf24d feat: added initial sync behavior for azure key integration 2024-06-25 13:59:20 +08:00
a3c8d06845 Update overview.mdx 2024-06-24 20:25:53 -07:00
71b7be4057 Merge pull request from handotdev/patch-2
Update overview.mdx
2024-06-24 20:25:02 -07:00
5079a5889a Update overview.mdx 2024-06-24 17:37:35 -07:00
232b375f46 Merge pull request from Infisical/create-pull-request/patch-1719267521
GH Action: rename new migration file timestamp
2024-06-24 17:07:41 -07:00
d2acedf79e chore: renamed new migration files to latest timestamp (gh-action) 2024-06-24 22:18:39 +00:00
9d846319b0 Merge pull request from Infisical/cert-san
Add Certificate Support for Alt Names (SANs)
2024-06-24 15:18:17 -07:00
d69267a3ca remove console.log 2024-06-24 17:27:02 -04:00
051eee8701 Update altName example in docs 2024-06-24 14:14:51 -07:00
b5aa650899 Add cert support for alt names 2024-06-24 14:07:15 -07:00
376e185e2b Merge pull request from Infisical/daniel/expand-single-secret-ref
feat(api): Expand single secret references
2024-06-24 20:39:54 +02:00
a15a0a257c Update identity-router.ts 2024-06-24 20:38:11 +02:00
6facce220c update select default org login 2024-06-24 14:06:31 -04:00
620a423cee update org selection error message 2024-06-24 13:43:56 -04:00
361496c644 Merge pull request from Infisical/create-pull-request/patch-1719249628
GH Action: rename new migration file timestamp
2024-06-24 13:20:49 -04:00
e03f77d9cf chore: renamed new migration files to latest timestamp (gh-action) 2024-06-24 17:20:27 +00:00
60cb420242 Merge pull request from Infisical/daniel/default-org
Feat: Default organization slug for LDAP/SAML
2024-06-24 13:20:02 -04:00
1b8a77f507 Merge pull request from Infisical/patch-ldap
Patch LDAP undefined userId, email confirmation code sending
2024-06-24 13:19:48 -04:00
5a957514df Feat: Clear select option 2024-06-24 19:12:38 +02:00
a6865585f3 Fix: Failing to create admin config on first run 2024-06-24 19:11:58 +02:00
1aaca12781 Update super-admin-dal.ts 2024-06-24 19:11:58 +02:00
7ab5c02000 Requested changes 2024-06-24 19:11:58 +02:00
c735beea32 Fix: Requested changes 2024-06-24 19:11:58 +02:00
2d98560255 Updated "defaultOrgId" and "defaultOrgSlug" to "defaultAuthOrgId" and "defaultAuthOrgSlug" 2024-06-24 19:10:22 +02:00
91bdd7ea6a Fix: UI descriptions 2024-06-24 19:09:48 +02:00
b0f3476e4a Fix: Completely hide org slug input field when org slug is passed or default slug is provided 2024-06-24 19:09:03 +02:00
14751df9de Feat: Default SAML/LDAP organization slug 2024-06-24 19:09:03 +02:00
e1a4185f76 Hide org slug input when default slug is set 2024-06-24 19:08:19 +02:00
4905ad1f48 Feat: Default SAML/LDAP organization slug 2024-06-24 19:08:19 +02:00
56bc25025a Update Login.utils.tsx 2024-06-24 19:08:19 +02:00
45da563465 Convert navigate function to hook 2024-06-24 19:08:19 +02:00
1930d40be8 Feat: Default SAML/LDAP organization slug 2024-06-24 19:05:46 +02:00
30b8d59796 Feat: Default SAML/LDAP organization slug 2024-06-24 19:05:46 +02:00
aa6cca738e Update index.ts 2024-06-24 19:05:46 +02:00
04dee70a55 Type changes 2024-06-24 19:05:46 +02:00
dfb53dd333 Helper omit function 2024-06-24 19:05:20 +02:00
ab19e7df6d Feat: Default SAML/LDAP organization slug 2024-06-24 19:05:20 +02:00
f9a1accf84 Merge pull request from Infisical/create-pull-request/patch-1719245983
GH Action: rename new migration file timestamp
2024-06-25 00:24:08 +08:00
ca86f3d2b6 chore: renamed new migration files to latest timestamp (gh-action) 2024-06-24 16:19:41 +00:00
de466b4b86 Merge pull request from Infisical/feature/oidc
feat: oidc
2024-06-25 00:19:15 +08:00
745f1c4e12 misc: only display when user is admin 2024-06-24 23:24:07 +08:00
106dc261de Merge pull request from handotdev/patch-1
Update style.css in docs
2024-06-24 08:20:30 -07:00
548a0aed2a Merge pull request from Infisical/daniel/sdk-docs-typo
fix(docs): duplicate faq item
2024-06-24 07:28:52 -07:00
6029eaa9df misc: modified step title 2024-06-24 20:17:31 +08:00
8703314c0c doc: added bitbucket integration with cli 2024-06-24 20:11:53 +08:00
b7b606ab9a Update overview.mdx 2024-06-24 14:01:24 +02:00
00617ea7e8 Update style.css 2024-06-24 00:56:39 -07:00
6d9330e870 Update machine-identities.mdx 2024-06-23 14:27:11 -07:00
d026a9b988 Update mint.json 2024-06-23 13:23:10 -07:00
c2c693d295 Update mint.json 2024-06-23 13:22:30 -07:00
c9c77f6c58 Update saml docs 2024-06-22 19:02:43 -07:00
36a34b0f58 Update sdk docs 2024-06-22 18:38:37 -07:00
45c153e592 Update terraform-cloud.mdx 2024-06-22 18:31:12 -07:00
eeaabe44ec Update LDAP docs 2024-06-22 16:44:42 -07:00
084fc7c99e feat: resolved gcp auth revoke error 2024-06-23 01:25:27 +05:30
b6cc17d62a feat: updated var names and permission, rate limit changes based on comments 2024-06-23 01:21:26 +05:30
bd0d0bd333 feat: added project setting for rebuilding secret indices 2024-06-22 22:42:36 +08:00
4b37c0f1c4 Merge pull request from Infisical/daniel/show-imported-overwritten-values-overview
feat(platform): Show imported/overwritten values in secret overview
2024-06-21 17:13:02 -04:00
c426ba517a Feat: Expand single secret references 2024-06-21 23:12:38 +02:00
973403c7f9 Merge branch 'feature/oidc' of https://github.com/Infisical/infisical into feature/oidc 2024-06-21 22:23:23 +08:00
52fcf53d0e misc: moved authenticate to preValidation 2024-06-21 22:22:34 +08:00
cbef9ea514 Merge pull request from Infisical/sharing-ui-update
updated secret sharing design
2024-06-21 10:03:10 -04:00
d0f8394f50 Fix: Added explicit SecretType type 2024-06-21 15:23:50 +02:00
9c06cab99d Fix: Added explicit SecretType type 2024-06-21 15:23:31 +02:00
c43a18904d Feat: Show overwritten value in secret overview and allow for edits 2024-06-21 15:23:09 +02:00
dc0fe6920c Feat: Show overwritten value in secret overview and allow for edits 2024-06-21 15:22:48 +02:00
077cbc97d5 Update encryptSecrets.ts 2024-06-21 15:21:52 +02:00
f3da676b88 Update checkOverrides.ts 2024-06-21 15:21:44 +02:00
988c612048 Update DropZone.tsx 2024-06-21 15:20:48 +02:00
7cf7eb5acb Fix: Added explicit SecretType type 2024-06-21 15:20:37 +02:00
a2fd071b62 Update SecretOverviewPage.tsx 2024-06-21 14:31:45 +02:00
0d7a07dea3 Update SecretEditRow.tsx 2024-06-21 14:31:41 +02:00
f676b44335 Feat: Show imported values in overview 2024-06-21 14:31:36 +02:00
00d83f9136 Update SecretInput.tsx 2024-06-21 14:31:29 +02:00
eca6871cbc Feat: Show imported values in overview 2024-06-21 14:31:22 +02:00
97cff783cf updated secret sharing design 2024-06-20 22:30:00 -07:00
3767ec9521 Update build-staging-and-deploy-aws.yml 2024-06-21 00:21:07 -04:00
91634fbe76 Patch LDAP 2024-06-20 17:49:09 -07:00
f31340cf53 Minor adjustments to oidc docs 2024-06-20 16:34:21 -07:00
908358b841 Update build-staging-and-deploy-aws.yml 2024-06-20 19:32:30 -04:00
b2a88a4384 Update build-staging-and-deploy-aws.yml 2024-06-20 16:01:55 -04:00
ab73e77499 Update build-staging-and-deploy-aws.yml 2024-06-20 15:54:06 -04:00
095a049661 Update build-staging-and-deploy-aws.yml 2024-06-20 15:48:54 -04:00
3a51155d23 Merge pull request from akhilmhdh/fix/handover-enc-v1
feat: resolved generate srp failing for user enc v1 users
2024-06-20 14:05:52 -04:00
c5f361a3e5 feat: updated fail message over srp key generation as per review 2024-06-20 22:30:45 +05:30
5ace8ed073 feat: resolved generate srp failing for user enc v1 users 2024-06-20 22:12:44 +05:30
193d6dad54 misc: removed read sso from org member 2024-06-21 00:39:58 +08:00
0f36fc46b3 docs: added docs for general oidc configuration 2024-06-21 00:37:37 +08:00
4072a40fe9 feat: completed all revoke for other identity auth 2024-06-20 21:18:45 +05:30
0dc132dda3 feat: added universal auth endpoints for revoke and client secret endpoint to fetch details 2024-06-20 21:18:45 +05:30
605ccb13e9 feat: added endpoints and docs for identity get by id and list operation 2024-06-20 21:18:45 +05:30
4a1a399fd8 docs: added documentation for auth0 oidc configuration 2024-06-20 22:56:53 +08:00
3f6b84de3b fix(helm-charts): add nodeSelector and tolerations 2024-06-20 16:32:47 +02:00
d19e2f64f0 misc: added oidc to user alias type 2024-06-20 21:14:47 +08:00
1e0f54d9a4 doc: added mentions of oidc 2024-06-20 21:14:09 +08:00
8d55c2802e misc: added redirect after user creation 2024-06-20 20:55:26 +08:00
e9639df8ce docs: added keycloak-oidc documentation 2024-06-20 20:25:57 +08:00
e0f5ecbe7b misc: added oidc to text label 2024-06-20 15:40:40 +08:00
2160c66e20 doc(cli): improve --plain example + add --silent notice 2024-06-20 09:15:19 +02:00
1c5c7c75c4 Merge remote-tracking branch 'origin/main' into feat/cli-secret-plain-output 2024-06-20 08:50:11 +02:00
3e230555fb misc: added oidc checks to signup 2024-06-20 13:59:50 +08:00
31e27ad1d7 update docs to include TELEMETRY_ENABLED 2024-06-19 21:53:26 -04:00
24c75c6325 Merge remote-tracking branch 'origin' into identity-based-pricing 2024-06-19 14:18:13 -07:00
0a22a2a9ef Readjustments 2024-06-19 14:16:32 -07:00
d0f1cad98c Add support for identity-based pricing 2024-06-19 13:59:47 -07:00
4962a63888 Merge pull request from Infisical/daniel/unify-cli-auth-methods
feat(cli): Unify CLI auth methods
2024-06-19 21:57:49 +02:00
ad92565783 misc: grammar update 2024-06-20 03:30:03 +08:00
6c98c96a15 misc: added comment for samesite lax 2024-06-20 03:29:20 +08:00
f0a70d8769 misc: added samesite lax 2024-06-20 03:19:10 +08:00
9e9de9f527 Merge pull request from Infisical/daniel/k8-managed-secret-recreation
feat(k8-operator): reconcile on managed secret deletion
2024-06-19 19:48:42 +02:00
6af4a06c02 Merge pull request from Infisical/daniel/login-all-auth-methods
feat(cli): Support for all authentication methods in `infisical login` command
2024-06-19 19:44:42 +02:00
fe6dc248b6 Update login.mdx 2024-06-19 19:40:27 +02:00
d64e2fa243 misc: added client id and secret focus toggle 2024-06-20 01:37:52 +08:00
7d380f9b43 fix: documentation improvements 2024-06-19 19:34:12 +02:00
76c8410081 Update mail attribute on ldap login check 2024-06-19 10:33:11 -07:00
afee158b95 Start adding identity based pricing logic 2024-06-19 10:31:58 -07:00
6df90fa825 Docs: Improve and update infisical login cmd docs 2024-06-19 19:24:11 +02:00
c042bafba3 Add flags 2024-06-19 19:24:11 +02:00
8067df821e Update login.go 2024-06-19 19:24:11 +02:00
1906896e56 Update constants.go 2024-06-19 19:24:11 +02:00
a8ccfd9c92 Feat: Login support for all auth methods 2024-06-19 19:24:11 +02:00
32609b95a0 Update constants.go 2024-06-19 19:24:11 +02:00
08d3436217 Update login.go 2024-06-19 19:24:11 +02:00
2ae45dc1cc Feat: Login support for all auth methods 2024-06-19 19:24:11 +02:00
44a898fb15 Update auth.go 2024-06-19 19:24:11 +02:00
4d194052b5 Feat: Login support for all auth methods 2024-06-19 19:24:11 +02:00
1d622bb121 Update agent.go 2024-06-19 19:24:11 +02:00
ecca6f4db5 Merge remote-tracking branch 'origin/main' into feature/oidc 2024-06-20 01:21:46 +08:00
b198f97930 misc: added oidc create validation in route 2024-06-20 00:28:14 +08:00
63a9e46936 misc: removed unnecessary zod assertions 2024-06-20 00:20:00 +08:00
7c067551a4 misc: added frontend validation for oidc form 2024-06-20 00:15:18 +08:00
5c149c6ac6 Merge pull request from Infisical/misc/added-special-handling-of-non-root-folders
misc: added special pruning of non-root folders
2024-06-19 12:14:11 -04:00
c19f8839ff Merge pull request from akhilmhdh/feat/srp-handover
Removing Master password for Oauth/SSO/LDAP users.
2024-06-19 12:09:42 -04:00
1193ddbed1 misc: added rate limit for oidc login endpoint 2024-06-19 23:02:49 +08:00
c6c71a04e8 Update changelog 2024-06-19 07:53:00 -07:00
6457c34712 misc: addressed eslint issue regarding configurationType 2024-06-19 22:41:33 +08:00
6a83b58de4 misc: added support for dynamic discovery of OIDC configuration 2024-06-19 22:33:50 +08:00
d47c586a52 Helm 2024-06-19 15:26:06 +02:00
88156c8cd8 small k8s bug patch 2024-06-19 09:12:46 -04:00
27d5d90d02 Update infisicalsecret_controller.go 2024-06-19 15:00:51 +02:00
0100ddfb99 misc: addressed review comments 2024-06-19 19:35:02 +08:00
2bc6db1c47 misc: readded cookie nginx config for dev 2024-06-19 14:19:27 +08:00
92f2f16656 misc: added option for trusting OIDC emails by default 2024-06-19 13:46:17 +08:00
07ca1ed424 misc: added special pruning of non-root folders 2024-06-19 12:41:47 +08:00
18c5dd3cbd Update README, docs for pki 2024-06-18 13:19:02 -07:00
467e3aab56 Update infisicalsecret_controller.go 2024-06-18 20:50:14 +02:00
577b432861 feat(k8-operator): reconcile when managed secret is deleted 2024-06-18 20:45:17 +02:00
dda6b1d233 Update infisicalsecret_controller.go 2024-06-18 20:15:22 +02:00
e83f31249a Merge pull request from Infisical/daniel/cli-auth-methods
feat(agent): Authentication methods
2024-06-18 14:14:22 -04:00
18e69578f0 feat: added support for limiting email domains 2024-06-19 01:29:26 +08:00
0685a5ea8b Merge remote-tracking branch 'origin/main' into feature/oidc 2024-06-18 23:54:06 +08:00
3142d36ea1 Merge pull request from Infisical/daniel/ingrations-improvements
Fix: Silent integration errors
2024-06-18 15:14:23 +02:00
bdc7c018eb misc: added comment regarding session and redis usage 2024-06-18 20:36:09 +08:00
9506b60d02 Feat: Silent integration errors 2024-06-18 14:11:19 +02:00
ed25b82113 Update infisical-agent.mdx 2024-06-18 14:06:37 +02:00
83bd97fc70 Update infisical-agent.mdx 2024-06-18 14:04:42 +02:00
1d5115972b fix: agent docs improvements 2024-06-18 14:03:32 +02:00
d26521be0b Update helper.go 2024-06-18 14:03:16 +02:00
473f8137fd Improved flag descriptions 2024-06-18 13:33:41 +02:00
bcd65333c0 misc: added handling of inactive and undefined oidc config 2024-06-18 19:17:54 +08:00
719d0ea30f Optional response 2024-06-18 12:33:36 +02:00
371b96a13a misc: removed cookie path proxy for dev envs 2024-06-18 15:36:49 +08:00
c5c00b520c misc: added session regenerate for fresh state 2024-06-18 15:35:18 +08:00
8de4443be1 feat: added support for login via cli 2024-06-18 15:26:08 +08:00
96ad3b0264 misc: used redis for oidc session management 2024-06-18 14:20:04 +08:00
aaef339e21 Revert "temp disable cors"
This reverts commit c8677ac54867f6b04ff1e10085d456522c70212d.
2024-06-17 20:33:09 -04:00
e3beeb68eb Merge pull request from Infisical/daniel/leave-project
Feat: Leave Project
2024-06-17 20:31:21 -04:00
d0c76ae4b4 Merge pull request from Infisical/analytics-update
remove k8s events from posthog
2024-06-17 20:14:28 -04:00
a5cf6f40c7 Update integration-sync-secret.ts 2024-06-17 22:44:35 +02:00
c8677ac548 temp disable cors 2024-06-17 16:03:12 -04:00
df51d05c46 feat: integrated oidc with sso login 2024-06-18 02:10:08 +08:00
4f2f7b2f70 misc: moved oidc endpoints to /sso 2024-06-18 01:21:42 +08:00
d79ffbe37e misc: added license checks for oidc sso 2024-06-18 01:11:57 +08:00
2c237ee277 feat: moved oidc to ee directory 2024-06-18 00:53:56 +08:00
56cc248425 feature: finalized oidc core service methods 2024-06-17 23:28:27 +08:00
61fcb2b605 feat: finished oidc form functions 2024-06-17 22:37:23 +08:00
992cc03eca Merge pull request from akhilmhdh/feat/ui-permission-check-broken
New API endpoints for Tag update, get by id and get by slug
2024-06-17 19:13:45 +05:30
f0e7c459e2 feat: switched back to prod openapi 2024-06-17 18:28:45 +05:30
29d0694a16 Merge pull request from Infisical/daniel/fix-null-name-fields
Fix: `null` name fields on signup
2024-06-17 08:49:34 -04:00
66e5edcfc0 feat: oidc poc 2024-06-17 20:32:47 +08:00
f13930bc6b Fix: Silent integration errors 2024-06-17 13:14:46 +02:00
0d5514834d Fix: Redundancies 2024-06-17 13:14:28 +02:00
b495156444 feat: added docs for new tag api operations 2024-06-17 15:16:48 +05:30
65a2b0116b feat: added update, get by id and get by slug as tag api methods 2024-06-17 15:13:11 +05:30
8ef2501407 Fix: null null firstName and lastName allowed during signup 2024-06-17 10:58:05 +02:00
2675aa6969 Update agent.go 2024-06-16 07:45:47 +02:00
6bad13738f Fix: Abstraction of getting env variable or file content 2024-06-16 07:39:57 +02:00
dbae6968c9 Update constants.go 2024-06-16 07:39:33 +02:00
e019f3811b Helpers 2024-06-16 07:39:27 +02:00
24935f4e07 Remove redundant auth schema set (defaults to Bearer) 2024-06-15 08:59:00 +02:00
1835777832 Move validation 2024-06-15 08:56:55 +02:00
cb237831c7 Remove log 2024-06-15 08:56:48 +02:00
49d2ea6f2e Feat: Unify CLI auth methods 2024-06-15 08:48:14 +02:00
3b2a2d1a73 Feat: Unify CLI auth methods 2024-06-15 08:48:11 +02:00
f490fb6616 Update cli.go 2024-06-15 08:48:05 +02:00
c4f9a3b31e Feat: Unify CLI auth methods 2024-06-15 08:48:03 +02:00
afcf15df55 Set secrets raw support 2024-06-15 08:47:48 +02:00
bf8aee25fe Feat: Unify CLI auth methods 2024-06-15 08:45:25 +02:00
ebdfe31c17 Raw secrets operations models 2024-06-15 08:44:55 +02:00
e65ce932dd feat: create/update raw secrets 2024-06-15 08:44:42 +02:00
c119f506fd docs 2024-06-15 02:35:04 +02:00
93638baba7 Update agent.go 2024-06-15 02:34:21 +02:00
bad97774c4 remove k8s events from posthog 2024-06-14 16:30:12 -07:00
68f5be2ff1 Fix: File-based credentials 2024-06-14 23:55:25 +02:00
0b54099789 Feat(agent): Multiple auth methods 2024-06-14 23:09:31 +02:00
9b2a2eda0c Feat(agent): Multiple auth methods 2024-06-14 23:09:21 +02:00
ccef9646c6 Update helper.go 2024-06-14 07:20:53 +02:00
458639e93d Create auth.go 2024-06-14 07:20:51 +02:00
35998e98cf Update token.go 2024-06-14 07:20:49 +02:00
e19b67f9a2 Feat: Auth methods (draft 1) 2024-06-14 07:20:42 +02:00
f41ec46a35 Fix: Properly rename function to CallMachineIdentityRefreshAccessToken 2024-06-14 07:20:17 +02:00
33aa9ea1a7 Install Go SDK & go mod tidy 2024-06-14 07:19:51 +02:00
21bd468307 feat: change field password to hashedPassword 2024-06-12 23:04:10 +05:30
e95109c446 feat: updated cli to include password on login 2024-06-12 22:43:59 +05:30
93d5180dfc feat: just forward for ldap users 2024-06-12 19:25:06 +05:30
a9bec84d27 feat: applied hashed password change in change-password route 2024-06-12 19:25:06 +05:30
e3f87382a3 rephrase comment and error message 2024-06-12 19:25:06 +05:30
736f067178 feat: srp handover for admin and minor bug fix in mfa 2024-06-12 19:25:06 +05:30
f3ea7b3dfd feat: updated ui for srp handover 2024-06-12 19:25:06 +05:30
777dfd5f58 feat: api changes for srp handover 2024-06-12 19:25:05 +05:30
f9a9599659 doc(cli): improve --plain example 2024-05-24 17:29:44 +02:00
637b0b955f fix(cli): add backward compatibility for --raw-value 2024-05-24 17:28:52 +02:00
092665737f Merge remote-tracking branch 'origin/main' into feat/cli-secret-plain-output 2024-05-24 17:13:57 +02:00
f83c2215a5 doc(cli): --plain 2024-02-08 17:44:55 +01:00
0f41590d6a feat(cli): --plain, --expand, --include-imports in 'secrets get' subcommand 2024-02-08 17:32:31 +01:00
565 changed files with 28415 additions and 4999 deletions
.env.example
.github/workflows
.gitignore
.infisicalignore
README.md
backend
babel.config.json
e2e-test
package-lock.json
package.json
scripts
src
@types
db
ee
lib
main.ts
server
services
auth-token
auth
certificate-authority
certificate
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration
org
project-bot
project-env
project-key
project-membership
project
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-tag
secret
service-token
super-admin
user-alias
user-engagement
user
webhook
cli
company
documentation/getting-started
handbook
mint.json
style.css
docker-compose.dev-read-replica.yml
docs
api-reference/endpoints
changelog
cli/commands
contributing/platform/backend
documentation
images
integrations
platform
sso
webhook-create.png
integrations
internals
mint.json
sdks
self-hosting
configuration
deployment-options
guides
style.css
frontend
next.config.js
package-lock.json
package.json
src
components
hooks/api
layouts/AppLayout
AppLayout.tsx
components/WishForm
pages
integrations
aws-secret-manager
azure-key-vault
flyio
org/[id]
identities/[identityId]
overview
signupinvite.tsx
views
IntegrationsPage/components/IntegrationsSection
Login
Org
Project
SecretApprovalPage/components
SecretMainPage
SecretOverviewPage
Settings
ShareSecretPage/components
ShareSecretPublicPage
Signup/components
EmailConfirmationStep
UserInfoSSOStep
admin
helm-charts/secrets-operator
k8-operator
migration
nginx

.env.example

@@ -67,3 +67,6 @@ CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

99
.github/workflows/build-binaries.yml vendored Normal file

@@ -0,0 +1,99 @@
name: Build Binaries and Deploy

on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version number"
        required: true
        type: string

defaults:
  run:
    working-directory: ./backend

jobs:
  build-and-deploy:
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        arch: [x64, arm64]
        os: [linux, win]
        include:
          - os: linux
            target: node20-linux
          - os: win
            target: node20-win

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 20

      - name: Install pkg
        run: npm install -g @yao-pkg/pkg

      - name: Install dependencies (backend)
        run: npm install

      - name: Install dependencies (frontend)
        run: npm install --prefix ../frontend

      - name: Prerequisites for pkg
        run: npm run binary:build

      - name: Package into node binary
        run: |
          if [ "${{ matrix.os }}" != "linux" ]; then
            pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
          else
            pkg --no-bytecode --public-packages "*" --public --compress Brotli --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
          fi

      # Set up .deb package structure (Debian/Ubuntu only)
      - name: Set up .deb package structure
        if: matrix.os == 'linux'
        run: |
          mkdir -p infisical-core/DEBIAN
          mkdir -p infisical-core/usr/local/bin
          cp ./binary/infisical-core infisical-core/usr/local/bin/
          chmod +x infisical-core/usr/local/bin/infisical-core

      - name: Create control file
        if: matrix.os == 'linux'
        run: |
          cat <<EOF > infisical-core/DEBIAN/control
          Package: infisical-core
          Version: ${{ github.event.inputs.version }}
          Section: base
          Priority: optional
          Architecture: ${{ matrix.arch == 'x64' && 'amd64' || matrix.arch }}
          Maintainer: Infisical <daniel@infisical.com>
          Description: Infisical Core standalone executable (app.infisical.com)
          EOF

      # Build .deb file (Debian/Ubuntu only)
      - name: Build .deb package
        if: matrix.os == 'linux'
        run: |
          dpkg-deb --build infisical-core
          mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb

      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli

      # Publish .deb file to Cloudsmith (Debian/Ubuntu only)
      - name: Publish to Cloudsmith (Debian/Ubuntu)
        if: matrix.os == 'linux'
        working-directory: ./backend
        run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb

      # Publish .exe file to Cloudsmith (Windows only)
      - name: Publish to Cloudsmith (Windows)
        if: matrix.os == 'win'
        working-directory: ./backend
        run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}

.github/workflows/build-staging-and-deploy-aws.yml

@@ -50,6 +50,13 @@
    environment:
      name: Gamma
    steps:
      - uses: twingate/github-action@v1
        with:
          # The Twingate Service Key used to connect Twingate to the proper service
          # Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
          #
          # Required
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
@@ -74,21 +81,21 @@
        uses: pr-mpt/actions-commit-hash@v2
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
          aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: infisical-core-platform
          container-name: infisical-core
          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
          environment-variables: "LOG_LEVEL=info"
      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          service: infisical-core-platform
          cluster: infisical-core-platform
          service: infisical-core-gamma-stage
          cluster: infisical-gamma-stage
          wait-for-service-stability: true

  production-postgres-deployment:
@ -98,6 +105,13 @@ jobs:
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment

@ -0,0 +1,25 @@
name: Check migration file edited
on:
pull_request:
types: [opened, synchronize]
paths:
- 'backend/src/db/migrations/**'
jobs:
rename:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check any migration files are modified, renamed or duplicated.
run: |
(git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true) | cut -f2 | xargs -r -n1 basename > edited_files.txt
if [ -s edited_files.txt ]; then
echo "Existing migration files cannot be modified."
cat edited_files.txt
exit 1
fi

@ -1,59 +0,0 @@
name: Rename Migrations
on:
pull_request:
types: [closed]
paths:
- 'backend/src/db/migrations/**'
jobs:
rename:
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get list of newly added files in migration folder
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
if [ ! -s added_files.txt ]; then
echo "No new files added. Skipping"
echo "SKIP_RENAME=true" >> $GITHUB_ENV
fi
- name: Script to rename migrations
if: env.SKIP_RENAME != 'true'
run: python .github/resources/rename_migration_files.py
- name: Commit and push changes
if: env.SKIP_RENAME != 'true'
run: |
git config user.name github-actions
git config user.email github-actions@github.com
git add ./backend/src/db/migrations
rm added_files.txt
git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"
- name: Get PR details
id: pr_details
run: |
PR_NUMBER=${{ github.event.pull_request.number }}
PR_MERGER=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.merged_by.login')
echo "PR Number: $PR_NUMBER"
echo "PR Merger: $PR_MERGER"
echo "pr_merger=$PR_MERGER" >> $GITHUB_OUTPUT
- name: Create Pull Request
if: env.SKIP_RENAME != 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
title: 'GH Action: rename new migration file timestamp'
branch-suffix: timestamp
reviewers: ${{ steps.pr_details.outputs.pr_merger }}

.gitignore

@ -69,3 +69,4 @@ frontend-build
*.tgz
cli/infisical-merge
cli/test/infisical-merge
/backend/binary

@ -5,3 +5,4 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/M
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651

@ -48,25 +48,26 @@
## Introduction
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations.
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.
We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
## Features
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to manage secrets in k8s, automatically reload deployments, and more.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisical and assign those to user or machine identities.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisical and assign those to user or machine identities.
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
- **[Internal PKI](https://infisical.com/docs/documentation/platform/pki/private-ca)** to create Private CA hierarchies and start issuing and managing X.509 digital certificates.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
And much more.
@ -74,9 +75,9 @@ And much more.
Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
### Run Infisical locally

@ -0,0 +1,4 @@
{
"presets": ["@babel/preset-env", "@babel/preset-react"],
"plugins": ["@babel/plugin-syntax-import-attributes", "babel-plugin-transform-import-meta"]
}

@ -3,7 +3,6 @@ import "ts-node/register";
import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";
import { seedData1 } from "@app/db/seed-data";
@ -15,6 +14,7 @@ import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@ -23,23 +23,21 @@ export default {
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = knex({
client: "pg",
connection: cfg.DB_CONNECTION_URI,
migrations: {
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
seeds: {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
}
const db = initDbConnection({
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
});
try {
await db.migrate.latest();
await db.seed.run();
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
@ -74,7 +72,14 @@ export default {
// @ts-expect-error type
delete globalThis.jwtToken;
// called after all tests with this env have been run
await db.migrate.rollback({}, true);
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.destroy();
}
};

backend/package-lock.json
File diff suppressed because it is too large

@ -3,11 +3,39 @@
"version": "1.0.0",
"description": "",
"main": "./dist/main.mjs",
"bin": "dist/main.js",
"pkg": {
"scripts": [
"dist/**/*.js",
"../frontend/node_modules/next/**/*.js",
"../frontend/.next/*/**/*.js",
"../frontend/node_modules/next/dist/server/**/*.js",
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*.js"
],
"assets": [
"dist/**",
"!dist/**/*.js",
"node_modules/**",
"../frontend/node_modules/**",
"../frontend/.next/**",
"!../frontend/node_modules/next/dist/server/**/*.js",
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
"../frontend/public/**"
],
"outputPath": "binary"
},
"scripts": {
"binary:build": "npm run binary:clean && npm run build:frontend && npm run build && npm run binary:babel-frontend && npm run binary:babel-backend && npm run binary:rename-imports",
"binary:package": "pkg --no-bytecode --public-packages \"*\" --public --target host .",
"binary:babel-backend": " babel ./dist -d ./dist",
"binary:babel-frontend": "babel --copy-files ../frontend/.next/server -d ../frontend/.next/server",
"binary:clean": "rm -rf ./dist && rm -rf ./binary",
"binary:rename-imports": "ts-node ./scripts/rename-mjs.ts",
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
@ -31,6 +59,11 @@
"author": "",
"license": "ISC",
"devDependencies": {
"@babel/cli": "^7.18.10",
"@babel/core": "^7.18.10",
"@babel/plugin-syntax-import-attributes": "^7.24.7",
"@babel/preset-env": "^7.18.10",
"@babel/preset-react": "^7.24.7",
"@types/bcrypt": "^5.0.2",
"@types/jmespath": "^0.15.2",
"@types/jsonwebtoken": "^9.0.5",
@ -48,6 +81,8 @@
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"@yao-pkg/pkg": "^5.12.0",
"babel-plugin-transform-import-meta": "^2.2.1",
"eslint": "^8.56.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.1.0",
@ -60,7 +95,7 @@
"pino-pretty": "^10.2.3",
"prompt-sync": "^4.2.0",
"rimraf": "^5.0.5",
"ts-node": "^10.9.1",
"ts-node": "^10.9.2",
"tsc-alias": "^1.8.8",
"tsconfig-paths": "^4.2.0",
"tsup": "^8.0.1",
@ -72,6 +107,7 @@
"dependencies": {
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
@ -89,7 +125,8 @@
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@sindresorhus/slugify": "1.1.0",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@ -99,6 +136,7 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
@ -110,14 +148,16 @@
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
"jwks-rsa": "^3.1.0",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^5.0.4",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
@ -131,6 +171,7 @@
"posthog-node": "^3.6.2",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",

@ -2,13 +2,14 @@
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"
const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for migration: ");
// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = migrationName.replace(/\s+/g, "-");
const formattedMigrationName = slugify(migrationName);
execSync(
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
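For context on the switch in the migration-scaffolding script above, here is a minimal sketch of what slugify changes about migration names. This is illustrative only, not part of the diff, and assumes @sindresorhus/slugify's default options; the sample name is made up.

import slugify from "@sindresorhus/slugify";

// Hypothetical example: compare the old space-replacement with slugify.
const migrationName = "Add OIDC Config table";

// old approach: only whitespace became hyphens; casing and punctuation were kept
const oldFormat = migrationName.replace(/\s+/g, "-"); // "Add-OIDC-Config-table"

// new approach: lowercased and normalized into a filesystem-safe slug (assumed default behaviour)
const newFormat = slugify(migrationName); // "add-oidc-config-table"

console.log(oldFormat, newFormat);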

@ -0,0 +1,27 @@
/* eslint-disable @typescript-eslint/no-shadow */
import fs from "node:fs";
import path from "node:path";
function replaceMjsOccurrences(directory: string) {
fs.readdir(directory, (err, files) => {
if (err) throw err;
files.forEach((file) => {
const filePath = path.join(directory, file);
if (fs.statSync(filePath).isDirectory()) {
replaceMjsOccurrences(filePath);
} else {
fs.readFile(filePath, "utf8", (err, data) => {
if (err) throw err;
const result = data.replace(/\.mjs/g, ".js");
fs.writeFile(filePath, result, "utf8", (err) => {
if (err) throw err;
// eslint-disable-next-line no-console
console.log(`Updated: ${filePath}`);
});
});
}
});
});
}
replaceMjsOccurrences("dist");

@ -13,6 +13,7 @@ import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
@ -40,7 +41,9 @@ import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
@ -64,6 +67,7 @@ import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
declare module "fastify" {
@ -102,6 +106,7 @@ declare module "fastify" {
permission: TPermissionServiceFactory;
org: TOrgServiceFactory;
orgRole: TOrgRoleServiceFactory;
oidc: TOidcConfigServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
@ -125,11 +130,13 @@ declare module "fastify" {
identity: TIdentityServiceFactory;
identityAccessToken: TIdentityAccessTokenServiceFactory;
identityProject: TIdentityProjectServiceFactory;
identityTokenAuth: TIdentityTokenAuthServiceFactory;
identityUa: TIdentityUaServiceFactory;
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@ -155,6 +162,7 @@ declare module "fastify" {
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

@ -1,4 +1,4 @@
import { Knex } from "knex";
import { Knex as KnexOriginal } from "knex";
import {
TableName,
@ -92,6 +92,9 @@ import {
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate,
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate,
@ -104,6 +107,9 @@ import {
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate,
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate,
@ -134,6 +140,9 @@ import {
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate,
TOidcConfigs,
TOidcConfigsInsert,
TOidcConfigsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@ -277,317 +286,381 @@ import {
TWebhooksUpdate
} from "@app/db/schemas";
declare module "knex" {
namespace Knex {
interface QueryInterface {
primaryNode(): KnexOriginal;
replicaNode(): KnexOriginal;
}
}
}
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.CertificateAuthority]: Knex.CompositeTableType<
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate
>;
[TableName.CertificateAuthorityCert]: Knex.CompositeTableType<
[TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate
>;
[TableName.CertificateAuthoritySecret]: Knex.CompositeTableType<
[TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate
>;
[TableName.CertificateAuthorityCrl]: Knex.CompositeTableType<
[TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: Knex.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateBody]: Knex.CompositeTableType<
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate
>;
[TableName.CertificateSecret]: Knex.CompositeTableType<
[TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate
>;
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
[TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate
>;
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
[TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate
>;
[TableName.AuthTokens]: Knex.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
[TableName.AuthTokenSession]: Knex.CompositeTableType<
[TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
[TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
TAuthTokenSessions,
TAuthTokenSessionsInsert,
TAuthTokenSessionsUpdate
>;
[TableName.BackupPrivateKey]: Knex.CompositeTableType<
[TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate
>;
[TableName.Organization]: Knex.CompositeTableType<TOrganizations, TOrganizationsInsert, TOrganizationsUpdate>;
[TableName.OrgMembership]: Knex.CompositeTableType<TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate>;
[TableName.OrgRoles]: Knex.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: Knex.CompositeTableType<
[TableName.Organization]: KnexOriginal.CompositeTableType<
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate
>;
[TableName.OrgMembership]: KnexOriginal.CompositeTableType<
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate
>;
[TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: KnexOriginal.CompositeTableType<
TIncidentContacts,
TIncidentContactsInsert,
TIncidentContactsUpdate
>;
[TableName.UserAction]: Knex.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
[TableName.SuperAdmin]: Knex.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: Knex.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: Knex.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: Knex.CompositeTableType<
[TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,
TProjectMembershipsUpdate
>;
[TableName.Environment]: Knex.CompositeTableType<
[TableName.Environment]: KnexOriginal.CompositeTableType<
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate
>;
[TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
[TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate
>;
[TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
[TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate
>;
[TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretReference]: Knex.CompositeTableType<
[TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretReference]: KnexOriginal.CompositeTableType<
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate
>;
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
[TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
TSecretBlindIndexesUpdate
>;
[TableName.SecretVersion]: Knex.CompositeTableType<TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate>;
[TableName.SecretFolder]: Knex.CompositeTableType<TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate>;
[TableName.SecretFolderVersion]: Knex.CompositeTableType<
[TableName.SecretVersion]: KnexOriginal.CompositeTableType<
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate
>;
[TableName.SecretFolder]: KnexOriginal.CompositeTableType<
TSecretFolders,
TSecretFoldersInsert,
TSecretFoldersUpdate
>;
[TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
TSecretFolderVersions,
TSecretFolderVersionsInsert,
TSecretFolderVersionsUpdate
>;
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
[TableName.RateLimit]: Knex.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
[TableName.Webhook]: Knex.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: Knex.CompositeTableType<TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate>;
[TableName.IntegrationAuth]: Knex.CompositeTableType<
[TableName.SecretSharing]: KnexOriginal.CompositeTableType<
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate
>;
[TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: KnexOriginal.CompositeTableType<
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate
>;
[TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
[TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: KnexOriginal.CompositeTableType<
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate
>;
[TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
TIntegrationAuths,
TIntegrationAuthsInsert,
TIntegrationAuthsUpdate
>;
[TableName.Identity]: Knex.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
[TableName.IdentityUniversalAuth]: Knex.CompositeTableType<
[TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
[TableName.IdentityTokenAuth]: KnexOriginal.CompositeTableType<
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate
>;
[TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
TIdentityUniversalAuths,
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate
>;
[TableName.IdentityGcpAuth]: Knex.CompositeTableType<
[TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAwsAuth]: Knex.CompositeTableType<
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate
>;
[TableName.IdentityAzureAuth]: Knex.CompositeTableType<
[TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
TIdentityAzureAuths,
TIdentityAzureAuthsInsert,
TIdentityAzureAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
[TableName.IdentityOidcAuth]: KnexOriginal.CompositeTableType<
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate
>;
[TableName.IdentityAccessToken]: Knex.CompositeTableType<
[TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate
>;
[TableName.IdentityOrgMembership]: Knex.CompositeTableType<
[TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate
>;
[TableName.IdentityProjectMembership]: Knex.CompositeTableType<
[TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
[TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate
>;
[TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
[TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate
>;
[TableName.AccessApprovalPolicy]: Knex.CompositeTableType<
[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
TAccessApprovalPolicies,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate
>;
[TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType<
[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate
>;
[TableName.AccessApprovalRequest]: Knex.CompositeTableType<
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
TAccessApprovalRequestsUpdate
>;
[TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType<
[TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
TAccessApprovalRequestsReviewers,
TAccessApprovalRequestsReviewersInsert,
TAccessApprovalRequestsReviewersUpdate
>;
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
[TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
TSecretApprovalPolicies,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate
>;
[TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType<
[TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate
>;
[TableName.SecretApprovalRequest]: Knex.CompositeTableType<
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
TSecretApprovalRequests,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsUpdate
>;
[TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType<
[TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
TSecretApprovalRequestsReviewersUpdate
>;
[TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType<
[TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate
>;
[TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType<
[TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretRotation]: Knex.CompositeTableType<
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate
>;
[TableName.SecretRotationOutput]: Knex.CompositeTableType<
[TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate
>;
[TableName.Snapshot]: Knex.CompositeTableType<TSecretSnapshots, TSecretSnapshotsInsert, TSecretSnapshotsUpdate>;
[TableName.SnapshotSecret]: Knex.CompositeTableType<
[TableName.Snapshot]: KnexOriginal.CompositeTableType<
TSecretSnapshots,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate
>;
[TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate
>;
[TableName.SnapshotFolder]: Knex.CompositeTableType<
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
TSecretSnapshotFolders,
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate
>;
[TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
[TableName.DynamicSecretLease]: Knex.CompositeTableType<
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate
>;
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: Knex.CompositeTableType<
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate
>;
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
TAuditLogStreams,
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate
>;
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate
>;
[TableName.GitAppOrg]: Knex.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
[TableName.SecretScanningGitRisk]: Knex.CompositeTableType<
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate
>;
[TableName.TrustedIps]: Knex.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
// Junction tables
[TableName.JnSecretTag]: Knex.CompositeTableType<
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate
>;
[TableName.SecretVersionTag]: Knex.CompositeTableType<
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
// KMS service
[TableName.KmsServerRootConfig]: Knex.CompositeTableType<
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate
>;
[TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate
>;
}
}
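As a rough illustration of what the Tables augmentation above provides, a DAL-style query picks up per-table typing from these CompositeTableType entries. This is a hypothetical sketch, not part of the commit; the helper name and the assumption that TDbClient is exported from "@app/db" are mine.

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";

// Hypothetical helper: the row, insert and update shapes for TableName.Users
// are inferred from the Tables interface declared above.
export const listUserEmails = async (db: TDbClient) => {
  return db(TableName.Users).select("email");
};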

@ -1,8 +1,38 @@
import knex from "knex";
import knex, { Knex } from "knex";
export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => {
const db = knex({
export const initDbConnection = ({
dbConnectionUri,
dbRootCert,
readReplicas = []
}: {
dbConnectionUri: string;
dbRootCert?: string;
readReplicas?: {
dbConnectionUri: string;
dbRootCert?: string;
}[];
}) => {
// akhilmhdh: knex's default type is knex.Knex<any, any[]>, but when created via knex({<config>}) the value is knex.Knex<any, unknown[]>.
// This was causing issues in files like `snapshot-dal` (`findRecursivelySnapshots`), so the any and unknown[] are declared explicitly here.
// eslint-disable-next-line
let db: Knex<any, unknown[]>;
// eslint-disable-next-line
let readReplicaDbs: Knex<any, unknown[]>[];
// @ts-expect-error the querybuilder type is expected but our intention is to return a knex instance
knex.QueryBuilder.extend("primaryNode", () => {
return db;
});
// @ts-expect-error the querybuilder type is expected but our intention is to return a knex instance
knex.QueryBuilder.extend("replicaNode", () => {
if (!readReplicaDbs.length) return db;
const selectedReplica = readReplicaDbs[Math.floor(Math.random() * readReplicaDbs.length)];
return selectedReplica;
});
db = knex({
client: "pg",
connection: {
connectionString: dbConnectionUri,
@ -22,5 +52,21 @@ export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnection
}
});
readReplicaDbs = readReplicas.map((el) => {
const replicaDbCertificate = el.dbRootCert || dbRootCert;
return knex({
client: "pg",
connection: {
connectionString: el.dbConnectionUri,
ssl: replicaDbCertificate
? {
rejectUnauthorized: true,
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
}
});
});
return db;
};
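A usage sketch for the read-replica support added above: hypothetical DAL helpers, not from the commit, assuming only the primaryNode/replicaNode extensions and the TDbClient type from this file (table and function names are illustrative).

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";

// Hypothetical DAL helpers: route reads through a replica, writes through the primary.
export const findOrgById = async (db: TDbClient, id: string) => {
  // replicaNode() returns a randomly selected read replica, or the primary when none are configured
  return db.replicaNode()(TableName.Organization).where({ id }).first();
};

export const renameOrg = async (db: TDbClient, id: string, name: string) => {
  // writes stay on the primary connection
  return db(TableName.Organization).where({ id }).update({ name });
};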

@ -0,0 +1,61 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (!doesPasswordFieldExist) t.string("hashedPassword");
if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey");
if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV");
if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag");
if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (doesPasswordFieldExist) t.dropColumn("hashedPassword");
if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey");
if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV");
if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag");
if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding");
});
}
}

@ -0,0 +1,49 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.OidcConfig))) {
await knex.schema.createTable(TableName.OidcConfig, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("discoveryURL");
tb.string("issuer");
tb.string("authorizationEndpoint");
tb.string("jwksUri");
tb.string("tokenEndpoint");
tb.string("userinfoEndpoint");
tb.text("encryptedClientId").notNullable();
tb.string("configurationType").notNullable();
tb.string("clientIdIV").notNullable();
tb.string("clientIdTag").notNullable();
tb.text("encryptedClientSecret").notNullable();
tb.string("clientSecretIV").notNullable();
tb.string("clientSecretTag").notNullable();
tb.string("allowedEmailDomains").nullable();
tb.boolean("isActive").notNullable();
tb.timestamps(true, true, true);
tb.uuid("orgId").notNullable().unique();
tb.foreign("orgId").references("id").inTable(TableName.Organization);
});
}
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
tb.boolean("trustOidcEmails").defaultTo(false);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.OidcConfig);
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("trustOidcEmails");
});
}
}
}

@ -0,0 +1,27 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const DEFAULT_AUTH_ORG_ID_FIELD = "defaultAuthOrgId";
export async function up(knex: Knex): Promise<void> {
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasDefaultOrgColumn) {
t.uuid(DEFAULT_AUTH_ORG_ID_FIELD).nullable();
t.foreign(DEFAULT_AUTH_ORG_ID_FIELD).references("id").inTable(TableName.Organization).onDelete("SET NULL");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasDefaultOrgColumn) {
t.dropForeign([DEFAULT_AUTH_ORG_ID_FIELD]);
t.dropColumn(DEFAULT_AUTH_ORG_ID_FIELD);
}
});
}

@ -0,0 +1,24 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
const hasAltNamesColumn = await knex.schema.hasColumn(TableName.Certificate, "altNames");
if (!hasAltNamesColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("altNames").defaultTo("");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "altNames")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("altNames");
});
}
}
}

@ -0,0 +1,35 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"awsAssumeIamRoleArnCipherText"
);
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (!hasAwsAssumeRoleCipherText) t.text("awsAssumeIamRoleArnCipherText");
if (!hasAwsAssumeRoleIV) t.text("awsAssumeIamRoleArnIV");
if (!hasAwsAssumeRoleTag) t.text("awsAssumeIamRoleArnTag");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"awsAssumeIamRoleArnCipherText"
);
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (hasAwsAssumeRoleCipherText) t.dropColumn("awsAssumeIamRoleArnCipherText");
if (hasAwsAssumeRoleIV) t.dropColumn("awsAssumeIamRoleArnIV");
if (hasAwsAssumeRoleTag) t.dropColumn("awsAssumeIamRoleArnTag");
});
}
}

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
tb.specificType("enabledLoginMethods", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("enabledLoginMethods");
});
}
}

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute"))) {
await knex.schema.alterTable(TableName.LdapConfig, (tb) => {
tb.string("uniqueUserAttribute").notNullable().defaultTo("");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute")) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.dropColumn("uniqueUserAttribute");
});
}
}

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays"))) {
await knex.schema.alterTable(TableName.Project, (tb) => {
tb.integer("auditLogsRetentionDays").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays")) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("auditLogsRetentionDays");
});
}
}

@ -0,0 +1,12 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
await createOnUpdateTrigger(knex, TableName.OidcConfig);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.OidcConfig);
}

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites"))) {
await knex.schema.alterTable(TableName.OrgMembership, (tb) => {
tb.specificType("projectFavorites", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites")) {
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.dropColumn("projectFavorites");
});
}
}

@ -0,0 +1,53 @@
import { Knex } from "knex";
import { WebhookType } from "@app/services/webhook/webhook-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
if (await knex.schema.hasTable(TableName.Webhook)) {
await knex.schema.alterTable(TableName.Webhook, (tb) => {
if (!hasUrlCipherText) {
tb.text("urlCipherText");
}
if (!hasUrlIV) {
tb.string("urlIV");
}
if (!hasUrlTag) {
tb.string("urlTag");
}
if (!hasType) {
tb.string("type").defaultTo(WebhookType.GENERAL);
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");
if (await knex.schema.hasTable(TableName.Webhook)) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasUrlCipherText) {
t.dropColumn("urlCipherText");
}
if (hasUrlIV) {
t.dropColumn("urlIV");
}
if (hasUrlTag) {
t.dropColumn("urlTag");
}
if (hasType) {
t.dropColumn("type");
}
});
}
}

@ -0,0 +1,188 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// migrate secret approval policy approvers to user id
const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId");
if (!hasApproverUserId) {
// add the new fields
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
// if (hasApproverId) tb.setNullable("approverId");
tb.uuid("approverUserId");
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
});
// convert project membership id => user id
await knex(TableName.SecretApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverId`]))
});
// drop the old field
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
if (hasApproverId) tb.dropColumn("approverId");
tb.uuid("approverUserId").notNullable().alter();
});
}
// migrate secret approval request committer and statusChangeBy to user id
const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest);
const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");
const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId");
const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy");
const hasStatusChangedByUserId = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"statusChangedByUserId"
);
if (hasSecretApprovalRequestTable) {
// new fields
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
// if (hasCommitterId) tb.setNullable("committerId");
if (!hasCommitterUserId) {
tb.uuid("committerUserId");
tb.foreign("committerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
if (!hasStatusChangedByUserId) {
tb.uuid("statusChangedByUserId");
tb.foreign("statusChangedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
// copy the assigned project membership => user id to new fields
await knex(TableName.SecretApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangedByUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangeBy`]))
});
// drop old fields
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
if (hasStatusChangeBy) tb.dropColumn("statusChangeBy");
if (hasCommitterId) tb.dropColumn("committerId");
tb.uuid("committerUserId").notNullable().alter();
});
}
// migrate secret approval request reviewer to user id
const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId");
if (!hasReviewerUserId) {
// new fields
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
// if (hasMemberId) tb.setNullable("member");
tb.uuid("reviewerUserId");
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
});
// copy project membership => user id to new fields
await knex(TableName.SecretApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
reviewerUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.member`]))
});
// drop table
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
if (hasMemberId) tb.dropColumn("member");
tb.uuid("reviewerUserId").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverId");
if (hasApproverUserId) {
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
if (!hasApproverId) {
tb.uuid("approverId");
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasApproverId) {
await knex(TableName.SecretApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
tb.dropColumn("approverUserId");
tb.uuid("approverId").notNullable().alter();
});
}
}
const hasSecretApprovalRequestTable = await knex.schema.hasTable(TableName.SecretApprovalRequest);
const hasCommitterUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerUserId");
const hasCommitterId = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "committerId");
const hasStatusChangeBy = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangeBy");
const hasStatusChangedByUser = await knex.schema.hasColumn(TableName.SecretApprovalRequest, "statusChangedByUserId");
if (hasSecretApprovalRequestTable) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
// if (hasCommitterId) tb.uuid("committerId").notNullable().alter();
if (!hasCommitterId) {
tb.uuid("committerId");
tb.foreign("committerId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
if (!hasStatusChangeBy) {
tb.uuid("statusChangeBy");
tb.foreign("statusChangeBy").references("id").inTable(TableName.ProjectMembership).onDelete("SET NULL");
}
});
await knex(TableName.SecretApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangeBy: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
if (hasCommitterUserId) tb.dropColumn("committerUserId");
if (hasStatusChangedByUser) tb.dropColumn("statusChangedByUserId");
if (hasCommitterId) tb.uuid("committerId").notNullable().alter();
});
}
const hasMemberId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "reviewerUserId");
if (hasReviewerUserId) {
if (!hasMemberId) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
// if (hasMemberId) tb.uuid("member").notNullable().alter();
tb.uuid("member");
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
});
}
await knex(TableName.SecretApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
tb.uuid("member").notNullable().alter();
tb.dropColumn("reviewerUserId");
});
}
}
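The backfills above rely on Knex correlated-subquery updates, where knex.raw("??", [...]) interpolates an identifier so the subquery can reference the outer row being updated. A minimal standalone sketch of that pattern, with hypothetical table and column names:

import { Knex } from "knex";

// Hedged sketch of the membership-id -> user-id backfill pattern used in the
// migration above; "approvals", "memberships", and the columns are illustrative.
export async function backfillUserIds(knex: Knex): Promise<void> {
  await knex("approvals").update({
    // for every approvals row, resolve the userId behind its membershipId
    userId: knex("memberships")
      .select("userId")
      .where("id", knex.raw("??", ["approvals.membershipId"]))
  });
}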

@ -0,0 +1,24 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
await knex.schema.createTable(TableName.IdentityTokenAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
});
await createOnUpdateTrigger(knex, TableName.IdentityTokenAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityTokenAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityTokenAuth);
}

@ -0,0 +1,24 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityAccessToken)) {
const hasNameColumn = await knex.schema.hasColumn(TableName.IdentityAccessToken, "name");
if (!hasNameColumn) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.string("name").nullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityAccessToken)) {
if (await knex.schema.hasColumn(TableName.IdentityAccessToken, "name")) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.dropColumn("name");
});
}
}
}

@ -0,0 +1,34 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityOidcAuth))) {
await knex.schema.createTable(TableName.IdentityOidcAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("oidcDiscoveryUrl").notNullable();
t.text("encryptedCaCert").notNullable();
t.string("caCertIV").notNullable();
t.string("caCertTag").notNullable();
t.string("boundIssuer").notNullable();
t.string("boundAudiences").notNullable();
t.jsonb("boundClaims").notNullable();
t.string("boundSubject");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.IdentityOidcAuth);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityOidcAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityOidcAuth);
}

@ -19,7 +19,8 @@ export const CertificatesSchema = z.object({
notBefore: z.date(),
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional()
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

@ -19,7 +19,8 @@ export const IdentityAccessTokensSchema = z.object({
identityUAClientSecretId: z.string().nullable().optional(),
identityId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
name: z.string().nullable().optional()
});
export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;

@ -0,0 +1,31 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityOidcAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
oidcDiscoveryUrl: z.string(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
export type TIdentityOidcAuthsInsert = Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>;
export type TIdentityOidcAuthsUpdate = Partial<Omit<z.input<typeof IdentityOidcAuthsSchema>, TImmutableDBKeys>>;

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityTokenAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid()
});
export type TIdentityTokenAuths = z.infer<typeof IdentityTokenAuthsSchema>;
export type TIdentityTokenAuthsInsert = Omit<z.input<typeof IdentityTokenAuthsSchema>, TImmutableDBKeys>;
export type TIdentityTokenAuthsUpdate = Partial<Omit<z.input<typeof IdentityTokenAuthsSchema>, TImmutableDBKeys>>;

@ -28,10 +28,12 @@ export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-oidc-auths";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
export * from "./identity-project-membership-role";
export * from "./identity-project-memberships";
export * from "./identity-token-auths";
export * from "./identity-ua-client-secrets";
export * from "./identity-universal-auths";
export * from "./incident-contacts";
@ -43,6 +45,7 @@ export * from "./kms-root-config";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";

@ -29,7 +29,10 @@ export const IntegrationAuthsSchema = z.object({
keyEncoding: z.string(),
projectId: z.string(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
awsAssumeIamRoleArnCipherText: z.string().nullable().optional(),
awsAssumeIamRoleArnIV: z.string().nullable().optional(),
awsAssumeIamRoleArnTag: z.string().nullable().optional()
});
export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;

@ -26,7 +26,8 @@ export const LdapConfigsSchema = z.object({
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default("")
searchFilter: z.string().default(""),
uniqueUserAttribute: z.string().default("")
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

@ -53,12 +53,14 @@ export enum TableName {
Webhook = "webhooks",
Identity = "identities",
IdentityAccessToken = "identity_access_tokens",
IdentityTokenAuth = "identity_token_auths",
IdentityUniversalAuth = "identity_universal_auths",
IdentityKubernetesAuth = "identity_kubernetes_auths",
IdentityGcpAuth = "identity_gcp_auths",
IdentityAzureAuth = "identity_azure_auths",
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAwsAuth = "identity_aws_auths",
IdentityOidcAuth = "identity_oidc_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
@ -78,6 +80,7 @@ export enum TableName {
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
OidcConfig = "oidc_configs",
LdapGroupMap = "ldap_group_maps",
AuditLog = "audit_logs",
AuditLogStream = "audit_log_streams",
@ -160,9 +163,11 @@ export enum ProjectUpgradeStatus {
}
export enum IdentityAuthMethod {
TOKEN_AUTH = "token-auth",
Univeral = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth"
AZURE_AUTH = "azure-auth",
OIDC_AUTH = "oidc-auth"
}

@ -0,0 +1,34 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const OidcConfigsSchema = z.object({
id: z.string().uuid(),
discoveryURL: z.string().nullable().optional(),
issuer: z.string().nullable().optional(),
authorizationEndpoint: z.string().nullable().optional(),
jwksUri: z.string().nullable().optional(),
tokenEndpoint: z.string().nullable().optional(),
userinfoEndpoint: z.string().nullable().optional(),
encryptedClientId: z.string(),
configurationType: z.string(),
clientIdIV: z.string(),
clientIdTag: z.string(),
encryptedClientSecret: z.string(),
clientSecretIV: z.string(),
clientSecretTag: z.string(),
allowedEmailDomains: z.string().nullable().optional(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid()
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
export type TOidcConfigsInsert = Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>;
export type TOidcConfigsUpdate = Partial<Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>>;

@ -16,7 +16,8 @@ export const OrgMembershipsSchema = z.object({
updatedAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid(),
roleId: z.string().uuid().nullable().optional()
roleId: z.string().uuid().nullable().optional(),
projectFavorites: z.string().array().nullable().optional()
});
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;

@ -17,8 +17,9 @@ export const ProjectsSchema = z.object({
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional(),
pitVersionLimit: z.number().default(10),
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
pitVersionLimit: z.number().default(10)
auditLogsRetentionDays: z.number().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

@ -9,10 +9,10 @@ import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesApproversSchema = z.object({
id: z.string().uuid(),
approverId: z.string().uuid(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
approverUserId: z.string().uuid()
});
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;

@ -9,11 +9,11 @@ import { TImmutableDBKeys } from "./models";
export const SecretApprovalRequestsReviewersSchema = z.object({
id: z.string().uuid(),
member: z.string().uuid(),
status: z.string(),
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;

@ -15,11 +15,11 @@ export const SecretApprovalRequestsSchema = z.object({
conflicts: z.unknown().nullable().optional(),
slug: z.string(),
folderId: z.string().uuid(),
statusChangeBy: z.string().uuid().nullable().optional(),
committerId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
isReplicated: z.boolean().nullable().optional()
isReplicated: z.boolean().nullable().optional(),
committerUserId: z.string().uuid(),
statusChangedByUserId: z.string().uuid().nullable().optional()
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;

@ -16,7 +16,10 @@ export const SuperAdminSchema = z.object({
allowedSignUpDomain: z.string().nullable().optional(),
instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000"),
trustSamlEmails: z.boolean().default(false).nullable().optional(),
trustLdapEmails: z.boolean().default(false).nullable().optional()
trustLdapEmails: z.boolean().default(false).nullable().optional(),
trustOidcEmails: z.boolean().default(false).nullable().optional(),
defaultAuthOrgId: z.string().uuid().nullable().optional(),
enabledLoginMethods: z.string().array().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

@ -21,7 +21,12 @@ export const UserEncryptionKeysSchema = z.object({
tag: z.string(),
salt: z.string(),
verifier: z.string(),
userId: z.string().uuid()
userId: z.string().uuid(),
hashedPassword: z.string().nullable().optional(),
serverEncryptedPrivateKey: z.string().nullable().optional(),
serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
});
export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;

@ -21,7 +21,11 @@ export const WebhooksSchema = z.object({
keyEncoding: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid()
envId: z.string().uuid(),
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional()
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;

@ -8,6 +8,7 @@ import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
@ -64,7 +65,14 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/pki" }
);
await server.register(registerSamlRouter, { prefix: "/sso" });
await server.register(
async (ssoRouter) => {
await ssoRouter.register(registerSamlRouter);
await ssoRouter.register(registerOidcRouter, { prefix: "/oidc" });
},
{ prefix: "/sso" }
);
await server.register(registerScimRouter, { prefix: "/scim" });
await server.register(registerLdapRouter, { prefix: "/ldap" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });

@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
try {
if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." });
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
let groups: { dn: string; cn: string }[] | undefined;
@ -70,10 +70,13 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
}
const externalId = ldapConfig.uniqueUserAttribute ? user[ldapConfig.uniqueUserAttribute] : user.uidNumber;
const username = ldapConfig.uniqueUserAttribute ? externalId : user.uid;
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
externalId,
username,
ldapConfigId: ldapConfig.id,
externalId: user.uidNumber,
username: user.uid,
firstName: user.givenName ?? user.cn ?? "",
lastName: user.sn ?? "",
email: user.mail,
@ -138,6 +141,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
url: z.string(),
bindDN: z.string(),
bindPass: z.string(),
uniqueUserAttribute: z.string(),
searchBase: z.string(),
searchFilter: z.string(),
groupSearchBase: z.string(),
@ -172,6 +176,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim().default("uidNumber"),
searchBase: z.string().trim(),
searchFilter: z.string().trim().default("(uid={{username}})"),
groupSearchBase: z.string().trim(),
@ -213,6 +218,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim(),
searchBase: z.string().trim(),
searchFilter: z.string().trim(),
groupSearchBase: z.string().trim(),

@ -0,0 +1,355 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
// All the any rules are disabled because the passport TypeScript typings for fastify are really poor
import { Authenticator, Strategy } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Redis } from "ioredis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const redis = new Redis(appCfg.REDIS_URL);
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
/*
- OIDC protocol cannot work without sessions: https://github.com/panva/node-openid-client/issues/190
- Current redis usage is not ideal and will eventually have to be refactored to use a better structure
- Fastify session <> Redis structure is based on the following: https://github.com/fastify/session/blob/master/examples/redis.js
*/
const redisStore = new RedisStore({
client: redis,
prefix: "oidc-session:",
ttl: 600 // 10 minutes
});
await server.register(fastifySession, {
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
store: redisStore,
cookie: {
secure: appCfg.HTTPS_ENABLED,
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
}
});
await server.register(passport.initialize());
await server.register(passport.secureSession());
// redirect to IDP for login
server.route({
url: "/login",
method: "GET",
config: {
rateLimit: authRateLimit
},
schema: {
querystring: z.object({
orgSlug: z.string().trim(),
callbackPort: z.string().trim().optional()
})
},
preValidation: [
async (req, res) => {
const { orgSlug, callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
req.session.set<any>("oidcOrgSlug", orgSlug);
if (callbackPort) {
req.session.set<any>("callbackPort", callbackPort);
}
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(orgSlug, callbackPort);
return (
passport.authenticate(oidcStrategy as Strategy, {
scope: "profile email openid"
}) as any
)(req, res);
}
],
handler: () => {}
});
// callback route after login from IDP
server.route({
url: "/callback",
method: "GET",
preValidation: [
async (req, res) => {
const oidcOrgSlug = req.session.get<any>("oidcOrgSlug");
const callbackPort = req.session.get<any>("callbackPort");
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(oidcOrgSlug, callbackPort);
return (
passport.authenticate(oidcStrategy as Strategy, {
failureRedirect: "/api/v1/sso/oidc/login/error",
session: false,
failureMessage: true
}) as any
)(req, res);
}
],
handler: async (req, res) => {
await req.session.destroy();
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
);
}
// signup
return res.redirect(
`${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
);
}
});
server.route({
url: "/login/error",
method: "GET",
handler: async (req, res) => {
await req.session.destroy();
return res.status(500).send({
error: "Authentication error",
details: req.query
});
}
});
server.route({
url: "/config",
method: "GET",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
querystring: z.object({
orgSlug: z.string().trim()
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
configurationType: true,
discoveryURL: true,
isActive: true,
orgId: true,
allowedEmailDomains: true
}).extend({
clientId: z.string(),
clientSecret: z.string()
})
}
},
handler: async (req) => {
const { orgSlug } = req.query;
const oidc = await server.services.oidc.getOidc({
orgSlug,
type: "external",
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
return oidc;
}
});
server.route({
method: "PATCH",
url: "/config",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z
.object({
allowedEmailDomains: z
.string()
.trim()
.optional()
.default("")
.transform((data) => {
if (data === "") return "";
// Trim each domain and join with ', ' to ensure consistent formatting
return data
.split(",")
.map((id) => id.trim())
.join(", ");
}),
discoveryURL: z.string().trim(),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim(),
authorizationEndpoint: z.string().trim(),
jwksUri: z.string().trim(),
tokenEndpoint: z.string().trim(),
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean()
})
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
allowedEmailDomains: true,
isActive: true
})
}
},
handler: async (req) => {
const oidc = await server.services.oidc.updateOidcCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return oidc;
}
});
server.route({
method: "POST",
url: "/config",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z
.object({
allowedEmailDomains: z
.string()
.trim()
.optional()
.default("")
.transform((data) => {
if (data === "") return "";
// Trim each domain and join with ', ' to ensure consistent formatting
return data
.split(",")
.map((id) => id.trim())
.join(", ");
}),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim().optional().default(""),
discoveryURL: z.string().trim().optional().default(""),
authorizationEndpoint: z.string().trim().optional().default(""),
jwksUri: z.string().trim().optional().default(""),
tokenEndpoint: z.string().trim().optional().default(""),
userinfoEndpoint: z.string().trim().optional().default(""),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim()
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
if (!data.issuer) {
ctx.addIssue({
path: ["issuer"],
message: "Issuer is required",
code: z.ZodIssueCode.custom
});
}
if (!data.authorizationEndpoint) {
ctx.addIssue({
path: ["authorizationEndpoint"],
message: "Authorization endpoint is required",
code: z.ZodIssueCode.custom
});
}
if (!data.jwksUri) {
ctx.addIssue({
path: ["jwksUri"],
message: "JWKS URI is required",
code: z.ZodIssueCode.custom
});
}
if (!data.tokenEndpoint) {
ctx.addIssue({
path: ["tokenEndpoint"],
message: "Token endpoint is required",
code: z.ZodIssueCode.custom
});
}
if (!data.userinfoEndpoint) {
ctx.addIssue({
path: ["userinfoEndpoint"],
message: "Userinfo endpoint is required",
code: z.ZodIssueCode.custom
});
}
} else {
// eslint-disable-next-line no-lonely-if
if (!data.discoveryURL) {
ctx.addIssue({
path: ["discoveryURL"],
message: "Discovery URL is required",
code: z.ZodIssueCode.custom
});
}
}
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true
})
}
},
handler: async (req) => {
const oidc = await server.services.oidc.createOidcCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return oidc;
}
});
};
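For orientation, a hedged sketch of how a browser client would drive these routes once they are mounted under /api/v1/sso/oidc (the org slug below is purely illustrative):

// Kick off OIDC login; the server redirects the browser to the IDP.
const orgSlug = "example-org"; // hypothetical slug
window.location.assign(`/api/v1/sso/oidc/login?orgSlug=${encodeURIComponent(orgSlug)}`);
// After the IDP round-trip, /api/v1/sso/oidc/callback destroys the session and
// redirects to `${SITE_URL}/login/sso?token=...` (or /signup/sso for first-time users).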

@ -475,10 +475,13 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
}),
z.object({
op: z.literal("add"),
value: z.object({
value: z.string().trim(),
display: z.string().trim().optional()
})
path: z.string().trim(),
value: z.array(
z.object({
value: z.string().trim(),
display: z.string().trim().optional()
})
)
})
])
)
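A hedged example of a SCIM PATCH /Groups payload whose add operation satisfies the updated schema above; the path and member identifiers are purely illustrative:

const scimPatchBody = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [
    {
      op: "add",
      path: "members",
      value: [{ value: "user-id-123", display: "jane@example.com" }]
    }
  ]
};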

@ -25,10 +25,10 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val)),
approvers: z.string().array().min(1),
approverUserIds: z.string().array().min(1),
approvals: z.number().min(1).default(1)
})
.refine((data) => data.approvals <= data.approvers.length, {
.refine((data) => data.approvals <= data.approverUserIds.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
@ -66,7 +66,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
body: z
.object({
name: z.string().optional(),
approvers: z.string().array().min(1),
approverUserIds: z.string().array().min(1),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
@ -74,7 +74,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
})
.refine((data) => data.approvals <= data.approvers.length, {
.refine((data) => data.approvals <= data.approverUserIds.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
@ -139,7 +139,15 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
}),
response: {
200: z.object({
approvals: sapPubSchema.merge(z.object({ approvers: z.string().array() })).array()
approvals: sapPubSchema
.extend({
userApprovers: z
.object({
userId: z.string()
})
.array()
})
.array()
})
}
},
@ -170,7 +178,11 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
}),
response: {
200: z.object({
policy: sapPubSchema.merge(z.object({ approvers: z.string().array() })).optional()
policy: sapPubSchema
.extend({
userApprovers: z.object({ userId: z.string() }).array()
})
.optional()
})
}
},

@ -6,7 +6,8 @@ import {
SecretApprovalRequestsSecretsSchema,
SecretsSchema,
SecretTagsSchema,
SecretVersionsSchema
SecretVersionsSchema,
UsersSchema
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
@ -14,6 +15,15 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const approvalRequestUser = z.object({ userId: z.string() }).merge(
UsersSchema.pick({
email: true,
firstName: true,
lastName: true,
username: true
})
);
export const registerSecretApprovalRequestRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
@ -41,9 +51,10 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
reviewers: z.object({ userId: z.string(), status: z.string() }).array(),
approvers: z.string().array()
}).array()
})
@ -195,7 +206,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
type: isClosing ? EventType.SECRET_APPROVAL_CLOSED : EventType.SECRET_APPROVAL_REOPENED,
// eslint-disable-next-line
metadata: {
[isClosing ? ("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangeBy as string,
[isClosing ? ("closedBy" as const) : ("reopenedBy" as const)]: approval.statusChangedByUserId as string,
secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug
// eslint-disable-next-line
@ -216,6 +227,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
})
.array()
.optional();
server.route({
method: "GET",
url: "/:id",
@ -235,12 +247,13 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable()
}),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
secretPath: z.string(),
commits: SecretApprovalRequestsSecretsSchema.omit({ secretBlindIndex: true })
.merge(

@ -32,7 +32,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await accessApprovalPolicyFindQuery(tx || db, {
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.AccessApprovalPolicy}.id` as "id"]: id
});
const formatedDoc = mergeOneToManyRelation(
@ -54,7 +54,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
try {
const docs = await accessApprovalPolicyFindQuery(tx || db, filter);
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
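// reads go to a replica when no transaction is supplied; writes and transactional
// reads stay on the primary (the same pattern recurs across the DAL changes below)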
const formatedDoc = mergeOneToManyRelation(
docs,
"id",

@ -14,7 +14,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => {
try {
const docs = await db(TableName.AccessApprovalRequest)
const docs = await db
.replicaNode()(TableName.AccessApprovalRequest)
.whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
.leftJoin(
@ -170,7 +171,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db);
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
const docs = await sql;
const formatedDoc = sqlNestRelationships({
data: docs,
@ -207,7 +208,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
const getCount = async ({ projectId }: { projectId: string }) => {
try {
const accessRequests = await db(TableName.AccessApprovalRequest)
const accessRequests = await db
.replicaNode()(TableName.AccessApprovalRequest)
.leftJoin(
TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalRequest}.policyId`,

@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
@ -27,7 +28,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
tx?: Knex
) => {
try {
const sqlQuery = (tx || db)(TableName.AuditLog)
const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
.where(
stripUndefinedInWhere({
projectId,
@ -55,13 +56,34 @@ export const auditLogDALFactory = (db: TDbClient) => {
// delete all audit logs that have expired
const pruneAuditLog = async (tx?: Knex) => {
try {
const today = new Date();
const docs = await (tx || db)(TableName.AuditLog).where("expiresAt", "<", today).del();
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "PruneAuditLog" });
}
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
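// prune in batches of AUDIT_LOG_PRUNE_BATCH_SIZE until a pass deletes nothing,
// pausing briefly between passes; on error, retry up to MAX_RETRY_ON_FAILURE
// times before giving up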
do {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
.where("expiresAt", "<", today)
.select("id")
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 100); // time to breathe for db
});
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
}
} while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};
return { ...auditLogOrm, pruneAuditLog, find };

@ -45,18 +45,29 @@ export const auditLogQueueServiceFactory = ({
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
const MS_IN_DAY = 24 * 60 * 60 * 1000;
let project;
if (!orgId) {
// orgId and projectId will never both be undefined
// TODO(akhilmhdh): use caching here in dal to avoid db calls
const project = await projectDAL.findById(projectId as string);
project = await projectDAL.findById(projectId as string);
orgId = project.orgId;
}
const plan = await licenseService.getPlan(orgId);
const ttl = plan.auditLogsRetentionDays * MS_IN_DAY;
// skip inserting if audit log retention is 0, meaning it's not supported
if (ttl === 0) return;
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0, meaning it's not supported
return;
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
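// hedged worked example (numbers illustrative): with a 30-day plan limit and a
// project override of 7, ttlInDays resolves to 7; an override of 90 is capped by
// the plan, so ttlInDays stays 30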
const auditLog = await auditLogDAL.create({
actor: actor.type,

@ -45,6 +45,7 @@ export enum EventType {
CREATE_SECRETS = "create-secrets",
UPDATE_SECRET = "update-secret",
UPDATE_SECRETS = "update-secrets",
MOVE_SECRETS = "move-secrets",
DELETE_SECRET = "delete-secret",
DELETE_SECRETS = "delete-secrets",
GET_WORKSPACE_KEY = "get-workspace-key",
@ -65,25 +66,43 @@ export enum EventType {
ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
REVOKE_IDENTITY_UNIVERSAL_AUTH = "revoke-identity-universal-auth",
CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth",
UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth",
GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth",
ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth",
UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth",
GET_IDENTITY_TOKEN_AUTH = "get-identity-token-auth",
REVOKE_IDENTITY_TOKEN_AUTH = "revoke-identity-token-auth",
LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
LOGIN_IDENTITY_OIDC_AUTH = "login-identity-oidc-auth",
ADD_IDENTITY_OIDC_AUTH = "add-identity-oidc-auth",
UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth",
GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth",
CREATE_ENVIRONMENT = "create-environment",
UPDATE_ENVIRONMENT = "update-environment",
DELETE_ENVIRONMENT = "delete-environment",
@ -222,6 +241,17 @@ interface UpdateSecretBatchEvent {
};
}
interface MoveSecretsEvent {
type: EventType.MOVE_SECRETS;
metadata: {
sourceEnvironment: string;
sourceSecretPath: string;
destinationEnvironment: string;
destinationSecretPath: string;
secretIds: string[];
};
}
interface DeleteSecretEvent {
type: EventType.DELETE_SECRET;
metadata: {
@ -434,6 +464,73 @@ interface GetIdentityUniversalAuthEvent {
};
}
interface DeleteIdentityUniversalAuthEvent {
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH;
metadata: {
identityId: string;
};
}
interface CreateTokenIdentityTokenAuthEvent {
type: EventType.CREATE_TOKEN_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
identityAccessTokenId: string;
};
}
interface UpdateTokenIdentityTokenAuthEvent {
type: EventType.UPDATE_TOKEN_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
tokenId: string;
name?: string;
};
}
interface GetTokensIdentityTokenAuthEvent {
type: EventType.GET_TOKENS_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
};
}
interface AddIdentityTokenAuthEvent {
type: EventType.ADD_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityTokenAuthEvent {
type: EventType.UPDATE_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityTokenAuthEvent {
type: EventType.GET_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
};
}
interface DeleteIdentityTokenAuthEvent {
type: EventType.REVOKE_IDENTITY_TOKEN_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityKubernetesAuthEvent {
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH;
metadata: {
@ -457,6 +554,13 @@ interface AddIdentityKubernetesAuthEvent {
};
}
interface DeleteIdentityKubernetesAuthEvent {
type: EventType.REVOKE_IDENTITY_KUBERNETES_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityKubernetesAuthEvent {
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH;
metadata: {
@ -493,6 +597,14 @@ interface GetIdentityUniversalAuthClientSecretsEvent {
};
}
interface GetIdentityUniversalAuthClientSecretByIdEvent {
type: EventType.GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID;
metadata: {
identityId: string;
clientSecretId: string;
};
}
interface RevokeIdentityUniversalAuthClientSecretEvent {
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET;
metadata: {
@ -525,6 +637,13 @@ interface AddIdentityGcpAuthEvent {
};
}
interface DeleteIdentityGcpAuthEvent {
type: EventType.REVOKE_IDENTITY_GCP_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityGcpAuthEvent {
type: EventType.UPDATE_IDENTITY_GCP_AUTH;
metadata: {
@ -570,6 +689,13 @@ interface AddIdentityAwsAuthEvent {
};
}
interface DeleteIdentityAwsAuthEvent {
type: EventType.REVOKE_IDENTITY_AWS_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAwsAuthEvent {
type: EventType.UPDATE_IDENTITY_AWS_AUTH;
metadata: {
@ -613,6 +739,13 @@ interface AddIdentityAzureAuthEvent {
};
}
interface DeleteIdentityAzureAuthEvent {
type: EventType.REVOKE_IDENTITY_AZURE_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAzureAuthEvent {
type: EventType.UPDATE_IDENTITY_AZURE_AUTH;
metadata: {
@ -633,6 +766,63 @@ interface GetIdentityAzureAuthEvent {
};
}
interface LoginIdentityOidcAuthEvent {
type: EventType.LOGIN_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
identityOidcAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityOidcAuthEvent {
type: EventType.ADD_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
oidcDiscoveryUrl: string;
caCert: string;
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface DeleteIdentityOidcAuthEvent {
type: EventType.REVOKE_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityOidcAuthEvent {
type: EventType.UPDATE_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
oidcDiscoveryUrl?: string;
caCert?: string;
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityOidcAuthEvent {
type: EventType.GET_IDENTITY_OIDC_AUTH;
metadata: {
identityId: string;
};
}
interface CreateEnvironmentEvent {
type: EventType.CREATE_ENVIRONMENT;
metadata: {
@ -722,7 +912,6 @@ interface CreateWebhookEvent {
webhookId: string;
environment: string;
secretPath: string;
webhookUrl: string;
isDisabled: boolean;
};
}
@ -733,7 +922,6 @@ interface UpdateWebhookStatusEvent {
webhookId: string;
environment: string;
secretPath: string;
webhookUrl: string;
isDisabled: boolean;
};
}
@ -744,7 +932,6 @@ interface DeleteWebhookEvent {
webhookId: string;
environment: string;
secretPath: string;
webhookUrl: string;
isDisabled: boolean;
};
}
@ -984,6 +1171,7 @@ export type Event =
| CreateSecretBatchEvent
| UpdateSecretEvent
| UpdateSecretBatchEvent
| MoveSecretsEvent
| DeleteSecretEvent
| DeleteSecretBatchEvent
| GetWorkspaceKeyEvent
@ -1003,26 +1191,44 @@ export type Event =
| LoginIdentityUniversalAuthEvent
| AddIdentityUniversalAuthEvent
| UpdateIdentityUniversalAuthEvent
| DeleteIdentityUniversalAuthEvent
| GetIdentityUniversalAuthEvent
| CreateTokenIdentityTokenAuthEvent
| UpdateTokenIdentityTokenAuthEvent
| GetTokensIdentityTokenAuthEvent
| AddIdentityTokenAuthEvent
| UpdateIdentityTokenAuthEvent
| GetIdentityTokenAuthEvent
| DeleteIdentityTokenAuthEvent
| LoginIdentityKubernetesAuthEvent
| DeleteIdentityKubernetesAuthEvent
| AddIdentityKubernetesAuthEvent
| UpdateIdentityKubernetesAuthEvent
| GetIdentityKubernetesAuthEvent
| CreateIdentityUniversalAuthClientSecretEvent
| GetIdentityUniversalAuthClientSecretsEvent
| GetIdentityUniversalAuthClientSecretByIdEvent
| RevokeIdentityUniversalAuthClientSecretEvent
| LoginIdentityGcpAuthEvent
| AddIdentityGcpAuthEvent
| DeleteIdentityGcpAuthEvent
| UpdateIdentityGcpAuthEvent
| GetIdentityGcpAuthEvent
| LoginIdentityAwsAuthEvent
| AddIdentityAwsAuthEvent
| UpdateIdentityAwsAuthEvent
| GetIdentityAwsAuthEvent
| DeleteIdentityAwsAuthEvent
| LoginIdentityAzureAuthEvent
| AddIdentityAzureAuthEvent
| DeleteIdentityAzureAuthEvent
| UpdateIdentityAzureAuthEvent
| GetIdentityAzureAuthEvent
| LoginIdentityOidcAuthEvent
| AddIdentityOidcAuthEvent
| DeleteIdentityOidcAuthEvent
| UpdateIdentityOidcAuthEvent
| GetIdentityOidcAuthEvent
| CreateEnvironmentEvent
| UpdateEnvironmentEvent
| DeleteEnvironmentEvent

@ -12,7 +12,10 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
const countLeasesForDynamicSecret = async (dynamicSecretId: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.DynamicSecretLease).count("*").where({ dynamicSecretId }).first();
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
.count("*")
.where({ dynamicSecretId })
.first();
return parseInt(doc || "0", 10);
} catch (error) {
throw new DatabaseError({ error, name: "DynamicSecretCountLeases" });
@ -21,7 +24,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.DynamicSecretLease)
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
.where({ [`${TableName.DynamicSecretLease}.id` as "id"]: id })
.first()
.join(

@ -3,7 +3,8 @@ import { z } from "zod";
export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb"
Oracle = "oracledb",
MsSQL = "mssql"
}
export const DynamicSecretSqlDBSchema = z.object({

@ -12,7 +12,7 @@ export const groupDALFactory = (db: TDbClient) => {
const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => {
try {
const query = (tx || db)(TableName.Groups)
const query = (tx || db.replicaNode())(TableName.Groups)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.select(selectAllTableCols(TableName.Groups));
@ -32,7 +32,7 @@ export const groupDALFactory = (db: TDbClient) => {
const findByOrgId = async (orgId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.Groups)
const docs = await (tx || db.replicaNode())(TableName.Groups)
.where(`${TableName.Groups}.orgId`, orgId)
.leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
.select(selectAllTableCols(TableName.Groups))
@ -74,11 +74,12 @@ export const groupDALFactory = (db: TDbClient) => {
username?: string;
}) => {
try {
let query = db(TableName.OrgMembership)
let query = db
.replicaNode()(TableName.OrgMembership)
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserGroupMembership, function () {
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
.leftJoin(TableName.UserGroupMembership, (bd) => {
bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
`${TableName.UserGroupMembership}.groupId`,
"=",
db.raw("?", [groupId])

@ -18,7 +18,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
*/
const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[], tx?: Knex) => {
try {
const userProjectMemberships: string[] = await (tx || db)(TableName.ProjectMembership)
const userProjectMemberships: string[] = await (tx || db.replicaNode())(TableName.ProjectMembership)
.where(`${TableName.ProjectMembership}.userId`, userId)
.whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
.pluck(`${TableName.ProjectMembership}.projectId`);
@ -43,7 +43,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
// special query
const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
try {
const usernameDocs: string[] = await db(TableName.UserGroupMembership)
const usernameDocs: string[] = await db
.replicaNode()(TableName.UserGroupMembership)
.join(
TableName.GroupProjectMembership,
`${TableName.UserGroupMembership}.groupId`,
@ -73,7 +74,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
try {
// get list of groups in the project with id [projectId]
// that are not the group with id [groupId]
const groups: string[] = await (tx || db)(TableName.GroupProjectMembership)
const groups: string[] = await (tx || db.replicaNode())(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);
@ -83,8 +84,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
.where(`${TableName.UserGroupMembership}.groupId`, groupId)
.where(`${TableName.UserGroupMembership}.isPending`, false)
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.ProjectMembership, function () {
this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
.leftJoin(TableName.ProjectMembership, (bd) => {
bd.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
`${TableName.ProjectMembership}.projectId`,
"=",
db.raw("?", [projectId])
@ -107,9 +108,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER
.whereNotIn(`${TableName.UserGroupMembership}.userId`, function () {
.whereNotIn(`${TableName.UserGroupMembership}.userId`, (bd) => {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.select(`${TableName.UserGroupMembership}.userId`)
bd.select(`${TableName.UserGroupMembership}.userId`)
.from(TableName.UserGroupMembership)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups);
});

@ -23,6 +23,8 @@ import {
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -30,7 +32,9 @@ import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membe
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@ -50,7 +54,7 @@ import {
TTestLdapConnectionDTO,
TUpdateLdapCfgDTO
} from "./ldap-config-types";
import { testLDAPConfig } from "./ldap-fns";
import { searchGroups, testLDAPConfig } from "./ldap-fns";
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
type TLdapConfigServiceFactoryDep = {
@ -73,11 +77,19 @@ type TLdapConfigServiceFactoryDep = {
>;
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
| "create"
| "findOne"
| "transaction"
| "updateById"
| "findUserEncKeyByUserIdsBatch"
| "find"
| "findUserEncKeyByUserId"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
};
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@ -97,7 +109,9 @@ export const ldapConfigServiceFactory = ({
userDAL,
userAliasDAL,
permissionService,
licenseService
licenseService,
tokenService,
smtpService
}: TLdapConfigServiceFactoryDep) => {
const createLdapCfg = async ({
actor,
@ -109,6 +123,7 @@ export const ldapConfigServiceFactory = ({
url,
bindDN,
bindPass,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@ -187,6 +202,7 @@ export const ldapConfigServiceFactory = ({
encryptedBindPass,
bindPassIV,
bindPassTag,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@ -209,6 +225,7 @@ export const ldapConfigServiceFactory = ({
url,
bindDN,
bindPass,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@ -231,7 +248,8 @@ export const ldapConfigServiceFactory = ({
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter
groupSearchFilter,
uniqueUserAttribute
};
const orgBot = await orgBotDAL.findOne({ orgId });
@ -269,7 +287,7 @@ export const ldapConfigServiceFactory = ({
return ldapConfig;
};
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean }) => {
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
const ldapConfig = await ldapConfigDAL.findOne(filter);
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
@ -332,6 +350,7 @@ export const ldapConfigServiceFactory = ({
url: ldapConfig.url,
bindDN,
bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
@ -368,6 +387,7 @@ export const ldapConfigServiceFactory = ({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter || "(uid={{username}})",
// searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
@ -398,6 +418,13 @@ export const ldapConfigServiceFactory = ({
}: TLdapLoginDTO) => {
const appCfg = getConfig();
const serverCfg = await getServerCfg();
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.LDAP)) {
throw new BadRequestError({
message: "Login with LDAP is disabled by administrator."
});
}
let userAlias = await userAliasDAL.findOne({
externalId,
orgId,
@ -437,9 +464,24 @@ export const ldapConfigServiceFactory = ({
}
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// member limit reached: the number of members used meets or exceeds the plan's member limit
throw new BadRequestError({
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// identity limit reached: the number of identities used meets or exceeds the plan's identity limit
throw new BadRequestError({
message: "Failed to create new member via LDAP due to identity limit reached. Upgrade plan to add more identities."
});
}
userAlias = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
if (serverCfg.trustLdapEmails) {
newUser = await userDAL.findOne(
{
email,
@ -488,7 +530,7 @@ export const ldapConfigServiceFactory = ({
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: userAlias.userId,
userId: newUser.id,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
@ -592,12 +634,14 @@ export const ldapConfigServiceFactory = ({
});
const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,
@ -619,6 +663,22 @@ export const ldapConfigServiceFactory = ({
}
);
if (user.email && !user.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});
await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
}
return { isUserCompleted, providerAuthToken };
};
@ -664,11 +724,25 @@ export const ldapConfigServiceFactory = ({
message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map."
});
const ldapConfig = await ldapConfigDAL.findOne({
id: ldapConfigId,
orgId
const ldapConfig = await getLdapCfg({
orgId,
id: ldapConfigId
});
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
if (!ldapConfig.groupSearchBase) {
throw new BadRequestError({
message: "Configure a group search base in your LDAP configuration in order to proceed."
});
}
const groupSearchFilter = `(cn=${ldapGroupCN})`;
const groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
if (!groups.some((g) => g.cn === ldapGroupCN)) {
throw new BadRequestError({
message: "Failed to find LDAP Group CN"
});
}
const group = await groupDAL.findOne({ slug: groupSlug, orgId });
if (!group) throw new BadRequestError({ message: "Failed to find group" });

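createLdapGroupMap now verifies the CN against the directory via searchGroups before persisting a mapping; that helper's body is not part of this diff. A sketch of what such a search could look like with ldapjs, under the assumption that it binds with the configured credentials and collects matching entries (attribute parsing such as extracting `cn` varies by ldapjs version, so this sketch only gathers entry DNs):

import ldap from "ldapjs";

const searchGroupsSketch = (
  cfg: { url: string; bindDN: string; bindPass: string },
  filter: string,
  searchBase: string
): Promise<string[]> =>
  new Promise((resolve, reject) => {
    const client = ldap.createClient({ url: cfg.url });
    client.bind(cfg.bindDN, cfg.bindPass, (bindErr) => {
      if (bindErr) return reject(bindErr);
      const dns: string[] = [];
      client.search(searchBase, { filter, scope: "sub" }, (searchErr, res) => {
        if (searchErr) return reject(searchErr);
        res.on("searchEntry", (entry) => dns.push(String(entry.objectName)));
        res.on("error", reject);
        res.on("end", () => {
          client.unbind();
          resolve(dns);
        });
      });
    });
  });

// e.g. searchGroupsSketch(cfg, "(cn=engineering)", "ou=groups,dc=example,dc=com")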
@ -7,6 +7,7 @@ export type TLDAPConfig = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
groupSearchBase: string;
groupSearchFilter: string;
@ -19,6 +20,7 @@ export type TCreateLdapCfgDTO = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;
@ -33,6 +35,7 @@ export type TUpdateLdapCfgDTO = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;

@ -10,7 +10,8 @@ export const ldapGroupMapDALFactory = (db: TDbClient) => {
const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => {
try {
const docs = await db(TableName.LdapGroupMap)
const docs = await db
.replicaNode()(TableName.LdapGroupMap)
.where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId)
.join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`)
.select(selectAllTableCols(TableName.LdapGroupMap))

@ -7,6 +7,8 @@ export const getDefaultOnPremFeatures = () => {
workspacesUsed: 0,
memberLimit: null,
membersUsed: 0,
identityLimit: null,
identitiesUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
secretVersioning: true,

@ -15,6 +15,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
membersUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
identityLimit: null,
identitiesUsed: 0,
dynamicSecret: false,
secretVersioning: true,
pitRecovery: false,
@ -27,6 +29,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
auditLogStreams: false,
auditLogStreamLimit: 3,
samlSSO: false,
oidcSSO: false,
scim: false,
ldap: false,
groups: false,
@ -35,7 +38,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
caCrl: false,
instanceUserManagement: false
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {

@ -9,7 +9,7 @@ export type TLicenseDALFactory = ReturnType<typeof licenseDALFactory>;
export const licenseDALFactory = (db: TDbClient) => {
const countOfOrgMembers = async (orgId: string | null, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.OrgMembership)
const doc = await (tx || db.replicaNode())(TableName.OrgMembership)
.where({ status: OrgMembershipStatus.Accepted })
.andWhere((bd) => {
if (orgId) {
@ -19,11 +19,44 @@ export const licenseDALFactory = (db: TDbClient) => {
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
return doc?.[0].count;
return Number(doc?.[0].count);
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Members" });
}
};
return { countOfOrgMembers };
const countOrgUsersAndIdentities = async (orgId: string | null, tx?: Knex) => {
try {
// count org users
const userDoc = await (tx || db)(TableName.OrgMembership)
.where({ status: OrgMembershipStatus.Accepted })
.andWhere((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
const userCount = Number(userDoc?.[0].count);
// count org identities
const identityDoc = await (tx || db)(TableName.IdentityOrgMembership)
.where((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.count();
const identityCount = Number(identityDoc?.[0].count);
return userCount + identityCount;
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Users + Identities" });
}
};
return { countOfOrgMembers, countOrgUsersAndIdentities };
};

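Both counters coerce with Number(...) because the pg driver returns count() aggregates as strings (Postgres bigint). A tiny illustration of why that matters once the two counts are added together (the table name is illustrative):

import knex from "knex";

const db = knex({ client: "pg", connection: "postgres://localhost/app" });

const countMembersSketch = async () => {
  // Knex + pg yields e.g. [{ count: "42" }]; without Number() the later
  // `userCount + identityCount` would concatenate strings instead of adding.
  const rows = await db("org_memberships").count();
  return Number(rows[0]?.count ?? 0);
};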
@ -5,6 +5,7 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here
import { ForbiddenError } from "@casl/ability";
import { Knex } from "knex";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
@ -155,6 +156,7 @@ export const licenseServiceFactory = ({
LICENSE_SERVER_CLOUD_PLAN_TTL,
JSON.stringify(currentPlan)
);
return currentPlan;
}
} catch (error) {
@ -199,21 +201,29 @@ export const licenseServiceFactory = ({
await licenseServerCloudApi.request.delete(`/api/license-server/v1/customers/${customerId}`);
};
const updateSubscriptionOrgMemberCount = async (orgId: string) => {
const updateSubscriptionOrgMemberCount = async (orgId: string, tx?: Knex) => {
if (instanceType === InstanceType.Cloud) {
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new BadRequestError({ message: "Org not found" });
const count = await licenseDAL.countOfOrgMembers(orgId);
const quantity = await licenseDAL.countOfOrgMembers(orgId, tx);
const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId, tx);
if (org?.customerId) {
await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
quantity: count
quantity,
quantityIdentities
});
}
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
} else if (instanceType === InstanceType.EnterpriseOnPrem) {
const usedSeats = await licenseDAL.countOfOrgMembers(null);
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats });
const usedSeats = await licenseDAL.countOfOrgMembers(null, tx);
const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null, tx);
onPremFeatures.membersUsed = usedSeats;
onPremFeatures.identitiesUsed = usedIdentitySeats;
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, {
usedSeats,
usedIdentitySeats
});
}
await refreshPlan(orgId);
};

@ -30,7 +30,9 @@ export type TFeatureSet = {
workspacesUsed: 0;
dynamicSecret: false;
memberLimit: null;
membersUsed: 0;
membersUsed: number;
identityLimit: null;
identitiesUsed: number;
environmentLimit: null;
environmentsUsed: 0;
secretVersioning: true;
@ -44,6 +46,7 @@ export type TFeatureSet = {
auditLogStreams: false;
auditLogStreamLimit: 3;
samlSSO: false;
oidcSSO: false;
scim: false;
ldap: false;
groups: false;
@ -53,6 +56,7 @@ export type TFeatureSet = {
secretApproval: false;
secretRotation: true;
caCrl: false;
instanceUserManagement: false;
};
export type TOrgPlansTableDTO = {

@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TOidcConfigDALFactory = ReturnType<typeof oidcConfigDALFactory>;
export const oidcConfigDALFactory = (db: TDbClient) => {
const oidcCfgOrm = ormify(db, TableName.OidcConfig);
return { ...oidcCfgOrm };
};

@ -0,0 +1,645 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";
import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TOidcConfigDALFactory } from "./oidc-config-dal";
import {
OIDCConfigurationType,
TCreateOidcCfgDTO,
TGetOidcCfgDTO,
TOidcLoginDTO,
TUpdateOidcCfgDTO
} from "./oidc-config-types";
type TOidcConfigServiceFactoryDep = {
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
orgDAL: Pick<
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
};
export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;
export const oidcConfigServiceFactory = ({
orgDAL,
orgMembershipDAL,
userDAL,
userAliasDAL,
licenseService,
permissionService,
tokenService,
orgBotDAL,
smtpService,
oidcConfigDAL
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) {
throw new BadRequestError({
message: "Organization not found",
name: "OrgNotFound"
});
}
if (dto.type === "external") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
org.id,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}
const oidcCfg = await oidcConfigDAL.findOne({
orgId: org.id
});
if (!oidcCfg) {
throw new BadRequestError({
message: "Failed to find organization OIDC configuration"
});
}
// decrypt and return cfg
const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
if (!orgBot) {
throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
}
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
oidcCfg;
let clientId = "";
if (encryptedClientId && clientIdIV && clientIdTag) {
clientId = decryptSymmetric({
ciphertext: encryptedClientId,
key,
tag: clientIdTag,
iv: clientIdIV
});
}
let clientSecret = "";
if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
clientSecret = decryptSymmetric({
key,
tag: clientSecretTag,
iv: clientSecretIV,
ciphertext: encryptedClientSecret
});
}
return {
id: oidcCfg.id,
issuer: oidcCfg.issuer,
authorizationEndpoint: oidcCfg.authorizationEndpoint,
configurationType: oidcCfg.configurationType,
discoveryURL: oidcCfg.discoveryURL,
jwksUri: oidcCfg.jwksUri,
tokenEndpoint: oidcCfg.tokenEndpoint,
userinfoEndpoint: oidcCfg.userinfoEndpoint,
orgId: oidcCfg.orgId,
isActive: oidcCfg.isActive,
allowedEmailDomains: oidcCfg.allowedEmailDomains,
clientId,
clientSecret
};
};
const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
const serverCfg = await getServerCfg();
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {
throw new BadRequestError({
message: "Login with OIDC is disabled by administrator."
});
}
const appCfg = getConfig();
const userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.OIDC
});
const organization = await orgDAL.findOrgById(orgId);
if (!organization) throw new BadRequestError({ message: "Org not found" });
let user: TUsers;
if (userAlias) {
user = await userDAL.transaction(async (tx) => {
const foundUser = await userDAL.findById(userAlias.userId, tx);
const [orgMembership] = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.userId` as "userId"]: foundUser.id,
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
},
{ tx }
);
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: userAlias.userId,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
},
tx
);
// Only update the membership to Accepted if the user account is already completed.
} else if (orgMembership.status === OrgMembershipStatus.Invited && foundUser.isAccepted) {
await orgDAL.updateMembershipById(
orgMembership.id,
{
status: OrgMembershipStatus.Accepted
},
tx
);
}
return foundUser;
});
} else {
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustOidcEmails) {
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
}
if (!newUser) {
const uniqueUsername = await normalizeUsername(externalId, userDAL);
newUser = await userDAL.create(
{
email,
firstName,
isEmailVerified: serverCfg.trustOidcEmails,
username: serverCfg.trustOidcEmails ? email : uniqueUsername,
lastName,
authMethods: [],
isGhost: false
},
tx
);
}
await userAliasDAL.create(
{
userId: newUser.id,
aliasType: UserAliasType.OIDC,
externalId,
emails: email ? [email] : [],
orgId
},
tx
);
const [orgMembership] = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.userId` as "userId"]: newUser.id,
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
},
{ tx }
);
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: newUser.id,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
},
tx
);
// Only update the membership to Accepted if the user account is already completed.
} else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) {
await orgDAL.updateMembershipById(
orgMembership.id,
{
status: OrgMembershipStatus.Accepted
},
tx
);
}
return newUser;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const isUserCompleted = Boolean(user.isAccepted);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
organizationId: organization.id,
organizationSlug: organization.slug,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
authMethod: AuthMethod.OIDC,
authType: UserAliasType.OIDC,
isUserCompleted,
...(callbackPort && { callbackPort })
},
appCfg.AUTH_SECRET,
{
expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
}
);
if (user.email && !user.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});
await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
}
return { isUserCompleted, providerAuthToken };
};
const updateOidcCfg = async ({
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
actor,
actorOrgId,
actorAuthMethod,
actorId,
issuer,
isActive,
authorizationEndpoint,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
}: TUpdateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new BadRequestError({
message: "Organization not found"
});
}
const plan = await licenseService.getPlan(org.id);
if (!plan.oidcSSO)
throw new BadRequestError({
message:
"Failed to update OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
});
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
org.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
const orgBot = await orgBotDAL.findOne({ orgId: org.id });
if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const updateQuery: TOidcConfigsUpdate = {
allowedEmailDomains,
configurationType,
discoveryURL,
issuer,
authorizationEndpoint,
tokenEndpoint,
userinfoEndpoint,
jwksUri,
isActive
};
if (clientId !== undefined) {
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
updateQuery.encryptedClientId = encryptedClientId;
updateQuery.clientIdIV = clientIdIV;
updateQuery.clientIdTag = clientIdTag;
}
if (clientSecret !== undefined) {
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);
updateQuery.encryptedClientSecret = encryptedClientSecret;
updateQuery.clientSecretIV = clientSecretIV;
updateQuery.clientSecretTag = clientSecretTag;
}
const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
return ssoConfig;
};
const createOidcCfg = async ({
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
actor,
actorOrgId,
actorAuthMethod,
actorId,
issuer,
isActive,
authorizationEndpoint,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
}: TCreateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new BadRequestError({
message: "Organization not found"
});
}
const plan = await licenseService.getPlan(org.id);
if (!plan.oidcSSO)
throw new BadRequestError({
message:
"Failed to create OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
});
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
org.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);
const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
if (doc) return doc;
const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);
return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId: org.id,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);
const oidcCfg = await oidcConfigDAL.create({
issuer,
isActive,
configurationType,
discoveryURL,
authorizationEndpoint,
allowedEmailDomains,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
orgId: org.id,
encryptedClientId,
clientIdIV,
clientIdTag,
encryptedClientSecret,
clientSecretIV,
clientSecretTag
});
return oidcCfg;
};
const getOrgAuthStrategy = async (orgSlug: string, callbackPort?: string) => {
const appCfg = getConfig();
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new BadRequestError({
message: "Organization not found."
});
}
const oidcCfg = await getOidc({
type: "internal",
orgSlug
});
if (!oidcCfg || !oidcCfg.isActive) {
throw new BadRequestError({
message: "Failed to authenticate with OIDC SSO"
});
}
let issuer: Issuer;
if (oidcCfg.configurationType === OIDCConfigurationType.DISCOVERY_URL) {
if (!oidcCfg.discoveryURL) {
throw new BadRequestError({
message: "OIDC not configured correctly"
});
}
issuer = await Issuer.discover(oidcCfg.discoveryURL);
} else {
if (
!oidcCfg.issuer ||
!oidcCfg.authorizationEndpoint ||
!oidcCfg.jwksUri ||
!oidcCfg.tokenEndpoint ||
!oidcCfg.userinfoEndpoint
) {
throw new BadRequestError({
message: "OIDC not configured correctly"
});
}
issuer = new OpenIdIssuer({
issuer: oidcCfg.issuer,
authorization_endpoint: oidcCfg.authorizationEndpoint,
jwks_uri: oidcCfg.jwksUri,
token_endpoint: oidcCfg.tokenEndpoint,
userinfo_endpoint: oidcCfg.userinfoEndpoint
});
}
const client = new issuer.Client({
client_id: oidcCfg.clientId,
client_secret: oidcCfg.clientSecret,
redirect_uris: [`${appCfg.SITE_URL}/api/v1/sso/oidc/callback`]
});
const strategy = new OpenIdStrategy(
{
client,
passReqToCallback: true
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(_req: any, tokenSet: TokenSet, cb: any) => {
const claims = tokenSet.claims();
if (!claims.email || !claims.given_name) {
throw new BadRequestError({
message: "Invalid request. Missing email or first name"
});
}
if (oidcCfg.allowedEmailDomains) {
const allowedDomains = oidcCfg.allowedEmailDomains.split(", ");
if (!allowedDomains.includes(claims.email.split("@")[1])) {
throw new BadRequestError({
message: "Email not allowed."
});
}
}
oidcLogin({
email: claims.email,
externalId: claims.sub,
firstName: claims.given_name ?? "",
lastName: claims.family_name ?? "",
orgId: org.id,
callbackPort
})
.then(({ isUserCompleted, providerAuthToken }) => {
cb(null, { isUserCompleted, providerAuthToken });
})
.catch((error) => {
cb(error);
});
}
);
return strategy;
};
return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg };
};

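Both issuer paths above follow openid-client's standard API: Issuer.discover for the discovery-URL configuration type and a manually constructed Issuer for the custom one. A condensed standalone sketch with placeholder endpoints and credentials:

import { Issuer } from "openid-client";

const buildOidcClientSketch = async (useDiscovery: boolean) => {
  const issuer = useDiscovery
    ? await Issuer.discover("https://idp.example.com/.well-known/openid-configuration")
    : new Issuer({
        issuer: "https://idp.example.com",
        authorization_endpoint: "https://idp.example.com/oauth2/authorize",
        token_endpoint: "https://idp.example.com/oauth2/token",
        jwks_uri: "https://idp.example.com/.well-known/jwks.json",
        userinfo_endpoint: "https://idp.example.com/userinfo"
      });

  // The OpenIdStrategy above is then constructed from a client like this one.
  return new issuer.Client({
    client_id: "placeholder-client-id",
    client_secret: "placeholder-client-secret",
    redirect_uris: ["https://app.example.com/api/v1/sso/oidc/callback"],
    response_types: ["code"]
  });
};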
@ -0,0 +1,56 @@
import { TGenericPermission } from "@app/lib/types";
export enum OIDCConfigurationType {
CUSTOM = "custom",
DISCOVERY_URL = "discoveryURL"
}
export type TOidcLoginDTO = {
externalId: string;
email: string;
firstName: string;
lastName?: string;
orgId: string;
callbackPort?: string;
};
export type TGetOidcCfgDTO =
| ({
type: "external";
orgSlug: string;
} & TGenericPermission)
| {
type: "internal";
orgSlug: string;
};
export type TCreateOidcCfgDTO = {
issuer?: string;
authorizationEndpoint?: string;
discoveryURL?: string;
configurationType: OIDCConfigurationType;
allowedEmailDomains?: string;
jwksUri?: string;
tokenEndpoint?: string;
userinfoEndpoint?: string;
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
} & TGenericPermission;
export type TUpdateOidcCfgDTO = Partial<{
issuer: string;
authorizationEndpoint: string;
allowedEmailDomains: string;
discoveryURL: string;
jwksUri: string;
configurationType: OIDCConfigurationType;
tokenEndpoint: string;
userinfoEndpoint: string;
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
}> &
TGenericPermission;

@ -116,7 +116,6 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
can(OrgPermissionActions.Read, OrgPermissionSubjects.IncidentAccount);
can(OrgPermissionActions.Read, OrgPermissionSubjects.SecretScanning);

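The removed line takes SSO read access away from the default member role, so the ForbiddenError.from(permission).throwUnlessCan(Read, Sso) checks used by the SSO services above now reject plain members. A self-contained illustration of that CASL mechanic with generic action/subject strings rather than the project's permission enums:

import { AbilityBuilder, ForbiddenError, createMongoAbility } from "@casl/ability";

const buildMemberAbilitySketch = () => {
  const { can, build } = new AbilityBuilder(createMongoAbility);
  can("read", "Settings");
  can("read", "Billing");
  // no can("read", "Sso") any longer
  return build();
};

const ability = buildMemberAbilitySketch();
ForbiddenError.from(ability).throwUnlessCan("read", "Billing"); // passes
// ForbiddenError.from(ability).throwUnlessCan("read", "Sso");  // would now throw ForbiddenError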
@ -10,7 +10,8 @@ export type TPermissionDALFactory = ReturnType<typeof permissionDALFactory>;
export const permissionDALFactory = (db: TDbClient) => {
const getOrgPermission = async (userId: string, orgId: string) => {
try {
const membership = await db(TableName.OrgMembership)
const membership = await db
.replicaNode()(TableName.OrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.OrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("userId", userId)
@ -28,7 +29,8 @@ export const permissionDALFactory = (db: TDbClient) => {
const getOrgIdentityPermission = async (identityId: string, orgId: string) => {
try {
const membership = await db(TableName.IdentityOrgMembership)
const membership = await db
.replicaNode()(TableName.IdentityOrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("identityId", identityId)
@ -45,11 +47,13 @@ export const permissionDALFactory = (db: TDbClient) => {
const getProjectPermission = async (userId: string, projectId: string) => {
try {
const groups: string[] = await db(TableName.GroupProjectMembership)
const groups: string[] = await db
.replicaNode()(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);
const groupDocs = await db(TableName.UserGroupMembership)
const groupDocs = await db
.replicaNode()(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups)
.join(
@ -231,7 +235,8 @@ export const permissionDALFactory = (db: TDbClient) => {
const getProjectIdentityPermission = async (identityId: string, projectId: string) => {
try {
const docs = await db(TableName.IdentityProjectMembership)
const docs = await db
.replicaNode()(TableName.IdentityProjectMembership)
.join(
TableName.IdentityProjectMembershipRole,
`${TableName.IdentityProjectMembershipRole}.projectMembershipId`,

@ -10,7 +10,8 @@ export const samlConfigDALFactory = (db: TDbClient) => {
const findEnforceableSamlCfg = async (orgId: string) => {
try {
const samlCfg = await db(TableName.SamlConfig)
const samlCfg = await db
.replicaNode()(TableName.SamlConfig)
.where({
orgId,
isActive: true

@ -28,6 +28,7 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@ -41,7 +42,10 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f
type TSamlConfigServiceFactoryDep = {
samlConfigDAL: Pick<TSamlConfigDALFactory, "create" | "findOne" | "update" | "findById">;
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById" | "findById">;
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
orgDAL: Pick<
TOrgDALFactory,
@ -332,6 +336,13 @@ export const samlConfigServiceFactory = ({
}: TSamlLoginDTO) => {
const appCfg = getConfig();
const serverCfg = await getServerCfg();
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.SAML)) {
throw new BadRequestError({
message: "Login with SAML is disabled by administrator."
});
}
const userAlias = await userAliasDAL.findOne({
externalId,
orgId,
@ -377,6 +388,21 @@ export const samlConfigServiceFactory = ({
return foundUser;
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// member limit reached: the number of members used meets or exceeds the plan's member limit
throw new BadRequestError({
message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// identity limit reached: the number of identities used meets or exceeds the plan's identity limit
throw new BadRequestError({
message: "Failed to create new member via SAML due to identity limit reached. Upgrade plan to add more identities."
});
}
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
@ -452,6 +478,7 @@ export const samlConfigServiceFactory = ({
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
@ -464,6 +491,7 @@ export const samlConfigServiceFactory = ({
organizationId: organization.id,
organizationSlug: organization.slug,
authMethod: authProvider,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
authType: UserAliasType.SAML,
isUserCompleted,
...(relayState

@ -2,7 +2,7 @@ import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import jwt from "jsonwebtoken";
import { OrgMembershipRole, OrgMembershipStatus, TableName, TGroups, TOrgMemberships, TUsers } from "@app/db/schemas";
import { OrgMembershipRole, OrgMembershipStatus, TableName, TOrgMemberships, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
@ -66,7 +66,7 @@ type TScimServiceFactoryDep = {
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete" | "findProjectMembershipsByUserId">;
groupDAL: Pick<
TGroupDALFactory,
"create" | "findOne" | "findAllGroupMembers" | "update" | "delete" | "findGroups" | "transaction"
"create" | "findOne" | "findAllGroupMembers" | "delete" | "findGroups" | "transaction" | "updateById" | "update"
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
userGroupMembershipDAL: Pick<
@ -817,7 +817,6 @@ export const scimServiceFactory = ({
});
};
// TODO: add support for add/remove op
const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
@ -840,23 +839,45 @@ export const scimServiceFactory = ({
status: 403
});
let group: TGroups | undefined;
let group = await groupDAL.findOne({
id: groupId,
orgId
});
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
for await (const operation of operations) {
switch (operation.op) {
case "replace": {
await groupDAL.update(
{
id: groupId,
orgId
},
{
name: operation.value.displayName
}
);
group = await groupDAL.updateById(group.id, {
name: operation.value.displayName
});
break;
}
case "add": {
// TODO
const orgMemberships = await orgMembershipDAL.find({
$in: {
id: operation.value.map((member) => member.value)
}
});
await addUsersToGroupByUserIds({
group,
userIds: orgMemberships.map((membership) => membership.userId as string),
userDAL,
userGroupMembershipDAL,
orgDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL
});
break;
}
case "remove": {
@ -872,13 +893,6 @@ export const scimServiceFactory = ({
}
}
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
return buildScimGroup({
groupId: group.id,
name: group.name,

@ -125,10 +125,11 @@ type TRemoveOp = {
type TAddOp = {
op: "add";
path: string;
value: {
value: string;
display?: string;
};
}[];
};
export type TDeleteScimGroupDTO = {

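With the corrected TAddOp, `value` is an array of member references, which matches the `operation.value.map((member) => member.value)` call in the SCIM service above. An example PATCH "add" operation that now type-checks (IDs are illustrative):

const addMembersOperation = {
  op: "add" as const,
  path: "members",
  value: [
    { value: "org-membership-id-1", display: "jane@example.com" },
    { value: "org-membership-id-2" } // display is optional
  ]
};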
@ -1,49 +1,59 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TSecretApprovalPolicies } from "@app/db/schemas";
import { SecretApprovalPoliciesSchema, TableName, TSecretApprovalPolicies } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, mergeOneToManyRelation, ormify, selectAllTableCols, TFindFilter } from "@app/lib/knex";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
export type TSecretApprovalPolicyDALFactory = ReturnType<typeof secretApprovalPolicyDALFactory>;
export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
const secretApprovalPolicyOrm = ormify(db, TableName.SecretApprovalPolicy);
const sapFindQuery = (tx: Knex, filter: TFindFilter<TSecretApprovalPolicies>) =>
const secretApprovalPolicyFindQuery = (tx: Knex, filter: TFindFilter<TSecretApprovalPolicies>) =>
tx(TableName.SecretApprovalPolicy)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
.leftJoin(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.select(tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
.select(tx.ref("projectId").withSchema(TableName.Environment))
.select(tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover))
.select(
tx.ref("name").withSchema(TableName.Environment).as("envName"),
tx.ref("slug").withSchema(TableName.Environment).as("envSlug"),
tx.ref("id").withSchema(TableName.Environment).as("envId"),
tx.ref("projectId").withSchema(TableName.Environment)
)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.orderBy("createdAt", "asc");
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await sapFindQuery(tx || db, {
const doc = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.SecretApprovalPolicy}.id` as "id"]: id
});
const formatedDoc = mergeOneToManyRelation(
doc,
"id",
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
...el,
envId,
environment: { id: envId, name, slug }
const formatedDoc = sqlNestRelationships({
data: doc,
key: "id",
parentMapper: (data) => ({
environment: { id: data.envId, name: data.envName, slug: data.envSlug },
projectId: data.projectId,
...SecretApprovalPoliciesSchema.parse(data)
}),
({ approverId }) => approverId,
"approvers"
);
childrenMapper: [
{
key: "approverUserId",
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
});
return formatedDoc?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "FindById" });
@ -52,18 +62,25 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
const find = async (filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>, tx?: Knex) => {
try {
const docs = await sapFindQuery(tx || db, filter);
const formatedDoc = mergeOneToManyRelation(
docs,
"id",
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
...el,
envId,
environment: { id: envId, name, slug }
const docs = await secretApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
const formatedDoc = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
environment: { id: data.envId, name: data.envName, slug: data.envSlug },
projectId: data.projectId,
...SecretApprovalPoliciesSchema.parse(data)
}),
({ approverId }) => approverId,
"approvers"
);
childrenMapper: [
{
key: "approverUserId",
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
});
return formatedDoc;
} catch (error) {
throw new DatabaseError({ error, name: "Find" });

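The switch from mergeOneToManyRelation to sqlNestRelationships groups the flat joined rows by policy id and collects each child mapper's output under its label (here "userApprovers"). A generic sketch of that nesting contract as it is used above; this is not the actual helper from @app/lib/knex, and it omits the de-duplication of children that join fan-out normally requires:

type ChildMapperSketch<R> = { key: keyof R; label: string; mapper: (row: R) => unknown };

const nestRelationshipsSketch = <R extends Record<string, unknown>>(args: {
  data: R[];
  key: keyof R;
  parentMapper: (row: R) => Record<string, unknown>;
  childrenMapper: ChildMapperSketch<R>[];
}) => {
  const parents = new Map<unknown, Record<string, unknown>>();
  for (const row of args.data) {
    const id = row[args.key];
    if (!parents.has(id)) {
      const parent = args.parentMapper(row);
      for (const child of args.childrenMapper) parent[child.label] = [];
      parents.set(id, parent);
    }
    const parent = parents.get(id)!;
    for (const child of args.childrenMapper) {
      // Only emit a child when the joined column is present (LEFT JOINs can yield nulls).
      const mapped = row[child.key] != null ? child.mapper(row) : undefined;
      if (mapped !== undefined) (parent[child.label] as unknown[]).push(mapped);
    }
  }
  return [...parents.values()];
};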
@ -7,7 +7,6 @@ import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { containsGlobPatterns } from "@app/lib/picomatch";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretApprovalPolicyApproverDALFactory } from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
@ -29,7 +28,6 @@ type TSecretApprovalPolicyServiceFactoryDep = {
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
};
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@ -38,8 +36,7 @@ export const secretApprovalPolicyServiceFactory = ({
secretApprovalPolicyDAL,
permissionService,
secretApprovalPolicyApproverDAL,
projectEnvDAL,
projectMembershipDAL
projectEnvDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createSecretApprovalPolicy = async ({
name,
@ -48,12 +45,12 @@ export const secretApprovalPolicyServiceFactory = ({
actorOrgId,
actorAuthMethod,
approvals,
approvers,
approverUserIds,
projectId,
secretPath,
environment
}: TCreateSapDTO) => {
if (approvals > approvers.length)
if (approvals > approverUserIds.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission } = await permissionService.getProjectPermission(
@ -70,13 +67,6 @@ export const secretApprovalPolicyServiceFactory = ({
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) throw new BadRequestError({ message: "Environment not found" });
const secretApprovers = await projectMembershipDAL.find({
projectId,
$in: { id: approvers }
});
if (secretApprovers.length !== approvers.length)
throw new BadRequestError({ message: "Approver not found in project" });
const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => {
const doc = await secretApprovalPolicyDAL.create(
{
@ -88,8 +78,8 @@ export const secretApprovalPolicyServiceFactory = ({
tx
);
await secretApprovalPolicyApproverDAL.insertMany(
secretApprovers.map(({ id }) => ({
approverId: id,
approverUserIds.map((approverUserId) => ({
approverUserId,
policyId: doc.id
})),
tx
@ -100,7 +90,7 @@ export const secretApprovalPolicyServiceFactory = ({
};
const updateSecretApprovalPolicy = async ({
approvers,
approverUserIds,
secretPath,
name,
actorId,
@ -132,22 +122,11 @@ export const secretApprovalPolicyServiceFactory = ({
},
tx
);
if (approvers) {
const secretApprovers = await projectMembershipDAL.find(
{
projectId: secretApprovalPolicy.projectId,
$in: { id: approvers }
},
{ tx }
);
if (secretApprovers.length !== approvers.length)
throw new BadRequestError({ message: "Approver not found in project" });
if (doc.approvals > secretApprovers.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
if (approverUserIds) {
await secretApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
await secretApprovalPolicyApproverDAL.insertMany(
secretApprovers.map(({ id }) => ({
approverId: id,
approverUserIds.map((approverUserId) => ({
approverUserId,
policyId: doc.id
})),
tx

@ -4,7 +4,7 @@ export type TCreateSapDTO = {
approvals: number;
secretPath?: string | null;
environment: string;
approvers: string[];
approverUserIds: string[];
projectId: string;
name: string;
} & Omit<TProjectPermission, "projectId">;
@ -13,7 +13,7 @@ export type TUpdateSapDTO = {
secretPolicyId: string;
approvals?: number;
secretPath?: string | null;
approvers: string[];
approverUserIds: string[];
name?: string;
} & Omit<TProjectPermission, "projectId">;

@ -5,7 +5,8 @@ import {
SecretApprovalRequestsSchema,
TableName,
TSecretApprovalRequests,
TSecretApprovalRequestsSecrets
TSecretApprovalRequestsSecrets,
TUsers
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships, stripUndefinedInWhere, TFindFilter } from "@app/lib/knex";
@ -16,7 +17,7 @@ export type TSecretApprovalRequestDALFactory = ReturnType<typeof secretApprovalR
type TFindQueryFilter = {
projectId: string;
membershipId: string;
userId: string;
status?: RequestState;
environment?: string;
committer?: string;
@ -37,32 +38,68 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.leftJoin<TUsers>(
db(TableName.Users).as("statusChangedByUser"),
`${TableName.SecretApprovalRequest}.statusChangedByUserId`,
`statusChangedByUser.id`
)
.join<TUsers>(
db(TableName.Users).as("committerUser"),
`${TableName.SecretApprovalRequest}.committerUserId`,
`committerUser.id`
)
.join(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join<TUsers>(
db(TableName.Users).as("secretApprovalPolicyApproverUser"),
`${TableName.SecretApprovalPolicyApprover}.approverUserId`,
"secretApprovalPolicyApproverUser.id"
)
.leftJoin(
TableName.SecretApprovalRequestReviewer,
`${TableName.SecretApprovalRequest}.id`,
`${TableName.SecretApprovalRequestReviewer}.requestId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("secretApprovalReviewerUser"),
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
`secretApprovalReviewerUser.id`
)
.select(selectAllTableCols(TableName.SecretApprovalRequest))
.select(
tx.ref("member").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"),
tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"),
tx.ref("firstName").withSchema("secretApprovalPolicyApproverUser").as("approverFirstName"),
tx.ref("lastName").withSchema("secretApprovalPolicyApproverUser").as("approverLastName"),
tx.ref("email").withSchema("statusChangedByUser").as("statusChangedByUserEmail"),
tx.ref("username").withSchema("statusChangedByUser").as("statusChangedByUserUsername"),
tx.ref("firstName").withSchema("statusChangedByUser").as("statusChangedByUserFirstName"),
tx.ref("lastName").withSchema("statusChangedByUser").as("statusChangedByUserLastName"),
tx.ref("email").withSchema("committerUser").as("committerUserEmail"),
tx.ref("username").withSchema("committerUser").as("committerUserUsername"),
tx.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"),
tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
tx.ref("projectId").withSchema(TableName.Environment),
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover)
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals")
);
const findById = async (id: string, tx?: Knex) => {
try {
const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db);
const sql = findQuery({ [`${TableName.SecretApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
const docs = await sql;
const formatedDoc = sqlNestRelationships({
data: docs,
@ -71,6 +108,22 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
...SecretApprovalRequestsSchema.parse(el),
projectId: el.projectId,
environment: el.environment,
statusChangedByUser: el.statusChangedByUserId
? {
userId: el.statusChangedByUserId,
email: el.statusChangedByUserEmail,
firstName: el.statusChangedByUserFirstName,
lastName: el.statusChangedByUserLastName,
username: el.statusChangedByUserUsername
}
: undefined,
committerUser: {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
},
policy: {
id: el.policyId,
name: el.policyName,
@ -80,11 +133,34 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
mapper: ({
reviewerUserId: userId,
reviewerStatus: status,
reviewerEmail: email,
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
},
{ key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId }
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({
approverUserId,
approverEmail: email,
approverUsername: username,
approverLastName: lastName,
approverFirstName: firstName
}) => ({
userId: approverUserId,
email,
firstName,
lastName,
username
})
}
]
});
if (!formatedDoc?.[0]) return;
@ -97,12 +173,12 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
}
};
const findProjectRequestCount = async (projectId: string, membershipId: string, tx?: Knex) => {
const findProjectRequestCount = async (projectId: string, userId: string, tx?: Knex) => {
try {
const docs = await (tx || db)
.with(
"temp",
(tx || db)(TableName.SecretApprovalRequest)
(tx || db.replicaNode())(TableName.SecretApprovalRequest)
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(
@ -114,8 +190,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
.andWhere(
(bd) =>
void bd
.where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId)
.orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId)
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
)
.select("status", `${TableName.SecretApprovalRequest}.id`)
.groupBy(`${TableName.SecretApprovalRequest}.id`, "status")
@ -142,13 +218,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
};
const findByProjectId = async (
{ status, limit = 20, offset = 0, projectId, committer, environment, membershipId }: TFindQueryFilter,
{ status, limit = 20, offset = 0, projectId, committer, environment, userId }: TFindQueryFilter,
tx?: Knex
) => {
try {
// akhilmhdh: if you ever need a one-to-many relationship combined with pagination,
// this is the place to look.
const query = (tx || db)(TableName.SecretApprovalRequest)
const query = (tx || db.replicaNode())(TableName.SecretApprovalRequest)
.join(TableName.SecretFolder, `${TableName.SecretApprovalRequest}.folderId`, `${TableName.SecretFolder}.id`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.join(
@ -161,6 +237,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join<TUsers>(
db(TableName.Users).as("committerUser"),
`${TableName.SecretApprovalRequest}.committerUserId`,
`committerUser.id`
)
.leftJoin(
TableName.SecretApprovalRequestReviewer,
`${TableName.SecretApprovalRequest}.id`,
@ -176,20 +257,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
projectId,
[`${TableName.Environment}.slug` as "slug"]: environment,
[`${TableName.SecretApprovalRequest}.status`]: status,
committerId: committer
committerUserId: committer
})
)
.andWhere(
(bd) =>
void bd
.where(`${TableName.SecretApprovalPolicyApprover}.approverId`, membershipId)
.orWhere(`${TableName.SecretApprovalRequest}.committerId`, membershipId)
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
)
.select(selectAllTableCols(TableName.SecretApprovalRequest))
.select(
db.ref("projectId").withSchema(TableName.Environment),
db.ref("slug").withSchema(TableName.Environment).as("environment"),
db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerMemberId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerId"),
db.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
db.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
db.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
db.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
@ -201,7 +283,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("approverId").withSchema(TableName.SecretApprovalPolicyApprover)
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
db.ref("email").withSchema("committerUser").as("committerUserEmail"),
db.ref("username").withSchema("committerUser").as("committerUserUsername"),
db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
db.ref("lastName").withSchema("committerUser").as("committerUserLastName")
)
.orderBy("createdAt", "desc");
@ -223,18 +309,26 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath
},
committerUser: {
userId: el.committerUserId,
email: el.committerUserEmail,
firstName: el.committerUserFirstName,
lastName: el.committerUserLastName,
username: el.committerUserUsername
}
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: s }) => (member ? { member, status: s } : undefined)
mapper: ({ reviewerUserId, reviewerStatus: s }) =>
reviewerUserId ? { userId: reviewerUserId, status: s } : undefined
},
{
key: "approverId",
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverId }) => approverId
mapper: ({ approverUserId }) => approverUserId
},
{
key: "commitId",

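The DAL changes above still return flat join rows and rely on the sqlNestRelationships helper to fold them back into nested reviewer/approver objects keyed by the parent request id. A minimal sketch of that folding pattern, using hypothetical row shapes rather than the real schema:

// Minimal sketch: fold flat join rows into parents with nested children (hypothetical shapes).
type FlatRow = { id: string; policyName: string; reviewerUserId?: string; reviewerStatus?: string };

const nestByParent = (rows: FlatRow[]) => {
  const parents = new Map<string, { id: string; policyName: string; reviewers: { userId: string; status: string }[] }>();
  for (const row of rows) {
    const parent = parents.get(row.id) ?? { id: row.id, policyName: row.policyName, reviewers: [] };
    // only append a child when the left join actually produced one, and dedupe the fan-out of repeated rows
    if (row.reviewerUserId && !parent.reviewers.some((r) => r.userId === row.reviewerUserId)) {
      parent.reviewers.push({ userId: row.reviewerUserId, status: row.reviewerStatus ?? "pending" });
    }
    parents.set(row.id, parent);
  }
  return [...parents.values()];
};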
@ -47,7 +47,7 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
const findByRequestId = async (requestId: string, tx?: Knex) => {
try {
const doc = await (tx || db)({
const doc = await (tx || db.replicaNode())({
secVerTag: TableName.SecretTag
})
.from(TableName.SecretApprovalRequestSecret)

@ -87,7 +87,7 @@ export const secretApprovalRequestServiceFactory = ({
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const { membership } = await permissionService.getProjectPermission(
await permissionService.getProjectPermission(
actor as ActorType.USER,
actorId,
projectId,
@ -95,7 +95,7 @@ export const secretApprovalRequestServiceFactory = ({
actorOrgId
);
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, membership.id);
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, actorId);
return count;
};
@ -113,19 +113,13 @@ export const secretApprovalRequestServiceFactory = ({
}: TListApprovalsDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const { membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
await permissionService.getProjectPermission(actor, actorId, projectId, actorAuthMethod, actorOrgId);
const approvals = await secretApprovalRequestDAL.findByProjectId({
projectId,
committer,
environment,
status,
membershipId: membership.id,
userId: actorId,
limit,
offset
});
@ -145,7 +139,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });
const { policy } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
actor,
actorId,
secretApprovalRequest.projectId,
@ -154,8 +148,8 @@ export const secretApprovalRequestServiceFactory = ({
);
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
@ -180,7 +174,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const { policy } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
secretApprovalRequest.projectId,
@ -189,8 +183,8 @@ export const secretApprovalRequestServiceFactory = ({
);
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
@ -198,7 +192,7 @@ export const secretApprovalRequestServiceFactory = ({
const review = await secretApprovalRequestReviewerDAL.findOne(
{
requestId: secretApprovalRequest.id,
member: membership.id
reviewerUserId: actorId
},
tx
);
@ -207,7 +201,7 @@ export const secretApprovalRequestServiceFactory = ({
{
status,
requestId: secretApprovalRequest.id,
member: membership.id
reviewerUserId: actorId
},
tx
);
@ -230,7 +224,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const { policy } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
secretApprovalRequest.projectId,
@ -239,8 +233,8 @@ export const secretApprovalRequestServiceFactory = ({
);
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
@ -253,7 +247,7 @@ export const secretApprovalRequestServiceFactory = ({
const updatedRequest = await secretApprovalRequestDAL.updateById(secretApprovalRequest.id, {
status,
statusChangeBy: membership.id
statusChangedByUserId: actorId
});
return { ...secretApprovalRequest, ...updatedRequest };
};
@ -270,7 +264,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const { policy, folderId, projectId } = secretApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
projectId,
@ -280,19 +274,19 @@ export const secretApprovalRequestServiceFactory = ({
if (
!hasRole(ProjectMembershipRole.Admin) &&
secretApprovalRequest.committerId !== membership.id &&
!policy.approvers.find((approverId) => approverId === membership.id)
secretApprovalRequest.committerUserId !== actorId &&
!policy.approvers.find(({ userId }) => userId === actorId)
) {
throw new UnauthorizedError({ message: "User has no access" });
}
const reviewers = secretApprovalRequest.reviewers.reduce<Record<string, ApprovalStatus>>(
(prev, curr) => ({ ...prev, [curr.member.toString()]: curr.status as ApprovalStatus }),
(prev, curr) => ({ ...prev, [curr.userId.toString()]: curr.status as ApprovalStatus }),
{}
);
const hasMinApproval =
secretApprovalRequest.policy.approvals <=
secretApprovalRequest.policy.approvers.filter(
(approverId) => reviewers[approverId.toString()] === ApprovalStatus.APPROVED
({ userId: approverId }) => reviewers[approverId.toString()] === ApprovalStatus.APPROVED
).length;
if (!hasMinApproval) throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
@ -472,7 +466,7 @@ export const secretApprovalRequestServiceFactory = ({
conflicts: JSON.stringify(conflicts),
hasMerged: true,
status: RequestState.Closed,
statusChangeBy: membership.id
statusChangedByUserId: actorId
},
tx
);
@ -509,7 +503,7 @@ export const secretApprovalRequestServiceFactory = ({
}: TGenerateSecretApprovalRequestDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const { permission, membership } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
@ -663,7 +657,7 @@ export const secretApprovalRequestServiceFactory = ({
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id
committerUserId: actorId
},
tx
);

@ -11,7 +11,6 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
@ -46,7 +45,6 @@ type TSecretReplicationServiceFactoryDep = {
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
secretApprovalRequestSecretDAL: Pick<
TSecretApprovalRequestSecretDALFactory,
"insertMany" | "insertApprovalSecretTags"
@ -92,7 +90,6 @@ export const secretReplicationServiceFactory = ({
secretApprovalRequestSecretDAL,
secretApprovalRequestDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
}: TSecretReplicationServiceFactoryDep) => {
const getReplicatedSecrets = (
@ -297,12 +294,6 @@ export const secretReplicationServiceFactory = ({
);
// this means it should be a approval request rather than direct replication
if (policy && actor === ActorType.USER) {
const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
if (!membership) {
logger.error("Project membership not found in %s for user %s", projectId, actorId);
return;
}
const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
destinationReplicationFolderId,
@ -316,7 +307,7 @@ export const secretReplicationServiceFactory = ({
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id,
committerUserId: actorId,
isReplicated: true
},
tx

@ -41,7 +41,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {
const find = async (filter: TFindFilter<TSecretRotations & { projectId: string }>, tx?: Knex) => {
try {
const data = await findQuery(filter, tx || db);
const data = await findQuery(filter, tx || db.replicaNode());
return sqlNestRelationships({
data,
key: "id",
@ -93,7 +93,7 @@ export const secretRotationDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.SecretRotation)
const doc = await (tx || db.replicaNode())(TableName.SecretRotation)
.join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`)
.where({ [`${TableName.SecretRotation}.id` as "id"]: id })
.select(selectAllTableCols(TableName.SecretRotation))

@ -331,7 +331,7 @@ export const secretRotationQueueFactory = ({
logger.info("Finished rotating: rotation id: ", rotationId);
} catch (error) {
logger.error(error);
logger.error(error, "Failed to execute secret rotation");
if (error instanceof DisableRotationErrors) {
if (job.id) {
await queue.stopRepeatableJobByJobId(QueueName.SecretRotation, job.id);

@ -133,7 +133,7 @@ export const secretRotationServiceFactory = ({
creds: []
};
const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
const secretRotation = secretRotationDAL.transaction(async (tx) => {
const secretRotation = await secretRotationDAL.transaction(async (tx) => {
const doc = await secretRotationDAL.create(
{
provider,
@ -148,13 +148,13 @@ export const secretRotationServiceFactory = ({
},
tx
);
await secretRotationQueue.addToQueue(doc.id, doc.interval);
const outputSecretMapping = await secretRotationDAL.secretOutputInsertMany(
Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })),
tx
);
return { ...doc, outputs: outputSecretMapping, environment: folder.environment };
});
await secretRotationQueue.addToQueue(secretRotation.id, secretRotation.interval);
return secretRotation;
};
@ -212,9 +212,9 @@ export const secretRotationServiceFactory = ({
);
const deletedDoc = await secretRotationDAL.transaction(async (tx) => {
const strat = await secretRotationDAL.deleteById(rotationId, tx);
await secretRotationQueue.removeFromQueue(strat.id, strat.interval);
return strat;
});
await secretRotationQueue.removeFromQueue(deletedDoc.id, deletedDoc.interval);
return { ...doc, ...deletedDoc };
};
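In both hunks above, queue scheduling was moved out of the DAL transaction: the transaction is now awaited first, and addToQueue/removeFromQueue run only against the committed row, so a rollback can no longer leave a stray repeatable job behind. A minimal sketch of that ordering, with hypothetical dal and queue stand-ins:

// Hypothetical dal and queue stand-ins; the point is the ordering, not the exact API.
type Rotation = { id: string; interval: number };
type Dal = {
  transaction<T>(fn: (tx: unknown) => Promise<T>): Promise<T>;
  create(doc: object, tx: unknown): Promise<Rotation>;
};
type Queue = { addToQueue(id: string, interval: number): Promise<void> };

const createRotation = async (dal: Dal, queue: Queue): Promise<Rotation> => {
  // 1. Perform every database write inside the transaction and await the committed result.
  const rotation = await dal.transaction(async (tx) => dal.create({ provider: "demo" }, tx));
  // 2. Only after the commit, schedule the repeatable job from the persisted row.
  await queue.addToQueue(rotation.id, rotation.interval);
  return rotation;
};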

@ -21,7 +21,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
const findById = async (id: string, tx?: Knex) => {
try {
const data = await (tx || db)(TableName.Snapshot)
const data = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.id`, id)
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.Snapshot))
@ -43,7 +43,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
const countOfSnapshotsByFolderId = async (folderId: string, tx?: Knex) => {
try {
const doc = await (tx || db)(TableName.Snapshot)
const doc = await (tx || db.replicaNode())(TableName.Snapshot)
.where({ folderId })
.groupBy(["folderId"])
.count("folderId")
@ -56,7 +56,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
const findSecretSnapshotDataById = async (snapshotId: string, tx?: Knex) => {
try {
const data = await (tx || db)(TableName.Snapshot)
const data = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.id`, snapshotId)
.join(TableName.Environment, `${TableName.Snapshot}.envId`, `${TableName.Environment}.id`)
.leftJoin(TableName.SnapshotSecret, `${TableName.Snapshot}.id`, `${TableName.SnapshotSecret}.snapshotId`)
@ -309,7 +309,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
// when we need to rollback we will pull from these snapshots
const findLatestSnapshotByFolderId = async (folderId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.Snapshot)
const docs = await (tx || db.replicaNode())(TableName.Snapshot)
.where(`${TableName.Snapshot}.folderId`, folderId)
.join<TSecretSnapshots>(
(tx || db)(TableName.Snapshot).groupBy("folderId").max("createdAt").select("folderId").as("latestVersion"),
@ -331,8 +331,8 @@ export const snapshotDALFactory = (db: TDbClient) => {
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
*
* This function operates in three main steps:
* 1. Pruning snapshots from root/non-versioned folders.
* 2. Pruning snapshots from versioned folders.
* 1. Pruning snapshots from current folders.
* 2. Pruning snapshots from non-current folders (versioned ones).
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
*
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
@ -350,7 +350,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
try {
let uuidOffset = "00000000-0000-0000-0000-000000000000";
// cleanup snapshots from root/non-versioned folders
// cleanup snapshots from current folders
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
while (true) {
const folderBatch = await db(TableName.SecretFolder)
@ -382,12 +382,11 @@ export const snapshotDALFactory = (db: TDbClient) => {
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereNull(`${TableName.SecretFolder}.parentId`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from root/non-versioned folders in range ${batchEntries[0]}:${
`Failed to prune snapshots from current folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
@ -399,7 +398,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
}
}
// cleanup snapshots from versioned folders
// cleanup snapshots from non-current folders
uuidOffset = "00000000-0000-0000-0000-000000000000";
// eslint-disable-next-line no-constant-condition
while (true) {
@ -440,7 +439,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from versioned folders in range ${batchEntries[0]}:${
`Failed to prune snapshots from non-current folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
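The pruning loops in this hunk page through folders with a keyset cursor: uuidOffset starts at the all-zero UUID and is advanced to the last folder id of each batch, with PRUNE_FOLDER_BATCH_SIZE rows fetched per iteration. A minimal sketch of that cursor loop, where fetchBatch and the batch size value are stand-ins rather than the real implementation:

// Keyset pagination over UUID-ordered rows (fetchBatch and the batch size are illustrative).
const PRUNE_FOLDER_BATCH_SIZE = 10000;

const pruneInBatches = async (
  fetchBatch: (afterId: string, limit: number) => Promise<{ id: string }[]>,
  pruneBatch: (folderIds: string[]) => Promise<void>
) => {
  let cursor = "00000000-0000-0000-0000-000000000000";
  for (;;) {
    const batch = await fetchBatch(cursor, PRUNE_FOLDER_BATCH_SIZE);
    if (!batch.length) break; // nothing left past the cursor
    await pruneBatch(batch.map((folder) => folder.id));
    cursor = batch[batch.length - 1].id; // advance the cursor to the last id seen
  }
};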

@ -42,6 +42,13 @@ export const IDENTITIES = {
},
DELETE: {
identityId: "The ID of the identity to delete."
},
GET_BY_ID: {
identityId: "The ID of the identity to get details.",
orgId: "The ID of the org of the identity"
},
LIST: {
orgId: "The ID of the organization to list identities."
}
} as const;
@ -63,10 +70,13 @@ export const UNIVERSAL_AUTH = {
"The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve."
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
},
UPDATE: {
identityId: "The ID of the identity to update.",
identityId: "The ID of the identity to update the auth method for.",
clientSecretTrustedIps: "The new list of IPs or CIDR ranges that the Client Secret can be used from.",
accessTokenTrustedIps: "The new list of IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
@ -83,6 +93,10 @@ export const UNIVERSAL_AUTH = {
LIST_CLIENT_SECRETS: {
identityId: "The ID of the identity to list client secrets for."
},
GET_CLIENT_SECRET: {
identityId: "The ID of the identity to get the client secret from.",
clientSecretId: "The ID of the client secret to get details."
},
REVOKE_CLIENT_SECRET: {
identityId: "The ID of the identity to revoke the client secret from.",
clientSecretId: "The ID of the client secret to revoke."
@ -104,6 +118,229 @@ export const AWS_AUTH = {
iamRequestBody:
"The base64-encoded body of the signed request. Most likely, the base64-encoding of Action=GetCallerIdentity&Version=2011-06-15.",
iamRequestHeaders: "The base64-encoded headers of the sts:GetCallerIdentity signed request."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
allowedPrincipalArns:
"The comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.",
allowedAccountIds:
"The comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
stsEndpoint: "The endpoint URL for the AWS STS API.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
allowedPrincipalArns:
"The new comma-separated list of trusted IAM principal ARNs that are allowed to authenticate with Infisical.",
allowedAccountIds:
"The new comma-separated list of trusted AWS account IDs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
stsEndpoint: "The new endpoint URL for the AWS STS API.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const AZURE_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
tenantId: "The tenant ID for the Azure AD organization.",
resource: "The resource URL for the application registered in Azure AD.",
allowedServicePrincipalIds:
"The comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
tenantId: "The new tenant ID for the Azure AD organization.",
resource: "The new resource URL for the application registered in Azure AD.",
allowedServicePrincipalIds:
"The new comma-separated list of Azure AD service principal IDs that are allowed to authenticate with Infisical.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const GCP_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
allowedServiceAccounts:
"The comma-separated list of trusted service account emails corresponding to the GCE resource(s) allowed to authenticate with Infisical.",
allowedProjects:
"The comma-separated list of trusted GCP projects that the GCE instance must belong to authenticate with Infisical.",
allowedZones:
"The comma-separated list of trusted zones that the GCE instances must belong to authenticate with Infisical.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
allowedServiceAccounts:
"The new comma-separated list of trusted service account emails corresponding to the GCE resource(s) allowed to authenticate with Infisical.",
allowedProjects:
"The new comma-separated list of trusted GCP projects that the GCE instance must belong to authenticate with Infisical.",
allowedZones:
"The new comma-separated list of trusted zones that the GCE instances must belong to authenticate with Infisical.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const KUBERNETES_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
allowedAudience:
"The optional audience claim that the service account JWT token must have to authenticate with Infisical.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
allowedAudience:
"The new optional audience claim that the service account JWT token must have to authenticate with Infisical.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const TOKEN_AUTH = {
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
},
GET_TOKENS: {
identityId: "The ID of the identity to list token metadata for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th token.",
limit: "The number of tokens to return"
},
CREATE_TOKEN: {
identityId: "The ID of the identity to create the token for.",
name: "The name of the token to create"
},
UPDATE_TOKEN: {
tokenId: "The ID of the token to update metadata for",
name: "The name of the token to update to"
},
REVOKE_TOKEN: {
tokenId: "The ID of the token to revoke"
}
} as const;
export const OIDC_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
oidcDiscoveryUrl: "The URL used to retrieve the OpenID Connect configuration from the identity provider.",
caCert: "The PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.",
boundIssuer: "The unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The list of intended recipients.",
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
boundSubject: "The expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
oidcDiscoveryUrl: "The new URL used to retrieve the OpenID Connect configuration from the identity provider.",
caCert: "The new PEM-encoded CA cert for establishing secure communication with the Identity Provider endpoints.",
boundIssuer: "The new unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The new list of intended recipients.",
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
boundSubject: "The new expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an acccess token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an acccess token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
@ -347,6 +584,7 @@ export const RAW_SECRETS = {
tagIds: "The ID of the tags to be attached to the created secret."
},
GET: {
expand: "Whether or not to expand secret references",
secretName: "The name of the secret to get.",
workspaceId: "The ID of the project to get the secret from.",
workspaceSlug: "The slug of the project to get the secret from.",
@ -508,12 +746,27 @@ export const SECRET_TAGS = {
LIST: {
projectId: "The ID of the project to list tags from."
},
GET_TAG_BY_ID: {
projectId: "The ID of the project to get tags from.",
tagId: "The ID of the tag to get details"
},
GET_TAG_BY_SLUG: {
projectId: "The ID of the project to get tags from.",
tagSlug: "The slug of the tag to get details"
},
CREATE: {
projectId: "The ID of the project to create the tag in.",
name: "The name of the tag to create.",
slug: "The slug of the tag to create.",
color: "The color of the tag to create."
},
UPDATE: {
projectId: "The ID of the project to update the tag in.",
tagId: "The ID of the tag to get details",
name: "The name of the tag to update.",
slug: "The slug of the tag to update.",
color: "The color of the tag to update."
},
DELETE: {
tagId: "The ID of the tag to delete.",
projectId: "The ID of the project to delete the tag from."
@ -641,6 +894,7 @@ export const INTEGRATION_AUTH = {
integration: "The slug of integration for the auth object.",
accessId: "The unique authorized access id of the external integration provider.",
accessToken: "The unique authorized access token of the external integration provider.",
awsAssumeIamRoleArn: "The AWS IAM Role to be assumed by Infisical",
url: "",
namespace: "",
refreshToken: "The refresh token for integration authorization."
@ -789,6 +1043,8 @@ export const CERTIFICATE_AUTHORITIES = {
caId: "The ID of the CA to issue the certificate from",
friendlyName: "A friendly name for the certificate",
commonName: "The common name (CN) for the certificate",
altNames:
"A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.",
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",

@ -5,14 +5,25 @@ import { zpStr } from "../zod";
export const GITLAB_URL = "https://gitlab.com";
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any -- If `process.pkg` is set, and it's true, then it means that the app is currently running in a packaged environment (a binary)
export const IS_PACKAGED = (process as any)?.pkg !== undefined;
const zodStrBool = z
.enum(["true", "false"])
.optional()
.transform((val) => val === "true");
const databaseReadReplicaSchema = z
.object({
DB_CONNECTION_URI: z.string().describe("Postgres read replica database connection string"),
DB_ROOT_CERT: zpStr(z.string().optional().describe("Postgres read replica database certificate string"))
})
.array()
.optional();
const envSchema = z
.object({
PORT: z.coerce.number().default(4000),
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
DISABLE_SECRET_SCANNING: z
.enum(["true", "false"])
.default("false")
@ -29,7 +40,8 @@ const envSchema = z
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
DB_READ_REPLICAS: zpStr(z.string().describe("Postgres read replicas").optional()),
BCRYPT_SALT_ROUND: z.number().default(12),
NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
SALT_ROUNDS: z.coerce.number().default(10),
INITIAL_ORGANIZATION_NAME: zpStr(z.string().optional()),
@ -101,6 +113,9 @@ const envSchema = z
// azure
CLIENT_ID_AZURE: zpStr(z.string().optional()),
CLIENT_SECRET_AZURE: zpStr(z.string().optional()),
// aws
CLIENT_ID_AWS_INTEGRATION: zpStr(z.string().optional()),
CLIENT_SECRET_AWS_INTEGRATION: zpStr(z.string().optional()),
// gitlab
CLIENT_ID_GITLAB: zpStr(z.string().optional()),
CLIENT_SECRET_GITLAB: zpStr(z.string().optional()),
@ -119,19 +134,24 @@ const envSchema = z
// GENERIC
STANDALONE_MODE: z
.enum(["true", "false"])
.transform((val) => val === "true")
.transform((val) => val === "true" || IS_PACKAGED)
.optional(),
INFISICAL_CLOUD: zodStrBool.default("false"),
MAINTENANCE_MODE: zodStrBool.default("false"),
CAPTCHA_SECRET: zpStr(z.string().optional())
CAPTCHA_SECRET: zpStr(z.string().optional()),
PLAIN_API_KEY: zpStr(z.string().optional()),
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional())
})
.transform((data) => ({
...data,
DB_READ_REPLICAS: data.DB_READ_REPLICAS
? databaseReadReplicaSchema.parse(JSON.parse(data.DB_READ_REPLICAS))
: undefined,
isCloud: Boolean(data.LICENSE_SERVER_KEY),
isSmtpConfigured: Boolean(data.SMTP_HOST),
isRedisConfigured: Boolean(data.REDIS_URL),
isDevelopmentMode: data.NODE_ENV === "development",
isProductionMode: data.NODE_ENV === "production",
isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&

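Given the replica schema above, DB_READ_REPLICAS is read as a JSON-encoded array and re-parsed in the transform step, so a value along these lines should validate (hosts, credentials, and the certificate are placeholders):

// Illustrative value only; hosts, credentials, and cert are placeholders.
process.env.DB_READ_REPLICAS = JSON.stringify([
  { DB_CONNECTION_URI: "postgres://user:pass@replica-1.example.com:5432/infisical" },
  { DB_CONNECTION_URI: "postgres://user:pass@replica-2.example.com:5432/infisical", DB_ROOT_CERT: "<pem-or-base64-cert>" }
]);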
@ -6,7 +6,7 @@ import tweetnacl from "tweetnacl-util";
import { TUserEncryptionKeys } from "@app/db/schemas";
import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";
export const generateSrpServerKey = async (salt: string, verifier: string) => {
// eslint-disable-next-line new-cap
@ -97,30 +97,55 @@ export const generateUserSrpKeys = async (email: string, password: string) => {
};
};
export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric({
ciphertext: user.protectedKey!,
iv: user.protectedKeyIV!,
tag: user.protectedKeyTag!,
key: derivedKey.toString("base64")
});
const privateKey = decryptSymmetric({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key
});
return privateKey;
export const getUserPrivateKey = async (
password: string,
user: Pick<
TUserEncryptionKeys,
| "protectedKeyTag"
| "protectedKey"
| "protectedKeyIV"
| "encryptedPrivateKey"
| "iv"
| "salt"
| "tag"
| "encryptionVersion"
>
) => {
if (user.encryptionVersion === 1) {
return decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key: password.slice(0, 32).padStart(32 + (password.slice(0, 32).length - new Blob([password]).size), "0")
});
}
if (user.encryptionVersion === 2 && user.protectedKey && user.protectedKeyIV && user.protectedKeyTag) {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.protectedKey,
iv: user.protectedKeyIV,
tag: user.protectedKeyTag,
key: derivedKey
});
const privateKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key: Buffer.from(key, "hex")
});
return privateKey;
}
throw new Error(`GetUserPrivateKey: Encryption version not found`);
};
export const buildUserProjectKey = async (privateKey: string, publickey: string) => {

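A short usage sketch of the reworked getUserPrivateKey, assuming the user's encryption-keys row has already been loaded (all field values below are placeholders):

// Placeholder row; in practice this comes from the user's encryption-keys record.
const userKeys = {
  encryptionVersion: 2,
  protectedKey: "<ciphertext>",
  protectedKeyIV: "<iv>",
  protectedKeyTag: "<tag>",
  encryptedPrivateKey: "<ciphertext>",
  iv: "<iv>",
  tag: "<tag>",
  salt: "<salt>"
};

const unlockPrivateKey = async (password: string) => {
  // version 2 rows go through argon2id key derivation; version 1 rows use the legacy
  // password-derived key; any other version throws
  return getUserPrivateKey(password, userKeys);
};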
@ -59,6 +59,18 @@ export class BadRequestError extends Error {
}
}
export class NotFoundError extends Error {
name: string;
error: unknown;
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
super(message ?? "The requested entity is not found");
this.name = name || "NotFound";
this.error = error;
}
}
export class DisableRotationErrors extends Error {
name: string;

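A short usage sketch of the new NotFoundError; the DAL and id here are hypothetical, only the error construction mirrors the class above:

const getSecretOrThrow = async (secretDAL: { findById(id: string): Promise<object | undefined> }, secretId: string) => {
  const secret = await secretDAL.findById(secretId);
  // surface a not-found error instead of a generic bad request when the row is missing
  if (!secret) throw new NotFoundError({ name: "GetSecret", message: `Secret with ID '${secretId}' not found` });
  return secret;
};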
@ -0,0 +1 @@
export const isMigrationMode = () => !!process.argv.slice(2).find((arg) => arg === "migration:latest"); // example -> ./binary migration:latest

@ -1,6 +1,7 @@
// Some of the functions are taken from https://github.com/rayepps/radash
// Full credit goes to https://github.com/rayepps for those functions
// Code copied to keep it in-house and to adjust some things for our needs
export * from "./argv";
export * from "./array";
export * from "./dates";
export * from "./object";

@ -50,7 +50,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
}),
findById: async (id: string, tx?: Knex) => {
try {
const result = await (tx || db)(tableName)
const result = await (tx || db.replicaNode())(tableName)
.where({ id } as never)
.first("*");
return result;
@ -60,7 +60,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
},
findOne: async (filter: Partial<Tables[Tname]["base"]>, tx?: Knex) => {
try {
const res = await (tx || db)(tableName).where(filter).first("*");
const res = await (tx || db.replicaNode())(tableName).where(filter).first("*");
return res;
} catch (error) {
throw new DatabaseError({ error, name: "Find one" });
@ -71,7 +71,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
{ offset, limit, sort, tx }: TFindOpt<Tables[Tname]["base"]> = {}
) => {
try {
const query = (tx || db)(tableName).where(buildFindFilter(filter));
const query = (tx || db.replicaNode())(tableName).where(buildFindFilter(filter));
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {

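The ormify reads above now route through db.replicaNode() whenever no transaction is supplied. The replica-aware client itself is not shown in this diff; a minimal sketch of the idea, assuming one primary Knex instance plus zero or more replicas built from DB_READ_REPLICAS, could look like this:

import { Knex } from "knex";

// Sketch only: expose a replicaNode() accessor that round-robins reads across replicas
// and falls back to the primary when none are configured.
const makeReplicaAwareDb = (primary: Knex, replicas: Knex[]) => {
  let cursor = 0;
  const db = primary as Knex & { replicaNode: () => Knex };
  db.replicaNode = () => {
    if (!replicas.length) return primary;
    cursor = (cursor + 1) % replicas.length;
    return replicas[cursor];
  };
  return db;
};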
@ -58,7 +58,8 @@ const redactedKeys = [
"decryptedSecret",
"secrets",
"key",
"password"
"password",
"config"
];
export const initLogger = async () => {

Some files were not shown because too many files have changed in this diff