Compare commits


199 Commits

Author SHA1 Message Date
Sheen Capadngan
d4ef92787d misc: renamed rate limit sync job 2024-06-14 18:37:15 +08:00
Sheen Capadngan
b7326bf4c6 misc: addressed typo 2024-06-14 18:34:46 +08:00
Sheen Capadngan
3dd024c90a misc: added license check in backend for custom rate limits 2024-06-14 18:32:50 +08:00
Sheen Capadngan
dd6fb4232e misc: addressed formatting issue 2024-06-14 14:24:33 +08:00
Sheen Capadngan
3411185d60 misc: migrated custom rate limits to enterprise 2024-06-14 14:06:49 +08:00
Maidul Islam
2d8a2a6a3a remove dragon 2024-06-13 23:33:36 -04:00
Maidul Islam
5eeea767a3 Merge pull request #1967 from Infisical/create-pull-request/patch-1718327327
GH Action: rename new migration file timestamp
2024-06-13 23:09:28 -04:00
github-actions
2b4f5962e2 chore: renamed new migration files to latest timestamp (gh-action) 2024-06-14 01:08:46 +00:00
Maidul Islam
bf14bbfeee Merge pull request #1965 from Infisical/feat/allow-custom-rate-limits
feat: allow custom rate limits
2024-06-13 21:08:24 -04:00
Maidul Islam
fa77dc01df apply nits for rate limits 2024-06-13 21:01:18 -04:00
Maidul Islam
ed5044a102 Revert "temporary: increase daily clean up interval"
This reverts commit ec7fe013fd.
2024-06-13 19:59:08 -04:00
Maidul Islam
ec7fe013fd temporary: increase daily clean up interval 2024-06-13 19:09:06 -04:00
Daniel Hougaard
a26ad6cfb0 Merge pull request #1966 from Infisical/daniel/operator-release-fix
Fix: Operator release step
2024-06-14 00:56:58 +02:00
Daniel Hougaard
dd0399d12e Fix: Remove go toolchain and go mod tidy 2024-06-14 00:48:59 +02:00
Daniel Hougaard
04456fe996 Merge pull request #1954 from Infisical/daniel/k8-operator-go-sdk
Feat: K8 operator authentication methods
2024-06-14 00:29:56 +02:00
Daniel Hougaard
2605987289 Helm versioning 2024-06-14 00:25:41 +02:00
Daniel Hougaard
7edcf5ff90 Merge branch 'daniel/k8-operator-go-sdk' of https://github.com/Infisical/infisical into daniel/k8-operator-go-sdk 2024-06-14 00:24:06 +02:00
Daniel Hougaard
3947e3dabf Helm versioning 2024-06-14 00:23:40 +02:00
Maidul Islam
fe6e5e09ac nits: update logs format 2024-06-13 17:57:15 -04:00
Daniel Hougaard
068eb9246d Merge branch 'daniel/k8-operator-go-sdk' of https://github.com/Infisical/infisical into daniel/k8-operator-go-sdk 2024-06-13 22:53:57 +02:00
Daniel Hougaard
3472be480a Fix: Only add finalizer if not marked for deletion 2024-06-13 22:53:54 +02:00
Maidul Islam
df71ecffa0 uppercase for constants 2024-06-13 16:37:56 -04:00
Daniel Hougaard
68818beb38 Update infisicalsecret_helper.go 2024-06-13 22:36:59 +02:00
Daniel Hougaard
e600b68684 Docs: Updated kubernetes 2024-06-13 22:15:57 +02:00
Sheen Capadngan
b52aebfd92 misc: added error log 2024-06-14 02:55:25 +08:00
Sheen Capadngan
c9e56e4e9f misc: updated rate limit labels 2024-06-14 02:46:51 +08:00
Maidul Islam
ef03e9bf3b Merge pull request #1964 from akhilmhdh/feat/ui-permission-check-broken
broken permission page fixed
2024-06-13 14:40:09 -04:00
Sheen Capadngan
08a77f6ddb misc: added missing auth check for rate-limit endpoint 2024-06-14 02:37:01 +08:00
Sheen Capadngan
bc3f21809e misc: migrated to structured singleton pattern 2024-06-14 02:32:21 +08:00
Sheen Capadngan
46b48cea63 misc: added loader 2024-06-14 01:14:26 +08:00
Sheen Capadngan
44956c6a37 misc: reorganized cron structure and removed unnecessary checks 2024-06-14 00:58:54 +08:00
Sheen Capadngan
4de63b6140 fix: updated test 2024-06-13 19:00:33 +08:00
Sheen Capadngan
5cee228f5f misc: updated rate limit update message 2024-06-13 18:45:15 +08:00
Sheen Capadngan
20fea1e25f misc: added flag to disable rate limit updates via API 2024-06-13 18:37:29 +08:00
Sheen Capadngan
d0ffb94bc7 misc: added handling of automatic config sync 2024-06-13 18:19:46 +08:00
=
d3932d8f08 fix(ui): broken permission page fixed 2024-06-13 15:16:00 +05:30
Sheen Capadngan
d5658d374a misc: finalized backend flow 2024-06-13 16:27:13 +08:00
Sheen Capadngan
810a58c836 Merge remote-tracking branch 'origin/main' into shubham/eng-993-allow-self-hosted-users-to-set-their-own-rate-limits-in 2024-06-13 15:44:50 +08:00
Sheen Capadngan
9e24050f17 misc: addressed review comments 2024-06-13 15:42:15 +08:00
Daniel Hougaard
7057d399bc Fix: Naming convention 2024-06-13 07:23:36 +02:00
Daniel Hougaard
c63d57f086 Generated 2024-06-13 07:21:48 +02:00
Daniel Hougaard
a9ce3789b0 Helm 2024-06-13 07:21:40 +02:00
Daniel Hougaard
023a0d99ab Fix: Kubernetes native auth 2024-06-13 07:20:23 +02:00
Daniel Hougaard
5aadc41a4a Feat: Resource Variables 2024-06-13 07:19:33 +02:00
Daniel Hougaard
4f38352765 Create auth.go 2024-06-13 07:09:04 +02:00
Daniel Hougaard
cf5e367aba Update SDK 2024-06-13 07:08:11 +02:00
Daniel Hougaard
a70043b80d Conditioning 2024-06-13 03:05:22 +02:00
Daniel Hougaard
b94db5d674 Standalone infisical SDK instance 2024-06-13 02:55:00 +02:00
Daniel Hougaard
bd6a89fa9a Feat: Improved authentication handler 2024-06-13 02:25:49 +02:00
Maidul Islam
81513e4a75 remove time ago 2024-06-12 20:17:09 -04:00
Maidul Islam
a28b458653 Merge pull request #1958 from Infisical/feat/added-projects-list-view
feat: added projects list view
2024-06-12 19:59:42 -04:00
Maidul Islam
9977329741 add sample resources for k8s auth 2024-06-12 18:25:18 -04:00
Maidul Islam
cd030b0370 Merge pull request #1963 from Infisical/create-pull-request/patch-1718222719
GH Action: rename new migration file timestamp
2024-06-12 16:06:03 -04:00
github-actions
6c86db7d4e chore: renamed new migration files to latest timestamp (gh-action) 2024-06-12 20:05:18 +00:00
Maidul Islam
d48e7eca2d Merge pull request #1906 from Infisical/feat/add-version-limits
feat: add limit to the number of retained snapshots and versions
2024-06-12 16:04:55 -04:00
Maidul Islam
30f3dac35f rephrase input and filer for resvered folder 2024-06-12 15:56:47 -04:00
Sheen Capadngan
0e5f0eefc1 misc: added rounded design to loading entries 2024-06-13 02:56:48 +08:00
Sheen Capadngan
2a005d2654 misc: added rounding to list view 2024-06-13 02:40:08 +08:00
Daniel Hougaard
42425d91d5 Merge pull request #1962 from Infisical/daniel/go-sdk-improvements
Fix: Go SDK Docs typos
2024-06-12 19:44:11 +02:00
Daniel Hougaard
a0770baff2 Update go.mdx 2024-06-12 19:40:58 +02:00
Sheen Capadngan
f101366bce fix: resolved double border conflict 2024-06-13 01:35:17 +08:00
Vladyslav Matsiiako
76c468ecc7 style updates 2024-06-12 10:12:51 -07:00
Sheen Capadngan
dcf315a524 misc: addressed ui comments 2024-06-13 00:09:35 +08:00
Sheen Capadngan
f8a4b6365c Merge pull request #1961 from Infisical/misc/resolved-goreleaser-hardcode
misc: resolved goreleaser hardcoded version
2024-06-12 22:02:41 +08:00
Sheen Capadngan
e27d273e8f misc: resolved goreleaser hardcoded version 2024-06-12 22:00:14 +08:00
Maidul Islam
30dc2d0fcb Merge pull request #1960 from Infisical/misc/hardcoded-goreleaser-version-for-cli
misc: hardcoded goreleaser version
2024-06-12 09:57:37 -04:00
Sheen Capadngan
12e217d200 misc: hardcoded goreleaser version 2024-06-12 21:54:35 +08:00
Maidul Islam
a3a1c9d2e5 Merge pull request #1959 from Infisical/fix/resolved-cli-release-action
fix: resolved secret name mismatch for cli release action
2024-06-12 08:52:58 -04:00
Sheen Capadngan
0f266ebe9e fix: resolved secret name mismatch for cli release action 2024-06-12 20:45:57 +08:00
Maidul Islam
506e0b1342 Merge pull request #1953 from akhilmhdh/main
Trailing slash in secret approval policy and overview bug
2024-06-12 08:43:08 -04:00
Sheen Capadngan
579948ea6d feat: added projects list view 2024-06-12 20:18:39 +08:00
Akhil Mohan
958ad8236a Merge pull request #1932 from minuchi/fix/typo-in-gh-actions
fix: remove extraneous 'r' causing script error in github actions
2024-06-12 16:04:45 +05:30
ShubhamPalriwala
e6ed1231cd feat: custom rate limit for self hosters 2024-06-12 10:24:36 +05:30
Daniel Hougaard
b06b8294e9 Merge pull request #1946 from Infisical/daniel/fix-username-unique-bug
Fix: Email confirmation during SAML login failing (edge-case)
2024-06-12 04:39:32 +02:00
Tuan Dang
cb9dabe03f Delete unaccepted users upon merge user op 2024-06-11 18:41:35 -07:00
Daniel Hougaard
9197530b43 Docs 2024-06-12 02:37:09 +02:00
Daniel Hougaard
1eae7d0c30 Feat: Full auth method support 2024-06-12 02:12:54 +02:00
Daniel Hougaard
cc8119766a Feat: Implementation of Go SDK 2024-06-12 02:10:56 +02:00
Daniel Hougaard
928d5a5240 Delete machine-identity-token.go 2024-06-12 02:10:37 +02:00
Daniel Hougaard
32dd478894 Fix: Local ETag computation 2024-06-12 02:10:35 +02:00
Daniel Hougaard
c3f7c1d46b Install Infisical Go SDK 2024-06-12 02:10:26 +02:00
Daniel Hougaard
89644703a0 Update sample.yaml 2024-06-12 02:10:11 +02:00
Daniel Hougaard
d20b897f28 Update secrets.infisical.com_infisicalsecrets.yaml 2024-06-12 02:01:03 +02:00
Daniel Hougaard
70e022826e Update zz_generated.deepcopy.go 2024-06-12 02:00:59 +02:00
Daniel Hougaard
b7f5fa2cec Types 2024-06-12 01:58:45 +02:00
Daniel Hougaard
7b444e91a8 Helm 2024-06-12 01:58:35 +02:00
Daniel Hougaard
7626dbb96e Fix: Permission error page displayed after user sign up if organization enforces SAML auth 2024-06-12 00:55:33 +02:00
Daniel Hougaard
869be3c273 Improvements 2024-06-12 00:28:11 +02:00
=
9a2355fe63 feat: removed trailing slash from secret input and fixed overview not showing nested imported secrets 2024-06-12 00:17:42 +05:30
=
3929a82099 feat: resolved approval failing for trailing slash 2024-06-12 00:16:27 +05:30
BlackMagiq
40e5c6ef66 Merge pull request #1945 from Infisical/daniel/scim-fix
Fix: SCIM Groups find by filter
2024-06-11 10:42:24 -07:00
Maidul Islam
6c95e75d0d Merge pull request #1952 from akhilmhdh/feat/fix-signup
fix: resolved signup failing in cloud
2024-06-11 11:36:53 -04:00
=
d6c9e6db75 fix: resolved signup failing in cloud 2024-06-11 21:03:06 +05:30
Akhil Mohan
76f87a7708 Merge pull request #1951 from akhilmhdh/fix/password-state-stuck
fix: stuck on password step resolved
2024-06-11 20:09:51 +05:30
Sheen Capadngan
366f03080d Merge pull request #1757 from Infisical/fix/resolved-cli-offline-mode-get
fix: resolved cli offline mode get
2024-06-11 22:32:59 +08:00
Sheen Capadngan
dfdd8e95f9 misc: renamed connection check method 2024-06-11 22:14:19 +08:00
Sheen Capadngan
87df5a2749 misc: addressed PR comments 2024-06-11 21:54:14 +08:00
=
c4797ea060 fix: stuck on password step resolved 2024-06-11 19:09:12 +05:30
Sheen Capadngan
6e011a0b52 Merge pull request #1950 from Infisical/fix/resolved-import-override-display-1
feat: handled import override in the API layer
2024-06-11 17:41:43 +08:00
Sheen Capadngan
05ed00834a misc: used set 2024-06-11 17:14:24 +08:00
Sheen Capadngan
38b0edf510 fix: addressed lint issue 2024-06-11 17:07:24 +08:00
Sheen Capadngan
56b9506b39 fix: type fix 2024-06-11 16:48:16 +08:00
Sheen Capadngan
ae34e015db fix: added missing required property 2024-06-11 16:43:51 +08:00
Sheen Capadngan
7c42768cd8 feat: handled import overwrite in the API layer 2024-06-11 16:27:12 +08:00
Sheen Capadngan
b4a9e0e62d Merge pull request #1948 from Infisical/fix/resolved-import-override-display
fix: resolved import override behavior
2024-06-11 15:55:19 +08:00
BlackMagiq
30606093f4 Merge pull request #1949 from Infisical/streamline-smtp
Update SMTP configuration
2024-06-11 00:32:50 -07:00
Tuan Dang
16862a3b33 Fix lint issue 2024-06-11 00:01:58 -07:00
Tuan Dang
e800a455c4 Update SMTP config 2024-06-10 23:45:40 -07:00
Sheen Capadngan
ba0de6afcf fix: resolved import override behavior 2024-06-11 14:04:59 +08:00
Maidul Islam
bfc82105bd Merge pull request #1947 from Infisical/patch-multi-line-encoding
Patch multi line encoding when expandSecretRef is enabled
2024-06-11 00:55:31 -04:00
Maidul Islam
00fd44b33a fix type issues 2024-06-11 00:34:54 -04:00
Maidul Islam
e2550d70b5 make skipMultilineEncoding null|undefined|bool 2024-06-11 00:28:55 -04:00
Maidul Islam
163d33509b Patch multi line encoding when expandSecretRef is enabled
By default, when you create a secret, multi-line encoding is off, but we actually treat it as true in the backend and UI. Users aren't expecting their multi-line secrets to be double quoted and collapsed into a single line with \n, yet we currently do this by default. This PR makes multi-line encoding opt-in rather than opt-out.
2024-06-10 23:42:07 -04:00
Sheen Capadngan
c8a3252c1a Merge pull request #1929 from Infisical/feat/add-option-to-mask-and-protect-gitlab-secrets
feat: add integration option to mask and protect gitlab secrets
2024-06-11 11:38:18 +08:00
Daniel Hougaard
0bba1801b9 Merge pull request #1936 from Infisical/daniel/go-sdk-docs
Docs: Go SDK
2024-06-11 04:27:17 +02:00
Daniel Hougaard
a61e92c49c Update go.mdx 2024-06-11 04:26:35 +02:00
Daniel Hougaard
985116c6f2 Update user-service.ts 2024-06-11 04:04:42 +02:00
Shubham Palriwala
9945d249d6 Merge pull request #1937 from Infisical/shubham/eng-487-investigate-the-stripedb-inconsistency
fix: update org seats whenever membership status is accepted
2024-06-11 07:05:27 +05:30
Daniel Hougaard
8bc9a5efcd Fix: SCIM Groups find by filter 2024-06-11 00:00:16 +02:00
Maidul Islam
b31d2be3f3 Merge pull request #1931 from minuchi/fix/preserve-annotations-when-updating
fix: preserve existing annotations when updating managed secret
2024-06-10 17:14:29 -04:00
Maidul Islam
8329cbf299 update toggle lable 2024-06-10 17:09:20 -04:00
Maidul Islam
9138ab8ed7 update flag describe 2024-06-10 17:01:16 -04:00
Maidul Islam
ea517bc199 Merge pull request #1941 from belikedeep/docker-compose-command-update
Fix: Updated docker compose command (from compose V1 to compose v2)
2024-06-10 16:46:10 -04:00
Maidul Islam
a82b813553 describe isReplication flag 2024-06-10 16:20:29 -04:00
Sheen Capadngan
cf9169ad6f test: resolved test issues 2024-06-11 03:37:51 +08:00
Maidul Islam
af03f706ba Merge pull request #1943 from Infisical/create-pull-request/patch-1718043322
GH Action: rename new migration file timestamp
2024-06-10 14:15:55 -04:00
github-actions
9cf5bbc5d5 chore: renamed new migration files to latest timestamp (gh-action) 2024-06-10 18:15:21 +00:00
Maidul Islam
9161dd5e13 Merge pull request #1925 from Infisical/feat/add-captcha
feat: added captcha to password login
2024-06-10 14:14:57 -04:00
Sheen Capadngan
69b76aea64 misc: added secrets folder path to backup scoping 2024-06-11 01:45:34 +08:00
Sheen Capadngan
c9a95023be Revert "adjustment: moved backup logic to cmd layer"
This reverts commit 8fc4fd64f8.
2024-06-11 01:03:26 +08:00
Sheen Capadngan
9db5be1c91 Revert "adjustment: moved secret backup logic to cmd layer"
This reverts commit 920b9a7dfa.
2024-06-11 00:58:36 +08:00
Sheen Capadngan
a1b41ca454 Revert "feature: added offline support for infisical export"
This reverts commit 88a4fb84e6.
2024-06-11 00:52:59 +08:00
Sheen Capadngan
6c252b4bfb misc: revert backup flow modification for run.go 2024-06-11 00:49:57 +08:00
Sheen Capadngan
aafddaa856 misc: finalized option label 2024-06-10 23:08:39 +08:00
Sheen Capadngan
776f464bee misc: used metadata schema parsing 2024-06-10 22:45:17 +08:00
Sheen Capadngan
104b0d6c60 Merge remote-tracking branch 'origin/main' into feat/add-option-to-mask-and-protect-gitlab-secrets 2024-06-10 22:41:44 +08:00
belikedeep
9303124f5f Updated docker compose command (from compose V1 to compose v2) 2024-06-10 20:09:08 +05:30
Sheen Capadngan
03c9a5606b Merge pull request #1928 from Infisical/feat/add-option-for-delete-disabling-github-integ
feat: added option for disabling github secret deletion
2024-06-10 22:35:04 +08:00
Sheen Capadngan
e696bff004 misc: optimized prune orphan snapshots 2024-06-10 21:08:14 +08:00
Sheen Capadngan
d9c4c332ea feat: added handling of versioned folders and cleanup script 2024-06-10 20:40:19 +08:00
Sheen Capadngan
120e482c6f Merge remote-tracking branch 'origin/main' into fix/resolved-cli-offline-mode-get 2024-06-10 14:15:18 +08:00
Sheen Capadngan
f4a1a00b59 misc: improved text of github option 2024-06-10 14:00:56 +08:00
Sheen Capadngan
b9933d711c misc: addressed schema update 2024-06-10 13:58:40 +08:00
Sheen Capadngan
1abdb531d9 misc: removed comments 2024-06-10 13:36:53 +08:00
Sheen Capadngan
59b3123eb3 adjustment: removed unintended yaml updates 2024-06-10 13:33:42 +08:00
Sheen Capadngan
c1954a6386 Merge branch 'feat/add-captcha' of https://github.com/Infisical/infisical into feat/add-captcha 2024-06-10 13:27:31 +08:00
Sheen Capadngan
0bbb86ee2a misc: simplified captcha flag and finalized build process 2024-06-10 13:24:44 +08:00
ShubhamPalriwala
7c9c65312b fix: pass correct id 2024-06-10 09:04:34 +05:30
ShubhamPalriwala
8a46cbd08f fix: update org seats whenever membership status is accepted 2024-06-10 09:02:11 +05:30
Daniel Hougaard
fa05639592 Docs: Go SDK 2024-06-10 05:18:39 +02:00
Maidul Islam
429b2a284d Merge branch 'main' into feat/add-captcha 2024-06-09 17:44:55 -04:00
Maidul Islam
6c596092b0 Merge pull request #1927 from Infisical/shubham/eng-983-optimise-secretinput-usage-to-mask-secret-when-not-in-focus
fix: share secret input now masks value onBlur
2024-06-09 17:43:30 -04:00
Maidul Islam
fcd13eac8a update saml org slug environment 2024-06-09 14:41:23 -04:00
Maidul Islam
1fb653754c update saml slug env 2024-06-09 14:37:13 -04:00
Akhil Mohan
bb1d73b0f5 Merge pull request #1935 from Infisical/fix-saml-auto-redirect
patch saml auto redirect
2024-06-09 23:09:29 +05:30
Maidul Islam
59e9226d85 patch saml auto redirect 2024-06-09 13:30:44 -04:00
Maidul Islam
e6f42e1231 Merge pull request #1933 from Infisical/add-folder-sorting
added sorting for folders in overview
2024-06-08 22:31:07 -04:00
minuchi
4c0e04528e fix: remove extraneous 'r' causing script error in github actions 2024-06-09 02:21:57 +09:00
minuchi
6d40d951c6 fix: preserve existing annotations when updating managed secret 2024-06-09 01:43:41 +09:00
ShubhamPalriwala
e5b7ebbabf revert: change in core component 2024-06-08 05:47:47 +05:30
Sheen Capadngan
7fe7056af4 Merge remote-tracking branch 'origin/main' into feat/add-version-limits 2024-06-08 01:37:14 +08:00
Sheen Capadngan
610dd07a57 misc: updated failed password attempt limit for captcha 2024-06-08 00:39:14 +08:00
Sheen Capadngan
9d6d7540dc misc: removed unnecessary project property 2024-06-08 00:34:33 +08:00
Sheen Capadngan
847c2c67ec adjustment: made secret-deletion opt in 2024-06-08 00:30:30 +08:00
Sheen Capadngan
2bd9ad0137 feat: add option to maks and protect gitlab secrets 2024-06-07 21:46:34 +08:00
Sheen Capadngan
76a424dcfb feat: added option for disabling github secret deletion 2024-06-07 19:00:51 +08:00
ShubhamPalriwala
9d46c269d4 fix: secret input on tab moves to next field and masks value 2024-06-07 13:05:04 +05:30
Sheen Capadngan
15c05b4910 misc: finalized captcha error message 2024-06-06 21:54:35 +08:00
Sheen Capadngan
65d88ef08e misc: improved ux by requiring captcha entry before submission 2024-06-06 21:24:25 +08:00
Sheen Capadngan
81e4129e51 feat: added base captcha implementation 2024-06-06 20:42:54 +08:00
Sheen Capadngan
ee152f2d20 misc: added cleanup frequency note for pit versions 2024-06-04 23:50:10 +08:00
Sheen Capadngan
f21a13f388 adjustment: removed artificial limiting of pit versions 2024-06-04 23:46:02 +08:00
Sheen Capadngan
68a30f4212 misc: removed transactional 2024-06-03 22:06:32 +08:00
Sheen Capadngan
4d830f1d1a misc: added outer try catch block 2024-06-03 17:58:39 +08:00
Sheen Capadngan
cd6caab508 misc: migrated to using keyset pagnination 2024-06-03 17:56:00 +08:00
Sheen Capadngan
ab093dfc85 misc: simplified delete query for secret folder version 2024-06-03 12:49:40 +08:00
Sheen Capadngan
b8e9417466 misc: modified pruning sql logic 2024-06-01 03:26:35 +08:00
Sheen Capadngan
4eb08c64d4 misc: updated error message 2024-06-01 01:07:25 +08:00
Sheen Capadngan
d76760fa9c misc: updated schema 2024-05-31 23:42:49 +08:00
Sheen Capadngan
4d8f94a9dc feat: added version prune to daily resource queue 2024-05-31 23:25:56 +08:00
Sheen Capadngan
abd8d6aa8a feat: added support for version limit update 2024-05-31 23:18:02 +08:00
Sheen Capadngan
9117067ab5 feat: finalized pruning logic 2024-05-31 21:38:16 +08:00
Sheen Capadngan
3a1168c7e8 feat: added initial version pruning and result limiting 2024-05-31 19:12:55 +08:00
Sheen Capadngan
88a4fb84e6 feature: added offline support for infisical export 2024-05-02 03:21:20 +08:00
Sheen Capadngan
a1e8f45a86 misc: added new cli secrets to release build gh action 2024-05-02 01:35:19 +08:00
Sheen Capadngan
04dca9432d misc: updated test comment 2024-05-02 01:09:12 +08:00
Sheen Capadngan
920b9a7dfa adjustment: moved secret backup logic to cmd layer 2024-05-02 00:59:17 +08:00
Sheen Capadngan
8fc4fd64f8 adjustment: moved backup logic to cmd layer 2024-05-02 00:49:29 +08:00
Sheen Capadngan
24f7ecc548 misc: removed infisical init logs 2024-05-01 21:41:07 +08:00
Sheen Capadngan
a5ca96f2df test: restructed setup and added scripting for infisical init 2024-05-01 21:39:20 +08:00
Sheen Capadngan
505ccdf8ea misc: added script for cli-tests env setup 2024-05-01 21:37:18 +08:00
Sheen Capadngan
3897bd70fa adjustment: removed cli display for pty 2024-05-01 11:08:58 +08:00
Sheen Capadngan
4479e626c7 adjustment: renamed cli vault file phrase env 2024-05-01 01:56:10 +08:00
Sheen Capadngan
6640b55504 misc: added envs required for cli test of infisical login 2024-05-01 01:49:06 +08:00
Sheen Capadngan
85f024c814 test: added scripting for user login 2024-05-01 01:45:24 +08:00
Sheen Capadngan
531fa634a2 feature: add logs for cli execution error 2024-04-30 22:02:22 +08:00
Sheen Capadngan
772dd464f5 test: added integration test for secrets get all and secrets get all without connection 2024-04-30 21:11:29 +08:00
Sheen Capadngan
877b9a409e adjustment: modified isConnected check to query linked infisical URL 2024-04-30 21:00:34 +08:00
Sheen Capadngan
104a91647c fix: resolved cli offline mode get 2024-04-29 21:18:13 +08:00
143 changed files with 4134 additions and 891 deletions

View File

@@ -63,3 +63,7 @@ CLIENT_SECRET_GITHUB_LOGIN=
CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=

View File

@@ -47,7 +47,7 @@ jobs:
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
r HEALTHY=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."

View File

@@ -22,6 +22,9 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
goreleaser:
runs-on: ubuntu-20.04
@@ -56,7 +59,7 @@ jobs:
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}

View File

@@ -20,7 +20,12 @@ on:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
@@ -43,5 +48,8 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

View File

@@ -1,7 +1,7 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG SAML_ORG_SLUG=saml-org-slug-default
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-alpine AS base
@@ -36,8 +36,8 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -113,9 +113,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /

View File

@@ -85,13 +85,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
```
Create an account at `http://localhost:80`

View File

@@ -36,6 +36,7 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -4806,6 +4807,11 @@
"long": "*"
}
},
"node_modules/@types/luxon": {
"version": "3.4.2",
"resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.4.2.tgz",
"integrity": "sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA=="
},
"node_modules/@types/mime": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
@@ -6689,6 +6695,15 @@
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true
},
"node_modules/cron": {
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/cron/-/cron-3.1.7.tgz",
"integrity": "sha512-tlBg7ARsAMQLzgwqVxy8AZl/qlTc5nibqYwtNGoCrd+cV+ugI+tvZC1oT/8dFH8W455YrywGykx/KMmAqOr7Jw==",
"dependencies": {
"@types/luxon": "~3.4.0",
"luxon": "~3.4.0"
}
},
"node_modules/cron-parser": {
"version": "4.9.0",
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",

View File

@@ -97,6 +97,7 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",

View File

@@ -14,6 +14,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -147,6 +148,7 @@ declare module "fastify" {
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

View File

@@ -149,6 +149,9 @@ import {
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate,
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@@ -343,6 +346,7 @@ declare module "knex/types/tables" {
TSecretFolderVersionsUpdate
>;
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
[TableName.RateLimit]: Knex.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;

View File

@@ -0,0 +1,29 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (!hasConsecutiveFailedPasswordAttempts) {
tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (hasConsecutiveFailedPasswordAttempts) {
tb.dropColumn("consecutiveFailedPasswordAttempts");
}
});
}
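
The commits in this range tie the new column to the captcha flow ("feat: added captcha to password login", "misc: updated failed password attempt limit for captcha"). The login-handler wiring and threshold are not part of these hunks; a hypothetical sketch of how the counter might gate the captcha check:

```ts
// Hypothetical illustration only: the threshold and helper name are assumptions;
// only consecutiveFailedPasswordAttempts comes from the migration above.
type TLoginUser = {
  id: string;
  consecutiveFailedPasswordAttempts?: number | null;
};

const CAPTCHA_FAILED_ATTEMPT_THRESHOLD = 10; // assumed value

export const shouldRequestCaptcha = (user: TLoginUser, isCaptchaConfigured: boolean): boolean =>
  isCaptchaConfigured && (user.consecutiveFailedPasswordAttempts ?? 0) >= CAPTCHA_FAILED_ATTEMPT_THRESHOLD;

// On a failed password check the counter would be incremented; on a successful
// login it would be reset to 0.
```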

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (!hasPitVersionLimitColumn) {
tb.integer("pitVersionLimit").notNullable().defaultTo(10);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (hasPitVersionLimitColumn) {
tb.dropColumn("pitVersionLimit");
}
});
}
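
The commit list mentions "feat: added support for version limit update"; the API route for that update is not included in these hunks. A minimal knex sketch of the underlying column change, with the project id and value as placeholders:

```ts
import { Knex } from "knex";

import { TableName } from "../schemas";

// Raise the number of retained point-in-time versions for a single project.
export async function setPitVersionLimit(knex: Knex, projectId: string, limit: number) {
  await knex(TableName.Project).where({ id: projectId }).update({ pitVersionLimit: limit });
}
```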

View File

@@ -0,0 +1,31 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.RateLimit))) {
await knex.schema.createTable(TableName.RateLimit, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("readRateLimit").defaultTo(600).notNullable();
t.integer("writeRateLimit").defaultTo(200).notNullable();
t.integer("secretsRateLimit").defaultTo(60).notNullable();
t.integer("authRateLimit").defaultTo(60).notNullable();
t.integer("inviteUserRateLimit").defaultTo(30).notNullable();
t.integer("mfaRateLimit").defaultTo(20).notNullable();
t.integer("creationLimit").defaultTo(30).notNullable();
t.integer("publicEndpointLimit").defaultTo(30).notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.RateLimit);
// create init rate limit entry with defaults
await knex(TableName.RateLimit).insert({});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.RateLimit);
await dropOnUpdateTrigger(knex, TableName.RateLimit);
}

View File

@@ -48,6 +48,7 @@ export * from "./project-roles";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";

View File

@@ -18,6 +18,7 @@ export enum TableName {
IncidentContact = "incident_contacts",
UserAction = "user_actions",
SuperAdmin = "super_admin",
RateLimit = "rate_limit",
ApiKey = "api_keys",
Project = "projects",
ProjectBot = "project_bots",

View File

@@ -16,7 +16,8 @@ export const ProjectsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional()
upgradeStatus: z.string().nullable().optional(),
pitVersionLimit: z.number().default(10)
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const RateLimitSchema = z.object({
id: z.string().uuid(),
readRateLimit: z.number().default(600),
writeRateLimit: z.number().default(200),
secretsRateLimit: z.number().default(60),
authRateLimit: z.number().default(60),
inviteUserRateLimit: z.number().default(30),
mfaRateLimit: z.number().default(20),
creationLimit: z.number().default(30),
publicEndpointLimit: z.number().default(30),
createdAt: z.date(),
updatedAt: z.date()
});
export type TRateLimit = z.infer<typeof RateLimitSchema>;
export type TRateLimitInsert = Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>;
export type TRateLimitUpdate = Partial<Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>>;
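
Since every numeric field carries a `.default()`, a row validates with only the non-defaulted columns present; the import below matches the one used by the rate-limit router further down:

```ts
import { randomUUID } from "crypto";

import { RateLimitSchema } from "@app/db/schemas";

// Defaults are applied for any missing field, mirroring the seeded defaults
// from the rate_limit migration.
const seeded = RateLimitSchema.parse({
  id: randomUUID(),
  createdAt: new Date(),
  updatedAt: new Date()
});

console.log(seeded.readRateLimit); // 600
console.log(seeded.mfaRateLimit); // 20
```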

View File

@@ -25,7 +25,8 @@ export const UsersSchema = z.object({
isEmailVerified: z.boolean().default(false).nullable().optional(),
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional()
temporaryLockDateEnd: z.date().nullable().optional(),
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
});
export type TUsers = z.infer<typeof UsersSchema>;

View File

@@ -10,6 +10,7 @@ import { registerLicenseRouter } from "./license-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
import { registerSamlRouter } from "./saml-router";
import { registerScimRouter } from "./scim-router";
import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router";
@@ -45,6 +46,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" });
await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" });
await server.register(registerRateLimitRouter, { prefix: "/rate-limit" });
await server.register(
async (dynamicSecretRouter) => {

View File

@@ -0,0 +1,75 @@
import { z } from "zod";
import { RateLimitSchema } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
rateLimit: RateLimitSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async () => {
const rateLimit = await server.services.rateLimit.getRateLimits();
if (!rateLimit) {
throw new BadRequestError({
name: "Get Rate Limit Error",
message: "Rate limit configuration does not exist."
});
}
return { rateLimit };
}
});
server.route({
method: "PUT",
url: "/",
config: {
rateLimit: readLimit
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
schema: {
body: z.object({
readRateLimit: z.number(),
writeRateLimit: z.number(),
secretsRateLimit: z.number(),
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
}),
response: {
200: z.object({
rateLimit: RateLimitSchema
})
}
},
handler: async (req) => {
const rateLimit = await server.services.rateLimit.updateRateLimit(req.body);
return { rateLimit };
}
});
};
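
A hypothetical admin-side call against these endpoints. The `/api/v1/rate-limit` path is inferred from the `/rate-limit` prefix registered in the EE v1 router above, and the base URL and token handling are placeholders:

```ts
const baseUrl = process.env.INFISICAL_URL ?? "http://localhost:8080"; // placeholder
const adminJwt = process.env.INFISICAL_ADMIN_JWT ?? ""; // must belong to a super admin

const res = await fetch(`${baseUrl}/api/v1/rate-limit`, {
  method: "PUT",
  headers: { Authorization: `Bearer ${adminJwt}`, "Content-Type": "application/json" },
  // The PUT schema above requires every field to be present.
  body: JSON.stringify({
    readRateLimit: 1200,
    writeRateLimit: 400,
    secretsRateLimit: 120,
    authRateLimit: 60,
    inviteUserRateLimit: 30,
    mfaRateLimit: 20,
    creationLimit: 30,
    publicEndpointLimit: 30
  })
});

if (!res.ok) throw new Error(`Rate limit update failed: ${res.status}`);
console.log(await res.json()); // { rateLimit: { ... } }
```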

View File

@@ -362,6 +362,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const groups = await req.server.services.scim.listScimGroups({
orgId: req.permission.orgId,
startIndex: req.query.startIndex,
filter: req.query.filter,
limit: req.query.count
});

View File

@@ -1,6 +1,7 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
@@ -19,7 +20,11 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z.string().optional().nullable(),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val)),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1)
})
@@ -63,7 +68,11 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
name: z.string().optional(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
secretPath: z.string().optional().nullable()
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
@@ -157,7 +166,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
querystring: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),
secretPath: z.string().trim()
secretPath: z.string().trim().transform(removeTrailingSlash)
}),
response: {
200: z.object({

View File

@@ -77,7 +77,7 @@ type TLdapConfigServiceFactoryDep = {
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
};
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@@ -510,6 +510,7 @@ export const ldapConfigServiceFactory = ({
return newUserAlias;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const user = await userDAL.transaction(async (tx) => {
const newUser = await userDAL.findOne({ id: userAlias.userId }, tx);

View File

@@ -575,6 +575,9 @@ export const licenseServiceFactory = ({
getInstanceType() {
return instanceType;
},
get onPremFeatures() {
return onPremFeatures;
},
getPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,

View File

@@ -0,0 +1,7 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TRateLimitDALFactory = ReturnType<typeof rateLimitDALFactory>;
export const rateLimitDALFactory = (db: TDbClient) => ormify(db, TableName.RateLimit, {});

View File

@@ -0,0 +1,106 @@
import { CronJob } from "cron";
import { logger } from "@app/lib/logger";
import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
let rateLimitMaxConfiguration = {
readLimit: 60,
publicEndpointLimit: 30,
writeLimit: 200,
secretsLimit: 60,
authRateLimit: 60,
inviteUserRateLimit: 30,
mfaRateLimit: 20,
creationLimit: 30
};
Object.freeze(rateLimitMaxConfiguration);
export const getRateLimiterConfig = () => {
return rateLimitMaxConfiguration;
};
type TRateLimitServiceFactoryDep = {
rateLimitDAL: TRateLimitDALFactory;
licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
};
export type TRateLimitServiceFactory = ReturnType<typeof rateLimitServiceFactory>;
export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateLimitServiceFactoryDep) => {
const DEFAULT_RATE_LIMIT_CONFIG_ID = "00000000-0000-0000-0000-000000000000";
const getRateLimits = async (): Promise<TRateLimit | undefined> => {
let rateLimit: TRateLimit;
try {
rateLimit = await rateLimitDAL.findOne({ id: DEFAULT_RATE_LIMIT_CONFIG_ID });
if (!rateLimit) {
// rate limit might not exist
rateLimit = await rateLimitDAL.create({
// @ts-expect-error id is kept as fixed because there should only be one rate limit config per instance
id: DEFAULT_RATE_LIMIT_CONFIG_ID
});
}
return rateLimit;
} catch (err) {
logger.error("Error fetching rate limits %o", err);
return undefined;
}
};
const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> => {
return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates);
};
const syncRateLimitConfiguration = async () => {
try {
const rateLimit = await getRateLimits();
if (rateLimit) {
const newRateLimitMaxConfiguration: typeof rateLimitMaxConfiguration = {
readLimit: rateLimit.readRateLimit,
publicEndpointLimit: rateLimit.publicEndpointLimit,
writeLimit: rateLimit.writeRateLimit,
secretsLimit: rateLimit.secretsRateLimit,
authRateLimit: rateLimit.authRateLimit,
inviteUserRateLimit: rateLimit.inviteUserRateLimit,
mfaRateLimit: rateLimit.mfaRateLimit,
creationLimit: rateLimit.creationLimit
};
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
Object.freeze(newRateLimitMaxConfiguration);
rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
}
} catch (error) {
logger.error(`Error syncing rate limit configurations: %o`, error);
}
};
const initializeBackgroundSync = async () => {
if (!licenseService.onPremFeatures.customRateLimits) {
logger.info("Current license does not support custom rate limit configuration");
return;
}
logger.info("Setting up background sync process for rate limits");
// initial sync upon startup
await syncRateLimitConfiguration();
// sync rate limits configuration every 10 minutes
const job = new CronJob("*/10 * * * *", syncRateLimitConfiguration);
job.start();
return job;
};
return {
getRateLimits,
updateRateLimit,
initializeBackgroundSync,
syncRateLimitConfiguration
};
};
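
A hypothetical wiring sketch showing how a server bootstrap might start the background sync and how consumers read the synced values; the factories are the ones defined above, while the file layout and call site are assumptions:

```ts
import { TDbClient } from "@app/db";

import { rateLimitDALFactory } from "./rate-limit-dal";
import { getRateLimiterConfig, rateLimitServiceFactory } from "./rate-limit-service";

type TDeps = Parameters<typeof rateLimitServiceFactory>[0];

// Returns the CronJob, or undefined when the license lacks custom rate limits.
export const startRateLimitSync = async (db: TDbClient, licenseService: TDeps["licenseService"]) => {
  const rateLimitDAL = rateLimitDALFactory(db);
  const rateLimitService = rateLimitServiceFactory({ rateLimitDAL, licenseService });

  // Initial sync on startup, then a refresh every 10 minutes.
  const job = await rateLimitService.initializeBackgroundSync();

  // Rate limiter option objects read the latest synced values lazily.
  console.log("current read limit:", getRateLimiterConfig().readLimit);
  return job;
};
```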

View File

@@ -0,0 +1,16 @@
export type TRateLimitUpdateDTO = {
readRateLimit: number;
writeRateLimit: number;
secretsRateLimit: number;
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};
export type TRateLimit = {
id: string;
createdAt: Date;
updatedAt: Date;
} & TRateLimitUpdateDTO;

View File

@@ -50,7 +50,7 @@ type TSamlConfigServiceFactoryDep = {
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
};
@@ -449,6 +449,7 @@ export const samlConfigServiceFactory = ({
return newUser;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted);
const providerAuthToken = jwt.sign(

View File

@@ -18,6 +18,20 @@ export const buildScimUserList = ({
};
};
export const parseScimFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
} else if (parsedName === "displayName") {
attributeName = "name";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
export const buildScimUser = ({
orgMembershipId,
username,
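
A quick illustration of what the extracted helper does with typical SCIM filter expressions (the values are hypothetical):

```ts
import { parseScimFilter } from "./scim-fns";

// SCIM clients filter with `attribute eq "value"`; the helper maps SCIM
// attribute names onto the internal column names used by the DAL queries.
console.log(parseScimFilter(`userName eq "jane@acme.com"`)); // { email: 'jane@acme.com' }
console.log(parseScimFilter(`displayName eq "Engineering"`)); // { name: 'Engineering' }
console.log(parseScimFilter(undefined)); // {}
```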

View File

@@ -30,7 +30,7 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList } from "./scim-fns";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
import {
TCreateScimGroupDTO,
TCreateScimTokenDTO,
@@ -184,18 +184,6 @@ export const scimServiceFactory = ({
status: 403
});
const parseFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
const findOpts = {
...(startIndex && { offset: startIndex - 1 }),
...(limit && { limit })
@@ -204,7 +192,7 @@ export const scimServiceFactory = ({
const users = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.orgId` as "id"]: orgId,
...parseFilter(filter)
...parseScimFilter(filter)
},
findOpts
);
@@ -391,7 +379,7 @@ export const scimServiceFactory = ({
);
}
}
await licenseService.updateSubscriptionOrgMemberCount(org.id);
return { user, orgMembership };
});
@@ -557,7 +545,7 @@ export const scimServiceFactory = ({
return {}; // intentionally return empty object upon success
};
const listScimGroups = async ({ orgId, startIndex, limit }: TListScimGroupsDTO) => {
const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
@@ -580,7 +568,8 @@ export const scimServiceFactory = ({
const groups = await groupDAL.findGroups(
{
orgId
orgId,
...(filter && parseScimFilter(filter))
},
{
offset: startIndex - 1,

View File

@@ -66,6 +66,7 @@ export type TDeleteScimUserDTO = {
export type TListScimGroupsDTO = {
startIndex: number;
filter?: string;
limit: number;
orgId: string;
};

View File

@@ -4,6 +4,7 @@ import picomatch from "picomatch";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { containsGlobPatterns } from "@app/lib/picomatch";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -207,7 +208,8 @@ export const secretApprovalPolicyServiceFactory = ({
return sapPolicies;
};
const getSecretApprovalPolicy = async (projectId: string, environment: string, secretPath: string) => {
const getSecretApprovalPolicy = async (projectId: string, environment: string, path: string) => {
const secretPath = removeTrailingSlash(path);
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) throw new BadRequestError({ message: "Environment not found" });
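
The same normalization is applied by the router schemas above via a zod transform. A standalone sketch of the behaviour; the local helper here is a stand-in for `removeTrailingSlash` from `@app/lib/fn`, which may differ on edge cases:

```ts
import { z } from "zod";

// Stand-in for @app/lib/fn's removeTrailingSlash, shown only to illustrate
// the transform applied to secretPath inputs.
const removeTrailingSlash = (s: string) => (s.length > 1 ? s.replace(/\/+$/, "") : s);

const secretPath = z
  .string()
  .optional()
  .nullable()
  .transform((val) => (val ? removeTrailingSlash(val) : val));

console.log(secretPath.parse("/deep/nested/")); // "/deep/nested"
console.log(secretPath.parse("/deep/nested")); // "/deep/nested"
console.log(secretPath.parse(null)); // null
```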

View File

@@ -81,8 +81,7 @@ export const secretSnapshotServiceFactory = ({
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id);
return count;
return snapshotDAL.countOfSnapshotsByFolderId(folder.id);
};
const listSnapshots = async ({

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { TDbClient } from "@app/db";
@@ -11,6 +12,7 @@ import {
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;
@@ -325,12 +327,152 @@ export const snapshotDALFactory = (db: TDbClient) => {
}
};
/**
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
*
* This function operates in three main steps:
* 1. Pruning snapshots from root/non-versioned folders.
* 2. Pruning snapshots from versioned folders.
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
*
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
* to manage the large datasets without overwhelming the DB.
*
* Steps:
* - Fetch a batch of folder IDs.
* - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date.
* - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`).
* - Repeat the process for versioned folders.
* - Finally, delete orphaned snapshots that do not have an associated folder.
*/
const pruneExcessSnapshots = async () => {
const PRUNE_FOLDER_BATCH_SIZE = 10000;
try {
let uuidOffset = "00000000-0000-0000-0000-000000000000";
// cleanup snapshots from root/non-versioned folders
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
while (true) {
const folderBatch = await db(TableName.SecretFolder)
.where("id", ">", uuidOffset)
.where("isReserved", false)
.orderBy("id", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE)
.select("id");
const batchEntries = folderBatch.map((folder) => folder.id);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereNull(`${TableName.SecretFolder}.parentId`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from root/non-versioned folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup snapshots from versioned folders
uuidOffset = "00000000-0000-0000-0000-000000000000";
// eslint-disable-next-line no-constant-condition
while (true) {
const folderBatch = await db(TableName.SecretFolderVersion)
.select("folderId")
.distinct("folderId")
.where("folderId", ">", uuidOffset)
.orderBy("folderId", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE);
const batchEntries = folderBatch.map((folder) => folder.folderId);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(
TableName.SecretFolderVersion,
`${TableName.SecretFolderVersion}.folderId`,
`${TableName.Snapshot}.folderId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from versioned folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup orphaned snapshots (those that don't belong to an existing folder and folder version)
await db(TableName.Snapshot)
.whereNotIn("folderId", (qb) => {
void qb
.select("folderId")
.from(TableName.SecretFolderVersion)
.union((qb1) => void qb1.select("id").from(TableName.SecretFolder));
})
.delete();
} catch (error) {
throw new DatabaseError({ error, name: "SnapshotPrune" });
}
};
return {
...secretSnapshotOrm,
findById,
findLatestSnapshotByFolderId,
findRecursivelySnapshots,
countOfSnapshotsByFolderId,
findSecretSnapshotDataById
findSecretSnapshotDataById,
pruneExcessSnapshots
};
};
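
Per the commit list, the prune is attached to the existing daily resource cleanup queue; that wiring is not part of these hunks. A minimal standalone scheduling sketch using the `cron` dependency added in this range (the DAL import path is assumed):

```ts
import { CronJob } from "cron";

import { TDbClient } from "@app/db";
import { logger } from "@app/lib/logger";

import { snapshotDALFactory } from "./snapshot-dal"; // assumed path

// Run the three-step prune (root folders, versioned folders, orphans) daily.
export const scheduleSnapshotPruning = (db: TDbClient) => {
  const snapshotDAL = snapshotDALFactory(db);
  const job = new CronJob("0 0 * * *", async () => {
    logger.info("Pruning excess secret snapshots");
    await snapshotDAL.pruneExcessSnapshots();
  });
  job.start();
  return job;
};
```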

View File

@@ -386,6 +386,8 @@ export const SECRET_IMPORTS = {
environment: "The slug of the environment to import into.",
path: "The path to import into.",
workspaceId: "The ID of the project you are working in.",
isReplication:
"When true, secrets from the source will be automatically sent to the destination. If approval policies exist at the destination, the secrets will be sent as approval requests instead of being applied immediately.",
import: {
environment: "The slug of the environment to import from.",
path: "The path to import from."
@@ -674,7 +676,10 @@ export const INTEGRATION = {
secretGCPLabel: "The label for GCP secrets.",
secretAWSTag: "The tags for AWS secrets.",
kmsKeyId: "The ID of the encryption key from AWS KMS.",
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store."
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
shouldEnableDelete: "The flag to enable deletion of secrets"
}
},
UPDATE: {

View File

@@ -39,7 +39,9 @@ const envSchema = z
HTTPS_ENABLED: zodStrBool,
// smtp options
SMTP_HOST: zpStr(z.string().optional()),
SMTP_SECURE: zodStrBool,
SMTP_IGNORE_TLS: zodStrBool.default("false"),
SMTP_REQUIRE_TLS: zodStrBool.default("true"),
SMTP_TLS_REJECT_UNAUTHORIZED: zodStrBool.default("true"),
SMTP_PORT: z.coerce.number().default(587),
SMTP_USERNAME: zpStr(z.string().optional()),
SMTP_PASSWORD: zpStr(z.string().optional()),
@@ -75,6 +77,7 @@ const envSchema = z
.optional()
.default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL)
), // fallback since URL_GITLAB_LOGIN has been renamed
DEFAULT_SAML_ORG_SLUG: zpStr(z.string().optional()).default(process.env.NEXT_PUBLIC_SAML_ORG_SLUG),
// integration client secrets
// heroku
CLIENT_ID_HEROKU: zpStr(z.string().optional()),
@@ -119,7 +122,8 @@ const envSchema = z
.transform((val) => val === "true")
.optional(),
INFISICAL_CLOUD: zodStrBool.default("false"),
MAINTENANCE_MODE: zodStrBool.default("false")
MAINTENANCE_MODE: zodStrBool.default("false"),
CAPTCHA_SECRET: zpStr(z.string().optional())
})
.transform((data) => ({
...data,
@@ -131,7 +135,8 @@ const envSchema = z
isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET)
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
}));
let envCfg: Readonly<z.infer<typeof envSchema>>;
@@ -150,13 +155,20 @@ export const initEnvConfig = (logger: Logger) => {
return envCfg;
};
export const formatSmtpConfig = () => ({
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_SECURE,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`
});
export const formatSmtpConfig = () => {
return {
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_PORT === 465,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`,
ignoreTLS: envCfg.SMTP_IGNORE_TLS,
requireTLS: envCfg.SMTP_REQUIRE_TLS,
tls: {
rejectUnauthorized: envCfg.SMTP_TLS_REJECT_UNAUTHORIZED
}
};
};
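The SMTP rework above replaces the single `SMTP_SECURE` flag with explicit STARTTLS controls and derives implicit TLS from the port. A minimal sketch of how those flags reach nodemailer is below; values are illustrative, and the option names (`secure`, `ignoreTLS`, `requireTLS`, `tls.rejectUnauthorized`) are standard nodemailer SMTP transport options rather than anything specific to this codebase.

```typescript
// Minimal sketch, assuming env vars are read directly (the project goes through envCfg).
import { createTransport } from "nodemailer";

const port = Number(process.env.SMTP_PORT ?? 587);

const transport = createTransport({
  host: process.env.SMTP_HOST,
  port,
  secure: port === 465, // implicit TLS only on port 465, mirroring formatSmtpConfig above
  ignoreTLS: process.env.SMTP_IGNORE_TLS === "true", // never upgrade to STARTTLS when true
  requireTLS: process.env.SMTP_REQUIRE_TLS !== "false", // fail if the server cannot do STARTTLS
  tls: { rejectUnauthorized: process.env.SMTP_TLS_REJECT_UNAUTHORIZED !== "false" },
  auth: process.env.SMTP_USERNAME
    ? { user: process.env.SMTP_USERNAME, pass: process.env.SMTP_PASSWORD ?? "" }
    : undefined
});

// verify() connects and authenticates without sending mail — the same check the
// bootstrap step now runs against the plain formatSmtpConfig() output.
transport.verify().then(() => console.info("SMTP reachable"));
```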

View File

@@ -71,6 +71,7 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
if (appCfg.isProductionMode) {
await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());
}
await server.register(helmet, { contentSecurityPolicy: false });
await server.register(maintenanceMode);

View File

@@ -5,7 +5,6 @@ import { createTransport } from "nodemailer";
import { formatSmtpConfig, getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { getTlsOption } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
type BootstrapOpt = {
@@ -44,7 +43,7 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
console.info("Testing smtp connection");
const smtpCfg = formatSmtpConfig();
await createTransport({ ...smtpCfg, ...getTlsOption(smtpCfg.host, smtpCfg.secure) })
await createTransport(smtpCfg)
.verify()
.then(async () => {
console.info("SMTP successfully connected");

View File

@@ -1,6 +1,7 @@
import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-limit";
import { Redis } from "ioredis";
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
import { getConfig } from "@app/lib/config/env";
export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
@@ -21,14 +22,14 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
// GET endpoints
export const readLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 600,
max: () => getRateLimiterConfig().readLimit,
keyGenerator: (req) => req.realIp
};
// POST, PATCH, PUT, DELETE endpoints
export const writeLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 200, // (too low, FA having issues so increasing it - maidul)
max: () => getRateLimiterConfig().writeLimit,
keyGenerator: (req) => req.realIp
};
@@ -36,25 +37,25 @@ export const writeLimit: RateLimitOptions = {
export const secretsLimit: RateLimitOptions = {
// secrets, folders, secret imports
timeWindow: 60 * 1000,
max: 60,
max: () => getRateLimiterConfig().secretsLimit,
keyGenerator: (req) => req.realIp
};
export const authRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 60,
max: () => getRateLimiterConfig().authRateLimit,
keyGenerator: (req) => req.realIp
};
export const inviteUserRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 30,
max: () => getRateLimiterConfig().inviteUserRateLimit,
keyGenerator: (req) => req.realIp
};
export const mfaRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 20,
max: () => getRateLimiterConfig().mfaRateLimit,
keyGenerator: (req) => {
return req.headers.authorization?.split(" ")[1] || req.realIp;
}
@@ -63,7 +64,7 @@ export const mfaRateLimit: RateLimitOptions = {
export const creationLimit: RateLimitOptions = {
// identity, project, org
timeWindow: 60 * 1000,
max: 30,
max: () => getRateLimiterConfig().creationLimit,
keyGenerator: (req) => req.realIp
};
@@ -71,6 +72,6 @@ export const creationLimit: RateLimitOptions = {
export const publicEndpointLimit: RateLimitOptions = {
// Shared Secrets
timeWindow: 60 * 1000,
max: 30,
max: () => getRateLimiterConfig().publicEndpointLimit,
keyGenerator: (req) => req.realIp
};
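The change above works because `@fastify/rate-limit` accepts `max` either as a number or as a function evaluated per request, so the effective limit can be swapped out at runtime instead of being frozen at route registration. A small sketch of that pattern, with an illustrative in-memory config standing in for the project's rate-limit service:

```typescript
// Minimal sketch; names are illustrative, not the project's actual rate-limit service.
import type { RateLimitOptions } from "@fastify/rate-limit";

// hypothetical in-memory config that a background sync job overwrites periodically
let rateLimiterConfig = { readLimit: 600, writeLimit: 200 };

export const setRateLimiterConfig = (next: typeof rateLimiterConfig) => {
  rateLimiterConfig = next;
};

export const exampleReadLimit: RateLimitOptions = {
  timeWindow: 60 * 1000,
  max: () => rateLimiterConfig.readLimit, // re-evaluated on every request
  keyGenerator: (req) => req.ip // req.realIp above is Infisical-specific; plain Fastify exposes req.ip
};
```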

View File

@@ -1,3 +1,4 @@
import { CronJob } from "cron";
import { Knex } from "knex";
import { z } from "zod";
@@ -33,6 +34,8 @@ import { permissionDALFactory } from "@app/ee/services/permission/permission-dal
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
import { projectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { rateLimitDALFactory } from "@app/ee/services/rate-limit/rate-limit-dal";
import { rateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { samlConfigDALFactory } from "@app/ee/services/saml-config/saml-config-dal";
import { samlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { scimDALFactory } from "@app/ee/services/scim/scim-dal";
@@ -185,6 +188,7 @@ export const registerRoutes = async (
const incidentContactDAL = incidentContactDALFactory(db);
const orgRoleDAL = orgRoleDALFactory(db);
const superAdminDAL = superAdminDALFactory(db);
const rateLimitDAL = rateLimitDALFactory(db);
const apiKeyDAL = apiKeyDALFactory(db);
const projectDAL = projectDALFactory(db);
@@ -444,6 +448,10 @@ export const registerRoutes = async (
orgService,
keyStore
});
const rateLimitService = rateLimitServiceFactory({
rateLimitDAL,
licenseService
});
const apiKeyService = apiKeyServiceFactory({ apiKeyDAL, userDAL });
const secretScanningQueue = secretScanningQueueFactory({
@@ -824,6 +832,9 @@ export const registerRoutes = async (
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
queueService,
secretVersionDAL,
secretFolderVersionDAL: folderVersionDAL,
snapshotDAL,
identityAccessTokenDAL,
secretSharingDAL
});
@@ -859,6 +870,7 @@ export const registerRoutes = async (
secret: secretService,
secretReplication: secretReplicationService,
secretTag: secretTagService,
rateLimit: rateLimitService,
folder: folderService,
secretImport: secretImportService,
projectBot: projectBotService,
@@ -897,6 +909,14 @@ export const registerRoutes = async (
secretSharing: secretSharingService
});
const cronJobs: CronJob[] = [];
if (appCfg.isProductionMode) {
const rateLimitSyncJob = await rateLimitService.initializeBackgroundSync();
if (rateLimitSyncJob) {
cronJobs.push(rateLimitSyncJob);
}
}
server.decorate<FastifyZodProvider["store"]>("store", {
user: userDAL
});
@@ -919,7 +939,8 @@ export const registerRoutes = async (
emailConfigured: z.boolean().optional(),
inviteOnlySignup: z.boolean().optional(),
redisConfigured: z.boolean().optional(),
secretScanningConfigured: z.boolean().optional()
secretScanningConfigured: z.boolean().optional(),
samlDefaultOrgSlug: z.string().optional()
})
}
},
@@ -932,7 +953,8 @@ export const registerRoutes = async (
emailConfigured: cfg.isSmtpConfigured,
inviteOnlySignup: Boolean(serverCfg.allowSignUp),
redisConfigured: cfg.isRedisConfigured,
secretScanningConfigured: cfg.isSecretScanningConfigured
secretScanningConfigured: cfg.isSecretScanningConfigured,
samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
};
}
});
@@ -949,6 +971,7 @@ export const registerRoutes = async (
await server.register(registerV3Routes, { prefix: "/api/v3" });
server.addHook("onClose", async () => {
cronJobs.forEach((job) => job.stop());
await telemetryService.flushAll();
});
};
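The registerRoutes changes follow a simple lifecycle: the rate-limit sync job is created only in production mode, collected into `cronJobs`, and stopped in the server's `onClose` hook. A sketch of that pattern with the `cron` package is below; the schedule and the sync body are assumptions used purely for illustration.

```typescript
// Lifecycle sketch only — the real job is created inside rateLimitService.initializeBackgroundSync().
import { CronJob } from "cron";

const cronJobs: CronJob[] = [];

const syncRateLimitConfiguration = async () => {
  // hypothetical: re-read custom rate limits from the DB and refresh the in-memory config
};

if (process.env.NODE_ENV === "production") { // stand-in for appCfg.isProductionMode
  const rateLimitSyncJob = new CronJob("*/10 * * * *", syncRateLimitConfiguration); // every 10 minutes (illustrative)
  rateLimitSyncJob.start();
  cronJobs.push(rateLimitSyncJob);
}

// equivalent of the server's onClose hook:
process.on("SIGTERM", () => cronJobs.forEach((job) => job.stop()));
```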

View File

@@ -8,7 +8,7 @@ import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { IntegrationMappingBehavior } from "@app/services/integration-auth/integration-list";
import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";
export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
@@ -46,36 +46,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
path: z.string().trim().optional().describe(INTEGRATION.CREATE.path),
region: z.string().trim().optional().describe(INTEGRATION.CREATE.region),
scope: z.string().trim().optional().describe(INTEGRATION.CREATE.scope),
metadata: z
.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
})
.default({})
metadata: IntegrationMetadataSchema.default({})
}),
response: {
200: z.object({
@@ -161,33 +132,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment),
owner: z.string().trim().describe(INTEGRATION.UPDATE.owner),
environment: z.string().trim().describe(INTEGRATION.UPDATE.environment),
metadata: z
.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
})
.optional()
metadata: IntegrationMetadataSchema.optional()
}),
response: {
200: z.object({

View File

@@ -334,6 +334,44 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "PUT",
url: "/:workspaceSlug/version-limit",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
workspaceSlug: z.string().trim()
}),
body: z.object({
pitVersionLimit: z.number().min(1).max(100)
}),
response: {
200: z.object({
message: z.string(),
workspace: ProjectsSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const workspace = await server.services.project.updateVersionLimit({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
pitVersionLimit: req.body.pitVersionLimit,
workspaceSlug: req.params.workspaceSlug
});
return {
message: "Successfully changed workspace version limit",
workspace
};
}
});
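For reference, a hypothetical client call against the version-limit route registered above. The `/api/v1/workspace` prefix is an assumption (only the relative route is shown in this diff); the body shape and the 1–100 bound come from the zod schema, and the route accepts JWT auth only.

```typescript
// Hypothetical usage sketch — endpoint prefix is assumed, not taken from this diff.
const updatePitVersionLimit = async (workspaceSlug: string, pitVersionLimit: number, jwt: string) => {
  const res = await fetch(`/api/v1/workspace/${workspaceSlug}/version-limit`, {
    method: "PUT",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${jwt}`
    },
    body: JSON.stringify({ pitVersionLimit }) // must be a number between 1 and 100
  });
  if (!res.ok) throw new Error(`Failed to update version limit: ${res.status}`);
  return res.json() as Promise<{ message: string; workspace: unknown }>;
};
```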
server.route({
method: "GET",
url: "/:workspaceId/integrations",

View File

@@ -30,7 +30,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
environment: z.string().trim().describe(SECRET_IMPORTS.CREATE.import.environment),
path: z.string().trim().transform(removeTrailingSlash).describe(SECRET_IMPORTS.CREATE.import.path)
}),
isReplication: z.boolean().default(false)
isReplication: z.boolean().default(false).describe(SECRET_IMPORTS.CREATE.isReplication)
}),
response: {
200: z.object({

View File

@@ -80,7 +80,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
body: z.object({
email: z.string().trim(),
providerAuthToken: z.string().trim().optional(),
clientProof: z.string().trim()
clientProof: z.string().trim(),
captchaToken: z.string().trim().optional()
}),
response: {
200: z.discriminatedUnion("mfaEnabled", [
@@ -106,6 +107,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const data = await server.services.login.loginExchangeClientProof({
captchaToken: req.body.captchaToken,
email: req.body.email,
ip: req.realIp,
userAgent,

View File

@@ -3,6 +3,7 @@ import jwt from "jsonwebtoken";
import { TUsers, UserDeviceSchema } from "@app/db/schemas";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/errors";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
@@ -176,12 +177,16 @@ export const authLoginServiceFactory = ({
clientProof,
ip,
userAgent,
providerAuthToken
providerAuthToken,
captchaToken
}: TLoginClientProofDTO) => {
const appCfg = getConfig();
const userEnc = await userDAL.findUserEncKeyByUsername({
username: email
});
if (!userEnc) throw new Error("Failed to find user");
const user = await userDAL.findById(userEnc.userId);
const cfg = getConfig();
let authMethod = AuthMethod.EMAIL;
@@ -196,6 +201,31 @@ export const authLoginServiceFactory = ({
}
}
if (
user.consecutiveFailedPasswordAttempts &&
user.consecutiveFailedPasswordAttempts >= 10 &&
Boolean(appCfg.CAPTCHA_SECRET)
) {
if (!captchaToken) {
throw new BadRequestError({
name: "Captcha Required",
message: "Accomplish the required captcha by logging in via Web"
});
}
// validate captcha token
const response = await request.postForm<{ success: boolean }>("https://api.hcaptcha.com/siteverify", {
response: captchaToken,
secret: appCfg.CAPTCHA_SECRET
});
if (!response.data.success) {
throw new BadRequestError({
name: "Invalid Captcha"
});
}
}
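As a standalone sketch of the captcha step above: once a user has 10 or more consecutive failed password attempts and `CAPTCHA_SECRET` is configured, the token must be verified against hCaptcha's siteverify endpoint before the SRP proof is even checked. The sketch uses `axios.postForm` directly as a stand-in for the project's `request` wrapper (an assumption); the endpoint and the `response`/`secret`/`success` fields follow hCaptcha's documented API.

```typescript
// Standalone sketch of the captcha verification, not the service's exact code.
import axios from "axios";

export const verifyHCaptchaToken = async (captchaToken: string, captchaSecret: string) => {
  const { data } = await axios.postForm<{ success: boolean }>("https://api.hcaptcha.com/siteverify", {
    response: captchaToken,
    secret: captchaSecret
  });
  if (!data.success) throw new Error("Invalid captcha"); // the service raises BadRequestError here
};
```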
if (!userEnc.serverPrivateKey || !userEnc.clientPublicKey) throw new Error("Failed to authenticate. Try again?");
const isValidClientProof = await srpCheckClientProof(
userEnc.salt,
@@ -204,15 +234,31 @@ export const authLoginServiceFactory = ({
userEnc.clientPublicKey,
clientProof
);
if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?");
if (!isValidClientProof) {
await userDAL.update(
{ id: userEnc.userId },
{
$incr: {
consecutiveFailedPasswordAttempts: 1
}
}
);
throw new Error("Failed to authenticate. Try again?");
}
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
serverPrivateKey: null,
clientPublicKey: null
});
await userDAL.updateById(userEnc.userId, {
consecutiveFailedPasswordAttempts: 0
});
// send multi factor auth token if they have it enabled
if (userEnc.isMfaEnabled && userEnc.email) {
const user = await userDAL.findById(userEnc.userId);
enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);
const mfaToken = jwt.sign(

View File

@@ -12,6 +12,7 @@ export type TLoginClientProofDTO = {
providerAuthToken?: string;
ip: string;
userAgent: string;
captchaToken?: string;
};
export type TVerifyMfaTokenDTO = {

View File

@@ -231,7 +231,7 @@ export const authSignupServiceFactory = ({
const accessToken = jwt.sign(
{
authMethod: AuthMethod.EMAIL,
authMethod: authMethod || AuthMethod.EMAIL,
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: updateduser.info.id,
tokenVersionId: tokenSession.id,
@@ -244,7 +244,7 @@ export const authSignupServiceFactory = ({
const refreshToken = jwt.sign(
{
authMethod: AuthMethod.EMAIL,
authMethod: authMethod || AuthMethod.EMAIL,
authTokenType: AuthTokenType.REFRESH_TOKEN,
userId: updateduser.info.id,
tokenVersionId: tokenSession.id,

View File

@@ -31,6 +31,7 @@ import { logger } from "@app/lib/logger";
import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types";
import { TIntegrationDALFactory } from "../integration/integration-dal";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import {
IntegrationInitialSyncBehavior,
IntegrationMappingBehavior,
@@ -1363,38 +1364,41 @@ const syncSecretsGitHub = async ({
}
}
for await (const encryptedSecret of encryptedSecrets) {
if (
!(encryptedSecret.name in secrets) &&
!(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) &&
!(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix))
) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: encryptedSecret.name
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
if (metadata.shouldEnableDelete) {
for await (const encryptedSecret of encryptedSecrets) {
if (
!(encryptedSecret.name in secrets) &&
!(appendices?.prefix !== undefined && !encryptedSecret.name.startsWith(appendices?.prefix)) &&
!(appendices?.suffix !== undefined && !encryptedSecret.name.endsWith(appendices?.suffix))
) {
switch (integration.scope) {
case GithubScope.Org: {
await octokit.request("DELETE /orgs/{org}/actions/secrets/{secret_name}", {
org: integration.owner as string,
secret_name: encryptedSecret.name
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
});
break;
}
case GithubScope.Env: {
await octokit.request(
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}",
{
repository_id: Number(integration.appId),
environment_name: integration.targetEnvironmentId as string,
secret_name: encryptedSecret.name
}
);
break;
}
default: {
await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
owner: integration.owner as string,
repo: integration.app as string,
secret_name: encryptedSecret.name
});
break;
}
}
}
}
@@ -1917,13 +1921,13 @@ const syncSecretsGitLab = async ({
return allEnvVariables;
};
const metadata = IntegrationMetadataSchema.parse(integration.metadata);
const allEnvVariables = await getAllEnvVariables(integration?.appId as string, accessToken);
const getSecretsRes: GitLabSecret[] = allEnvVariables
.filter((secret: GitLabSecret) => secret.environment_scope === integration.targetEnvironment)
.filter((gitLabSecret) => {
let isValid = true;
const metadata = z.record(z.any()).parse(integration.metadata);
if (metadata.secretPrefix && !gitLabSecret.key.startsWith(metadata.secretPrefix)) {
isValid = false;
}
@@ -1943,8 +1947,8 @@ const syncSecretsGitLab = async ({
{
key,
value: secrets[key].value,
protected: false,
masked: false,
protected: Boolean(metadata.shouldProtectSecrets),
masked: Boolean(metadata.shouldMaskSecrets),
raw: false,
environment_scope: integration.targetEnvironment
},
@@ -1961,7 +1965,9 @@ const syncSecretsGitLab = async ({
`${gitLabApiUrl}/v4/projects/${integration?.appId}/variables/${existingSecret.key}?filter[environment_scope]=${integration.targetEnvironment}`,
{
...existingSecret,
value: secrets[existingSecret.key].value
value: secrets[existingSecret.key].value,
protected: Boolean(metadata.shouldProtectSecrets),
masked: Boolean(metadata.shouldMaskSecrets)
},
{
headers: {
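The GitLab sync now forwards the new `shouldProtectSecrets` and `shouldMaskSecrets` metadata flags on both variable creation and update. A hedged sketch of the update call is below: the URL shape and the `protected`/`masked`/`environment_scope` fields follow GitLab's project variables API as used in the hunk above, while the auth header and the parameter bundle are assumptions (the actual headers are not shown in this diff).

```typescript
// Illustrative sketch of the GitLab variable update; auth header is an assumption.
import axios from "axios";

const updateGitLabVariable = async (opts: {
  gitLabApiUrl: string; // e.g. "https://gitlab.com/api"
  appId: string; // GitLab project ID
  accessToken: string;
  key: string;
  value: string;
  environmentScope: string;
  shouldProtectSecrets?: boolean;
  shouldMaskSecrets?: boolean;
}) =>
  axios.put(
    `${opts.gitLabApiUrl}/v4/projects/${opts.appId}/variables/${opts.key}?filter[environment_scope]=${opts.environmentScope}`,
    {
      value: opts.value,
      protected: Boolean(opts.shouldProtectSecrets), // both flags fall back to false when metadata omits them
      masked: Boolean(opts.shouldMaskSecrets),
      environment_scope: opts.environmentScope
    },
    { headers: { Authorization: `Bearer ${opts.accessToken}` } }
  );
```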

View File

@@ -0,0 +1,37 @@
import { z } from "zod";
import { INTEGRATION } from "@app/lib/api-docs";
import { IntegrationMappingBehavior } from "../integration-auth/integration-list";
export const IntegrationMetadataSchema = z.object({
secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
mappingBehavior: z
.nativeEnum(IntegrationMappingBehavior)
.optional()
.describe(INTEGRATION.CREATE.metadata.mappingBehavior),
shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
secretGCPLabel: z
.object({
labelName: z.string(),
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete),
shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets),
shouldProtectSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldProtectSecrets)
});
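With the metadata shape centralized in this single zod schema, the routers attach it with `.default({})` or `.optional()` while the sync code parses whatever JSON is stored on the integration row. A small usage sketch (values and the import path are illustrative):

```typescript
// Usage sketch for the shared schema above; the relative import path is an assumption.
import { IntegrationMetadataSchema } from "./integration-schema";

const stored = { shouldMaskSecrets: true, secretPrefix: "APP_" };

const metadata = IntegrationMetadataSchema.parse(stored);
if (metadata.shouldEnableDelete) {
  // only then does the GitHub sync run its destructive delete pass
}
```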

View File

@@ -29,6 +29,9 @@ export type TCreateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldMaskSecrets?: boolean;
shouldProtectSecrets?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;
@@ -54,6 +57,7 @@ export type TUpdateIntegrationDTO = {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldEnableDelete?: boolean;
};
} & Omit<TProjectPermission, "projectId">;

View File

@@ -336,6 +336,7 @@ export const orgServiceFactory = ({
return org;
});
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
return organization;
};

View File

@@ -39,6 +39,7 @@ import {
TToggleProjectAutoCapitalizationDTO,
TUpdateProjectDTO,
TUpdateProjectNameDTO,
TUpdateProjectVersionLimitDTO,
TUpgradeProjectDTO
} from "./project-types";
@@ -133,7 +134,8 @@ export const projectServiceFactory = ({
name: workspaceName,
orgId: organization.id,
slug: projectSlug || slugify(`${workspaceName}-${alphaNumericNanoId(4)}`),
version: ProjectVersion.V2
version: ProjectVersion.V2,
pitVersionLimit: 10
},
tx
);
@@ -406,6 +408,35 @@ export const projectServiceFactory = ({
return updatedProject;
};
const updateVersionLimit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
pitVersionLimit,
workspaceSlug
}: TUpdateProjectVersionLimitDTO) => {
const project = await projectDAL.findProjectBySlug(workspaceSlug, actorOrgId);
if (!project) {
throw new BadRequestError({
message: "Project not found"
});
}
const { hasRole } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
if (!hasRole(ProjectMembershipRole.Admin))
throw new BadRequestError({ message: "Only admins are allowed to take this action" });
return projectDAL.updateById(project.id, { pitVersionLimit });
};
const updateName = async ({
projectId,
actor,
@@ -501,6 +532,7 @@ export const projectServiceFactory = ({
getAProject,
toggleAutoCapitalization,
updateName,
upgradeProject
upgradeProject,
updateVersionLimit
};
};

View File

@@ -43,6 +43,11 @@ export type TToggleProjectAutoCapitalizationDTO = {
autoCapitalization: boolean;
} & TProjectPermission;
export type TUpdateProjectVersionLimitDTO = {
pitVersionLimit: number;
workspaceSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateProjectNameDTO = {
name: string;
} & TProjectPermission;

View File

@@ -1,13 +1,19 @@
import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal";
import { TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TSecretVersionDALFactory } from "../secret/secret-version-dal";
import { TSecretFolderVersionDALFactory } from "../secret-folder/secret-folder-version-dal";
import { TSecretSharingDALFactory } from "../secret-sharing/secret-sharing-dal";
type TDailyResourceCleanUpQueueServiceFactoryDep = {
auditLogDAL: Pick<TAuditLogDALFactory, "pruneAuditLog">;
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "removeExpiredTokens">;
secretVersionDAL: Pick<TSecretVersionDALFactory, "pruneExcessVersions">;
secretFolderVersionDAL: Pick<TSecretFolderVersionDALFactory, "pruneExcessVersions">;
snapshotDAL: Pick<TSnapshotDALFactory, "pruneExcessSnapshots">;
secretSharingDAL: Pick<TSecretSharingDALFactory, "pruneExpiredSharedSecrets">;
queueService: TQueueServiceFactory;
};
@@ -17,6 +23,9 @@ export type TDailyResourceCleanUpQueueServiceFactory = ReturnType<typeof dailyRe
export const dailyResourceCleanUpQueueServiceFactory = ({
auditLogDAL,
queueService,
snapshotDAL,
secretVersionDAL,
secretFolderVersionDAL,
identityAccessTokenDAL,
secretSharingDAL
}: TDailyResourceCleanUpQueueServiceFactoryDep) => {
@@ -25,6 +34,9 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
await auditLogDAL.pruneAuditLog();
await identityAccessTokenDAL.removeExpiredTokens();
await secretSharingDAL.pruneExpiredSharedSecrets();
await snapshotDAL.pruneExcessSnapshots();
await secretVersionDAL.pruneExcessVersions();
await secretFolderVersionDAL.pruneExcessVersions();
logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`);
});

View File

@@ -62,5 +62,32 @@ export const secretFolderVersionDALFactory = (db: TDbClient) => {
}
};
return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId };
const pruneExcessVersions = async () => {
try {
await db(TableName.SecretFolderVersion)
.with("folder_cte", (qb) => {
void qb
.from(TableName.SecretFolderVersion)
.select(
"id",
"folderId",
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretFolderVersion}."folderId" ORDER BY ${TableName.SecretFolderVersion}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("folder_cte", "folder_cte.id", `${TableName.SecretFolderVersion}.id`)
.whereRaw(`folder_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (error) {
throw new DatabaseError({
error,
name: "Secret Folder Version Prune"
});
}
};
return { ...secretFolderVerOrm, findLatestFolderVersions, findLatestVersionByFolderId, pruneExcessVersions };
};

View File

@@ -309,7 +309,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
};
const expandSecrets = async (
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>
secrets: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean | null }>
) => {
const expandedSec: Record<string, string> = {};
const interpolatedSec: Record<string, string> = {};
@@ -329,8 +329,8 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
// should not do multi line encoding if user has set it to skip
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding
? expandedSec[key]
: formatMultiValueEnv(expandedSec[key]);
? formatMultiValueEnv(expandedSec[key])
: expandedSec[key];
// eslint-disable-next-line
continue;
}
@@ -347,7 +347,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
);
// eslint-disable-next-line
secrets[key].value = secrets[key].skipMultilineEncoding ? expandedVal : formatMultiValueEnv(expandedVal);
secrets[key].value = secrets[key].skipMultilineEncoding ? formatMultiValueEnv(expandedVal) : expandedVal;
}
return secrets;
@@ -395,7 +395,8 @@ export const decryptSecretRaw = (
type: secret.type,
_id: secret.id,
id: secret.id,
user: secret.userId
user: secret.userId,
skipMultilineEncoding: secret.skipMultilineEncoding
};
};

View File

@@ -1,4 +1,6 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";
import { getConfig } from "@app/lib/config/env";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { daysToMillisecond, secondsToMillis } from "@app/lib/dates";
@@ -67,7 +69,10 @@ const MAX_SYNC_SECRET_DEPTH = 5;
export const uniqueSecretQueueKey = (environment: string, secretPath: string) =>
`secret-queue-dedupe-${environment}-${secretPath}`;
type TIntegrationSecret = Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>;
type TIntegrationSecret = Record<
string,
{ value: string; comment?: string; skipMultilineEncoding?: boolean | null | undefined }
>;
export const secretQueueFactory = ({
queueService,
integrationDAL,
@@ -567,11 +572,14 @@ export const secretQueueFactory = ({
isSynced: true
});
} catch (err: unknown) {
logger.info("Secret integration sync error:", err);
logger.info("Secret integration sync error: %o", err);
const message =
err instanceof AxiosError ? JSON.stringify((err as AxiosError)?.response?.data) : (err as Error)?.message;
await integrationDAL.updateById(integration.id, {
lastSyncJobId: job.id,
lastUsed: new Date(),
syncMessage: (err as Error)?.message,
syncMessage: message,
isSynced: false
});
}

View File

@@ -952,15 +952,49 @@ export const secretServiceFactory = ({
});
const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey));
const decryptedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => ({
...el,
secrets: importedSecrets.map((sec) =>
const processedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => {
const decryptedImportSecrets = importedSecrets.map((sec) =>
decryptSecretRaw(
{ ...sec, environment: el.environment, workspace: projectId, secretPath: el.secretPath },
botKey
)
)
}));
);
// secret-override to handle duplicate keys from different import levels
// this prioritizes secret values from direct imports
const importedKeys = new Set<string>();
const importedEntries = decryptedImportSecrets.reduce(
(
accum: {
secretKey: string;
secretPath: string;
workspace: string;
environment: string;
secretValue: string;
secretComment: string;
version: number;
type: string;
_id: string;
id: string;
user: string | null | undefined;
skipMultilineEncoding: boolean | null | undefined;
}[],
sec
) => {
if (!importedKeys.has(sec.secretKey)) {
importedKeys.add(sec.secretKey);
return [...accum, sec];
}
return accum;
},
[]
);
return {
...el,
secrets: importedEntries
};
});
if (expandSecretReferences) {
const expandSecrets = interpolateSecrets({
@@ -971,10 +1005,24 @@ export const secretServiceFactory = ({
});
const batchSecretsExpand = async (
secretBatch: { secretKey: string; secretValue: string; secretComment?: string; secretPath: string }[]
secretBatch: {
secretKey: string;
secretValue: string;
secretComment?: string;
secretPath: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
) => {
// Group secrets by secretPath
const secretsByPath: Record<string, { secretKey: string; secretValue: string; secretComment?: string }[]> = {};
const secretsByPath: Record<
string,
{
secretKey: string;
secretValue: string;
secretComment?: string;
skipMultilineEncoding: boolean | null | undefined;
}[]
> = {};
secretBatch.forEach((secret) => {
if (!secretsByPath[secret.secretPath]) {
@@ -990,11 +1038,15 @@ export const secretServiceFactory = ({
continue;
}
const secretRecord: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }> = {};
const secretRecord: Record<
string,
{ value: string; comment?: string; skipMultilineEncoding: boolean | null | undefined }
> = {};
secretsByPath[secPath].forEach((decryptedSecret) => {
secretRecord[decryptedSecret.secretKey] = {
value: decryptedSecret.secretValue,
comment: decryptedSecret.secretComment
comment: decryptedSecret.secretComment,
skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
};
});
@@ -1011,12 +1063,12 @@ export const secretServiceFactory = ({
await batchSecretsExpand(decryptedSecrets);
// expand imports by batch
await Promise.all(decryptedImports.map((decryptedImport) => batchSecretsExpand(decryptedImport.secrets)));
await Promise.all(processedImports.map((processedImport) => batchSecretsExpand(processedImport.secrets)));
}
return {
secrets: decryptedSecrets,
imports: decryptedImports
imports: processedImports
};
};

View File

@@ -111,8 +111,37 @@ export const secretVersionDALFactory = (db: TDbClient) => {
}
};
const pruneExcessVersions = async () => {
try {
await db(TableName.SecretVersion)
.with("version_cte", (qb) => {
void qb
.from(TableName.SecretVersion)
.select(
"id",
"folderId",
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.SecretVersion}."secretId" ORDER BY ${TableName.SecretVersion}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.SecretVersion}.folderId`)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("version_cte", "version_cte.id", `${TableName.SecretVersion}.id`)
.whereRaw(`version_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (error) {
throw new DatabaseError({
error,
name: "Secret Version Prune"
});
}
};
return {
...secretVersionOrm,
pruneExcessVersions,
findLatestVersionMany,
bulkUpdate,
findLatestVersionByFolderId,

View File

@@ -41,21 +41,8 @@ export enum SmtpHost {
Office365 = "smtp.office365.com"
}
export const getTlsOption = (host?: SmtpHost | string, secure?: boolean) => {
if (!secure) return { secure: false };
if (!host) return { secure: true };
if ((host as SmtpHost) === SmtpHost.Sendgrid) {
return { secure: true, port: 465 }; // more details here https://nodemailer.com/smtp/
}
if (host.includes("amazonaws.com")) {
return { tls: { ciphers: "TLSv1.2" } };
}
return { requireTLS: true, tls: { ciphers: "TLSv1.2" } };
};
export const smtpServiceFactory = (cfg: TSmtpConfig) => {
const smtp = createTransport({ ...cfg, ...getTlsOption(cfg.host, cfg.secure) });
const smtp = createTransport(cfg);
const isSmtpOn = Boolean(cfg.host);
const sendMail = async ({ substitutions, recipients, template, subjectLine }: TSmtpSendMail) => {

View File

@@ -21,6 +21,7 @@ type TUserServiceFactoryDep = {
| "findOneUserAction"
| "createUserAction"
| "findUserEncKeyByUserId"
| "delete"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "find" | "insertMany">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "insertMany">;
@@ -85,7 +86,7 @@ export const userServiceFactory = ({
tx
);
// check if there are users with the same email.
// check if there are verified users with the same email.
const users = await userDAL.find(
{
email,
@@ -134,6 +135,15 @@ export const userServiceFactory = ({
);
}
} else {
await userDAL.delete(
{
email,
isAccepted: false,
isEmailVerified: false
},
tx
);
// update current user's username to [email]
await userDAL.updateById(
user.id,

cli/.gitignore (vendored)
View File

@@ -1,3 +1,4 @@
.infisical.json
dist/
agent-config.test.yaml
.test.env

View File

@@ -3,7 +3,9 @@ module github.com/Infisical/infisical-merge
go 1.21
require (
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
github.com/charmbracelet/lipgloss v0.5.0
github.com/creack/pty v1.1.21
github.com/denisbrodbeck/machineid v1.0.1
github.com/fatih/semgroup v1.2.0
github.com/gitleaks/go-gitdiff v0.8.0
@@ -29,7 +31,6 @@ require (
require (
github.com/alessio/shellescape v1.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 // indirect
github.com/chzyer/readline v1.5.1 // indirect
github.com/danieljoos/wincred v1.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect

View File

@@ -74,6 +74,8 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0=
github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=

View File

@@ -4,6 +4,8 @@ import (
"fmt"
"net/http"
"os"
"github.com/Infisical/infisical-merge/packages/config"
)
func GetHomeDir() (string, error) {
@@ -21,7 +23,7 @@ func WriteToFile(fileName string, dataToWrite []byte, filePerm os.FileMode) erro
return nil
}
func CheckIsConnectedToInternet() (ok bool) {
_, err := http.Get("http://clients3.google.com/generate_204")
func ValidateInfisicalAPIConnection() (ok bool) {
_, err := http.Get(fmt.Sprintf("%v/status", config.INFISICAL_URL))
return err == nil
}

View File

@@ -307,32 +307,33 @@ func FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tag
}
func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectConfigFilePath string) ([]models.SingleEnvironmentVariable, error) {
isConnected := CheckIsConnectedToInternet()
var secretsToReturn []models.SingleEnvironmentVariable
// var serviceTokenDetails api.GetServiceTokenDetailsResponse
var errorToReturn error
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
if isConnected {
log.Debug().Msg("GetAllEnvironmentVariables: Connected to internet, checking logged in creds")
if projectConfigFilePath == "" {
RequireLocalWorkspaceFile()
} else {
ValidateWorkspaceFile(projectConfigFilePath)
}
RequireLogin()
if projectConfigFilePath == "" {
RequireLocalWorkspaceFile()
} else {
ValidateWorkspaceFile(projectConfigFilePath)
}
RequireLogin()
log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
isConnected := ValidateInfisicalAPIConnection()
if isConnected {
log.Debug().Msg("GetAllEnvironmentVariables: Connected to Infisical instance, checking logged in creds")
}
if err != nil {
return nil, err
}
if loggedInUserDetails.LoginExpired {
if isConnected && loggedInUserDetails.LoginExpired {
PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
}
@@ -364,12 +365,12 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
backupSecretsEncryptionKey := []byte(loggedInUserDetails.UserCredentials.PrivateKey)[0:32]
if errorToReturn == nil {
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey, secretsToReturn)
WriteBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupSecretsEncryptionKey, secretsToReturn)
}
// only attempt to serve cached secrets if no internet connection and if at least one secret cached
if !isConnected {
backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, backupSecretsEncryptionKey)
backedSecrets, err := ReadBackupSecrets(infisicalDotJson.WorkspaceId, params.Environment, params.SecretsPath, backupSecretsEncryptionKey)
if len(backedSecrets) > 0 {
PrintWarning("Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug")
secretsToReturn = backedSecrets
@@ -634,8 +635,9 @@ func GetPlainTextSecrets(key []byte, encryptedSecrets []api.EncryptedSecretV3) (
return plainTextSecrets, nil
}
func WriteBackupSecrets(workspace string, environment string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error {
fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment)
func WriteBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte, secrets []models.SingleEnvironmentVariable) error {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("secrets_%s_%s_%s", workspace, environment, formattedPath)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()
@@ -672,8 +674,9 @@ func WriteBackupSecrets(workspace string, environment string, encryptionKey []by
return nil
}
func ReadBackupSecrets(workspace string, environment string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) {
fileName := fmt.Sprintf("secrets_%s_%s", workspace, environment)
func ReadBackupSecrets(workspace string, environment string, secretsPath string, encryptionKey []byte) ([]models.SingleEnvironmentVariable, error) {
formattedPath := strings.ReplaceAll(secretsPath, "/", "-")
fileName := fmt.Sprintf("secrets_%s_%s_%s", workspace, environment, formattedPath)
secrets_backup_folder_name := "secrets-backup"
_, fullConfigFileDirPath, err := GetFullConfigFilePath()

View File

@@ -0,0 +1,23 @@
#!/bin/bash
TEST_ENV_FILE=".test.env"
# Check if the .env file exists
if [ ! -f "$TEST_ENV_FILE" ]; then
echo "$TEST_ENV_FILE does not exist."
exit 1
fi
# Export the variables
while IFS= read -r line
do
# Skip empty lines and lines starting with #
if [[ -z "$line" || "$line" =~ ^\# ]]; then
continue
fi
# Read the key-value pair
IFS='=' read -r key value <<< "$line"
eval export $key=\$value
done < "$TEST_ENV_FILE"
echo "Test environment variables set."

View File

@@ -0,0 +1,7 @@
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└───────────────┴──────────────┴─────────────┘

View File

@@ -0,0 +1,8 @@
Warning: Unable to fetch latest secret(s) due to connection error, serving secrets from last successful fetch. For more info, run with --debug
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└───────────────┴──────────────┴─────────────┘

View File

@@ -8,7 +8,6 @@ import (
func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent")
@@ -24,8 +23,6 @@ func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) {
}
func TestServiceToken_ExportSecretsWithImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent")
if err != nil {
@@ -41,8 +38,6 @@ func TestServiceToken_ExportSecretsWithImports(t *testing.T) {
func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false")
if err != nil {
@@ -57,8 +52,6 @@ func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) {
}
func TestServiceToken_ExportSecretsWithoutImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false")
if err != nil {

View File

@@ -2,10 +2,10 @@ package tests
import (
"fmt"
"log"
"os"
"os/exec"
"strings"
"testing"
)
const (
@@ -23,6 +23,8 @@ type Credentials struct {
ServiceToken string
ProjectID string
EnvSlug string
UserEmail string
UserPassword string
}
var creds = Credentials{
@@ -32,18 +34,21 @@ var creds = Credentials{
ServiceToken: os.Getenv("CLI_TESTS_SERVICE_TOKEN"),
ProjectID: os.Getenv("CLI_TESTS_PROJECT_ID"),
EnvSlug: os.Getenv("CLI_TESTS_ENV_SLUG"),
UserEmail: os.Getenv("CLI_TESTS_USER_EMAIL"),
UserPassword: os.Getenv("CLI_TESTS_USER_PASSWORD"),
}
func ExecuteCliCommand(command string, args ...string) (string, error) {
cmd := exec.Command(command, args...)
output, err := cmd.CombinedOutput()
if err != nil {
fmt.Println(fmt.Sprint(err) + ": " + string(output))
return strings.TrimSpace(string(output)), err
}
return strings.TrimSpace(string(output)), nil
}
func SetupCli(t *testing.T) {
func SetupCli() {
if creds.ClientID == "" || creds.ClientSecret == "" || creds.ServiceToken == "" || creds.ProjectID == "" || creds.EnvSlug == "" {
panic("Missing required environment variables")
@@ -57,7 +62,7 @@ func SetupCli(t *testing.T) {
if !alreadyBuilt {
if err := exec.Command("go", "build", "../.").Run(); err != nil {
t.Fatal(err)
log.Fatal(err)
}
}

View File

@@ -1,14 +1,124 @@
package tests
import (
"log"
"os/exec"
"strings"
"testing"
"github.com/creack/pty"
"github.com/stretchr/testify/assert"
)
func MachineIdentityLoginCmd(t *testing.T) {
SetupCli(t)
func UserInitCmd() {
c := exec.Command(FORMATTED_CLI_NAME, "init")
ptmx, err := pty.Start(c)
if err != nil {
log.Fatalf("error running CLI command: %v", err)
}
defer func() { _ = ptmx.Close() }()
stepChan := make(chan int, 10)
go func() {
buf := make([]byte, 1024)
step := -1
for {
n, err := ptmx.Read(buf)
if n > 0 {
terminalOut := string(buf)
if strings.Contains(terminalOut, "Which Infisical organization would you like to select a project from?") && step < 0 {
step += 1
stepChan <- step
} else if strings.Contains(terminalOut, "Which of your Infisical projects would you like to connect this project to?") && step < 1 {
step += 1;
stepChan <- step
}
}
if err != nil {
close(stepChan)
return
}
}
}()
for i := range stepChan {
switch i {
case 0:
ptmx.Write([]byte("\n"))
case 1:
ptmx.Write([]byte("\n"))
}
}
}
func UserLoginCmd() {
// set vault to file because CI has no keyring
vaultCmd := exec.Command(FORMATTED_CLI_NAME, "vault", "set", "file")
_, err := vaultCmd.Output()
if err != nil {
log.Fatalf("error setting vault: %v", err)
}
// Start programmatic interaction with CLI
c := exec.Command(FORMATTED_CLI_NAME, "login", "--interactive")
ptmx, err := pty.Start(c)
if err != nil {
log.Fatalf("error running CLI command: %v", err)
}
defer func() { _ = ptmx.Close() }()
stepChan := make(chan int, 10)
go func() {
buf := make([]byte, 1024)
step := -1
for {
n, err := ptmx.Read(buf)
if n > 0 {
terminalOut := string(buf)
if strings.Contains(terminalOut, "Infisical Cloud") && step < 0 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Email") && step < 1 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Password") && step < 2 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Infisical organization") && step < 3 {
step += 1;
stepChan <- step
} else if strings.Contains(terminalOut, "Enter passphrase") && step < 4 {
step += 1;
stepChan <- step
}
}
if err != nil {
close(stepChan)
return
}
}
}()
for i := range stepChan {
switch i {
case 0:
ptmx.Write([]byte("\n"))
case 1:
ptmx.Write([]byte(creds.UserEmail))
ptmx.Write([]byte("\n"))
case 2:
ptmx.Write([]byte(creds.UserPassword))
ptmx.Write([]byte("\n"))
case 3:
ptmx.Write([]byte("\n"))
}
}
}
func MachineIdentityLoginCmd(t *testing.T) {
if creds.UAAccessToken != "" {
return
}

cli/test/main_test.go (new file)
View File

@@ -0,0 +1,23 @@
package tests
import (
"fmt"
"os"
"testing"
)
func TestMain(m *testing.M) {
// Setup
fmt.Println("Setting up CLI...")
SetupCli()
fmt.Println("Performing user login...")
UserLoginCmd()
fmt.Println("Performing infisical init...")
UserInitCmd()
// Run the tests
code := m.Run()
// Exit
os.Exit(code)
}

View File

@@ -8,8 +8,6 @@ import (
)
func TestServiceToken_RunCmdRecursiveAndImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world")
if err != nil {
@@ -25,8 +23,6 @@ func TestServiceToken_RunCmdRecursiveAndImports(t *testing.T) {
}
}
func TestServiceToken_RunCmdWithImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world")
if err != nil {
@@ -44,8 +40,6 @@ func TestServiceToken_RunCmdWithImports(t *testing.T) {
func TestUniversalAuth_RunCmdRecursiveAndImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world")
if err != nil {
@@ -63,8 +57,6 @@ func TestUniversalAuth_RunCmdRecursiveAndImports(t *testing.T) {
func TestUniversalAuth_RunCmdWithImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world")
if err != nil {
@@ -83,8 +75,6 @@ func TestUniversalAuth_RunCmdWithImports(t *testing.T) {
func TestUniversalAuth_RunCmdWithoutImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world")
if err != nil {
@@ -101,8 +91,6 @@ func TestUniversalAuth_RunCmdWithoutImports(t *testing.T) {
}
func TestServiceToken_RunCmdWithoutImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world")
if err != nil {

View File

@@ -7,8 +7,6 @@ import (
)
func TestServiceToken_GetSecretsByNameRecursive(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
@@ -23,8 +21,6 @@ func TestServiceToken_GetSecretsByNameRecursive(t *testing.T) {
}
func TestServiceToken_GetSecretsByNameWithNotFoundSecret(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
@@ -39,8 +35,6 @@ func TestServiceToken_GetSecretsByNameWithNotFoundSecret(t *testing.T) {
}
func TestServiceToken_GetSecretsByNameWithImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
@@ -56,8 +50,6 @@ func TestServiceToken_GetSecretsByNameWithImports(t *testing.T) {
func TestUniversalAuth_GetSecretsByNameRecursive(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
@@ -73,8 +65,6 @@ func TestUniversalAuth_GetSecretsByNameRecursive(t *testing.T) {
func TestUniversalAuth_GetSecretsByNameWithNotFoundSecret(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
@@ -90,8 +80,6 @@ func TestUniversalAuth_GetSecretsByNameWithNotFoundSecret(t *testing.T) {
func TestUniversalAuth_GetSecretsByNameWithImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {

View File

@@ -3,12 +3,12 @@ package tests
import (
"testing"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/bradleyjkemp/cupaloy/v2"
)
func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
SetupCli(t)
func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
@@ -23,8 +23,6 @@ func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
}
func TestServiceToken_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent")
if err != nil {
@@ -39,7 +37,6 @@ func TestServiceToken_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.
}
func TestUniversalAuth_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
SetupCli(t)
MachineIdentityLoginCmd(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
@@ -56,7 +53,6 @@ func TestUniversalAuth_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
}
func TestUniversalAuth_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) {
SetupCli(t)
MachineIdentityLoginCmd(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent")
@@ -73,7 +69,6 @@ func TestUniversalAuth_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing
}
func TestUniversalAuth_SecretsGetWrongEnvironment(t *testing.T) {
SetupCli(t)
MachineIdentityLoginCmd(t)
output, _ := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", "invalid-env", "--recursive", "--silent")
@@ -85,3 +80,45 @@ func TestUniversalAuth_SecretsGetWrongEnvironment(t *testing.T) {
}
}
func TestUserAuth_SecretsGetAll(t *testing.T) {
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
// explicitly called here because it should happen directly after successful secretsGetAll
testUserAuth_SecretsGetAllWithoutConnection(t)
}
func testUserAuth_SecretsGetAllWithoutConnection(t *testing.T) {
originalConfigFile, err := util.GetConfigFile()
if err != nil {
t.Fatalf("error getting config file")
}
newConfigFile := originalConfigFile
// set it to a URL that will always be unreachable
newConfigFile.LoggedInUserDomain = "http://localhost:4999"
util.WriteConfigFile(&newConfigFile)
// restore config file
defer util.WriteConfigFile(&originalConfigFile)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}

View File

@@ -58,46 +58,108 @@ Once you apply the manifest, the operator will be installed in `infisical-operat
Once you have installed the operator to your cluster, you'll need to create an `InfisicalSecret` custom resource definition (CRD).
```yaml example-infisical-secret-crd.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
  name: infisicalsecret-sample
  labels:
    label-to-be-passed-to-managed-secret: sample-value
  annotations:
    example.com/annotation-to-be-passed-to-managed-secret: "sample-value"
spec:
  hostAPI: https://app.infisical.com/api
  resyncInterval: 10
  authentication:
    # Make sure to only have 1 authentication method defined, serviceToken/universalAuth.
    # If you have multiple authentication methods defined, it may cause issues.

    # (Deprecated) Service Token Auth
    serviceToken:
      serviceTokenSecretReference:
        secretName: service-token
        secretNamespace: default
      secretsScope:
        envSlug: <env-slug>
        secretsPath: <secrets-path>
        recursive: true

    # Universal Auth
    universalAuth:
      secretsScope:
        projectSlug: new-ob-em
        envSlug: dev # "dev", "staging", "prod", etc..
        secretsPath: "/" # Root is "/"
        recursive: true # Whether or not to use recursive mode (fetches all secrets in an environment from a given secret path, and all folders inside the path) / defaults to false
      credentialsRef:
        secretName: universal-auth-credentials
        secretNamespace: default

    # Native Kubernetes Auth
    kubernetesAuth:
      identityId: <machine-identity-id>
      serviceAccountRef:
        name: <service-account-name>
        namespace: <service-account-namespace>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true

    # AWS IAM Auth
    awsIamAuth:
      identityId: <your-machine-identity-id>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true

    # Azure Auth
    azureAuth:
      identityId: <your-machine-identity-id>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true

    # GCP ID Token Auth
    gcpIdTokenAuth:
      identityId: <your-machine-identity-id>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true

    # GCP IAM Auth
    gcpIamAuth:
      identityId: <your-machine-identity-id>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true

  managedSecretReference:
    secretName: managed-secret
    secretNamespace: default
    creationPolicy: "Orphan" ## Owner | Orphan
    # secretType: kubernetes.io/dockerconfigjson
```
### InfisicalSecret CRD properties
@@ -156,7 +218,7 @@ When `hostAPI` is not defined the operator fetches secrets from Infisical Cloud.
</Steps>
{" "}
<Info>
Make sure to also populate the `secretsScope` field with the project slug
@@ -187,6 +249,233 @@ spec:
</Accordion>
<Accordion title="authentication.kubernetesAuth">
The Kubernetes machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within a Kubernetes environment.
<Steps>
<Step title="Create a machine identity">
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about Kubernetes machine identities here](/documentation/platform/identities/kubernetes-auth).
</Step>
<Step title="Add your identity ID to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.kubernetesAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details.
</Step>
<Step title="Add your Kubernetes service account token to the InfisicalSecret resource">
If you followed the [Kubernetes machine identity guide](/documentation/platform/identities/kubernetes-auth), you will already have created a service account token as part of configuring your Kubernetes machine identity. If you have not created one yet, please follow the guide to do so.
You will need to enter the name of the service account and the namespace where the service account lives. The example below shows how to add the service account token to the InfisicalSecret resource.
</Step>
</Steps>
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
## Example
```yaml example-kubernetes-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
  name: infisicalsecret-sample-crd
spec:
  authentication:
    kubernetesAuth:
      identityId: <machine-identity-id>
      serviceAccountRef:
        name: <service-account-name>
        namespace: <service-account-namespace>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true
  ...
```
</Accordion>
<Accordion title="authentication.awsIamAuth">
The AWS IAM machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within an AWS environment, such as an EC2 instance or a Lambda function.
<Steps>
<Step title="Create a machine identity">
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about AWS machine identities here](/documentation/platform/identities/aws-auth).
</Step>
<Step title="Add your identity ID to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.awsIamAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details.
</Step>
</Steps>
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
## Example
```yaml example-aws-iam-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
  name: infisicalsecret-sample-crd
spec:
  authentication:
    awsIamAuth:
      identityId: <your-machine-identity-id>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true
  ...
```
</Accordion>
<Accordion title="authentication.azureAuth">
The Azure machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within an Azure environment.
<Steps>
<Step title="Create a machine identity">
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about Azure machine identities here](/documentation/platform/identities/azure-auth).
</Step>
<Step title="Add your identity ID to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.azureAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details.
</Step>
</Steps>
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
## Example
```yaml example-azure-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
  name: infisicalsecret-sample-crd
spec:
  authentication:
    azureAuth:
      identityId: <your-machine-identity-id>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true
  ...
```
</Accordion>
<Accordion title="authentication.gcpIdTokenAuth">
The GCP ID Token machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within GCP environments.
<Steps>
<Step title="Create a machine identity">
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about GCP machine identities here](/documentation/platform/identities/gcp-auth).
</Step>
<Step title="Add your identity ID to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.gcpIdTokenAuth.identityId` field, add the identity ID of the machine identity you created. See the example below for more details.
</Step>
</Steps>
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
## Example
```yaml example-gcp-id-token-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
  name: infisicalsecret-sample-crd
spec:
  authentication:
    gcpIdTokenAuth:
      identityId: <your-machine-identity-id>
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true
  ...
```
</Accordion>
<Accordion title="authentication.gcpIamAuth">
The GCP IAM machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can be used both within and outside GCP environments.
<Steps>
<Step title="Create a machine identity">
You need to create a machine identity, and give it access to the project(s) you want to interact with. You can [read more about GCP machine identities here](/documentation/platform/identities/gcp-auth).
</Step>
<Step title="Add your identity ID and service account token path to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource. In the `authentication.gcpIamAuth.identityId` field, add the identity ID of the machine identity you created.
You'll also need to add the service account key file path to your InfisicalSecret resource. In the `authentication.gcpIamAuth.serviceAccountKeyFilePath` field, add the path to your service account key file. Please see the example below for more details.
</Step>
</Steps>
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
## Example
```yaml example-gcp-iam-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
  name: infisicalsecret-sample-crd
spec:
  authentication:
    gcpIamAuth:
      identityId: <your-machine-identity-id>
      serviceAccountKeyFilePath: "/path/to-service-account-key-file-path.json"
      # secretsScope is identical to the secrets scope in the universalAuth field in this sample.
      secretsScope:
        projectSlug: your-project-slug
        envSlug: prod
        secretsPath: "/path"
        recursive: true
  ...
```
</Accordion>
<Accordion title="authentication.serviceToken">
<Warning>
Service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities).

View File

@@ -384,6 +384,7 @@
"pages": [
"sdks/languages/node",
"sdks/languages/python",
"sdks/languages/go",
"sdks/languages/java",
"sdks/languages/csharp"
]

438
docs/sdks/languages/go.mdx Normal file
View File

@@ -0,0 +1,438 @@
---
title: "Infisical Go SDK"
sidebarTitle: "Go"
icon: "golang"
---
If you're working with Go, the official [Infisical Go SDK](https://github.com/infisical/go-sdk) package is the easiest way to fetch and work with secrets for your application.
- [Package](https://pkg.go.dev/github.com/infisical/go-sdk)
- [GitHub Repository](https://github.com/infisical/go-sdk)
## Basic Usage
```go
package main

import (
  "fmt"
  "os"

  infisical "github.com/infisical/go-sdk"
)

func main() {
  client, err := infisical.NewInfisicalClient(infisical.Config{
    SiteUrl: "https://app.infisical.com", // Optional, default is https://app.infisical.com
  })
  if err != nil {
    fmt.Printf("Error: %v", err)
    os.Exit(1)
  }

  _, err = client.Auth().UniversalAuthLogin("YOUR_CLIENT_ID", "YOUR_CLIENT_SECRET")
  if err != nil {
    fmt.Printf("Authentication failed: %v", err)
    os.Exit(1)
  }

  apiKeySecret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
    SecretKey:   "API_KEY",
    Environment: "dev",
    ProjectID:   "YOUR_PROJECT_ID",
    SecretPath:  "/",
  })
  if err != nil {
    fmt.Printf("Error: %v", err)
    os.Exit(1)
  }

  fmt.Printf("API Key Secret: %v", apiKeySecret)
}
```
This example demonstrates how to use the Infisical Go SDK in a simple Go application. The application retrieves a secret named `API_KEY` from the `dev` environment of the `YOUR_PROJECT_ID` project.
<Warning>
We do not recommend hardcoding your [Machine Identity Tokens](/platform/identities/overview). Setting them as environment variables would be best.
</Warning>
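As a concrete illustration of the advice above, here is a minimal sketch that reads Universal Auth credentials from environment variables instead of hardcoding them. It reuses the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` and `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` variable names documented in the Authentication section below; treat it as a pattern rather than a required setup.
```go
package main

import (
  "fmt"
  "os"

  infisical "github.com/infisical/go-sdk"
)

func main() {
  // SiteUrl is optional, so an empty Config is enough for Infisical Cloud.
  client, err := infisical.NewInfisicalClient(infisical.Config{})
  if err != nil {
    fmt.Printf("Error: %v", err)
    os.Exit(1)
  }

  // Read the credentials from the environment so they never live in source code.
  clientID := os.Getenv("INFISICAL_UNIVERSAL_AUTH_CLIENT_ID")
  clientSecret := os.Getenv("INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET")

  if _, err := client.Auth().UniversalAuthLogin(clientID, clientSecret); err != nil {
    fmt.Printf("Authentication failed: %v", err)
    os.Exit(1)
  }
}
```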
# Installation
```console
$ go get github.com/infisical/go-sdk
```
# Configuration
Import the SDK and create a client instance.
```go
client, err := infisical.NewInfisicalClient(infisical.Config{
SiteUrl: "https://app.infisical.com", // Optional, default is https://api.infisical.com
})
if err != nil {
fmt.Printf("Error: %v", err)
os.Exit(1)
}
```
### ClientSettings methods
<ParamField query="options" type="object">
<Expandable title="properties">
<ParamField query="SiteUrl" type="string" optional>
The URL of the Infisical API. Default is `https://api.infisical.com`.
</ParamField>
<ParamField query="UserAgent" type="string" required>
Optionally set the user agent that will be used for HTTP requests. _(Not recommended)_
</ParamField>
</Expandable>
</ParamField>
### Authentication
The SDK supports a variety of authentication methods. The most common authentication method is Universal Auth, which uses a client ID and client secret to authenticate.
#### Universal Auth
**Using environment variables**
Call `.Auth().UniversalAuthLogin()` with empty arguments to use the following environment variables:
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` - Your machine identity client ID.
- `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` - Your machine identity client secret.
**Using the SDK directly**
```go
_, err := client.Auth().UniversalAuthLogin("CLIENT_ID", "CLIENT_SECRET")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
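Equivalently, a short sketch of the environment-variable flow described above: passing empty strings makes the SDK fall back to the two variables. This assumes a `client` created as in the Configuration section and the variables already exported.
```go
// Empty arguments: the SDK reads INFISICAL_UNIVERSAL_AUTH_CLIENT_ID and
// INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET from the environment.
if _, err := client.Auth().UniversalAuthLogin("", ""); err != nil {
  fmt.Println(err)
  os.Exit(1)
}
```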
#### GCP ID Token Auth
<Info>
Please note that this authentication method will only work if you're running your application on Google Cloud Platform.
Please [read more](/documentation/platform/identities/gcp-auth) about this authentication method.
</Info>
**Using environment variables**
Call `.Auth().GcpIdTokenAuthLogin()` with empty arguments to use the following environment variables:
- `INFISICAL_GCP_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
**Using the SDK directly**
```go
_, err := client.Auth().GcpIdTokenAuthLogin("YOUR_MACHINE_IDENTITY_ID")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
#### GCP IAM Auth
**Using environment variables**
Call `.Auth().GcpIamAuthLogin()` with empty arguments to use the following environment variables:
- `INFISICAL_GCP_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
- `INFISICAL_GCP_IAM_SERVICE_ACCOUNT_KEY_FILE_PATH` - The path to your GCP service account key file.
**Using the SDK directly**
```go
_, err = client.Auth().GcpIamAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_KEY_FILE_PATH")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
#### AWS IAM Auth
<Info>
Please note that this authentication method will only work if you're running your application on AWS.
Please [read more](/documentation/platform/identities/aws-auth) about this authentication method.
</Info>
**Using environment variables**
Call `.Auth().AwsIamAuthLogin()` with empty arguments to use the following environment variables:
- `INFISICAL_AWS_IAM_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
**Using the SDK directly**
```go
_, err = client.Auth().AwsIamAuthLogin("MACHINE_IDENTITY_ID")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
#### Azure Auth
<Info>
Please note that this authentication method will only work if you're running your application on Azure.
Please [read more](/documentation/platform/identities/azure-auth) about this authentication method.
</Info>
**Using environment variables**
Call `.Auth().AzureAuthLogin()` with empty arguments to use the following environment variables:
- `INFISICAL_AZURE_AUTH_IDENTITY_ID` - Your Infisical Machine Identity ID.
**Using the SDK directly**
```go
_, err = client.Auth().AzureAuthLogin("MACHINE_IDENTITY_ID")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
#### Kubernetes Auth
<Info>
Please note that this authentication method will only work if you're running your application on Kubernetes.
Please [read more](/documentation/platform/identities/kubernetes-auth) about this authentication method.
</Info>
**Using environment variables**
Call `.Auth().KubernetesAuthLogin()` with empty arguments to use the following environment variables:
- `INFISICAL_KUBERNETES_IDENTITY_ID` - Your Infisical Machine Identity ID.
- `INFISICAL_KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH_ENV_NAME` - The environment variable name that contains the path to the service account token. This is optional and will default to `/var/run/secrets/kubernetes.io/serviceaccount/token`.
**Using the SDK directly**
```go
// Service account token path will default to /var/run/secrets/kubernetes.io/serviceaccount/token if empty value is passed
_, err = client.Auth().KubernetesAuthLogin("MACHINE_IDENTITY_ID", "SERVICE_ACCOUNT_TOKEN_PATH")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
```
## Working with Secrets
### client.Secrets().List(options)
```go
secrets, err := client.Secrets().List(infisical.ListSecretsOptions{
ProjectID: "PROJECT_ID",
Environment: "dev",
SecretPath: "/foo/bar",
AttachToProcessEnv: false,
})
```
Retrieve all secrets within the Infisical project and environment that the client is connected to.
#### Parameters
<ParamField query="Parameters" type="object">
<Expandable title="properties">
<ParamField query="Environment" type="string" required>
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
</ParamField>
<ParamField query="ProjectID" type="string">
The project ID where the secret lives in.
</ParamField>
<ParamField query="SecretPath" type="string" optional>
The path from where secrets should be fetched from.
</ParamField>
<ParamField query="AttachToProcessEnv" type="boolean" default="false" optional>
Whether or not to set the fetched secrets to the process environment. If true, you can access the secrets like so `os.Getenv("SECRET_NAME")`.
</ParamField>
<ParamField query="IncludeImports" type="boolean" default="false" optional>
Whether or not to include imported secrets from the current path. Read about [secret import](/documentation/platform/secret-reference)
</ParamField>
<ParamField query="Recursive" type="boolean" default="false" optional>
Whether or not to fetch secrets recursively from the specified path. Please note that there's a 20-depth limit for recursive fetching.
</ParamField>
<ParamField query="ExpandSecretReferences" type="boolean" default="true" optional>
Whether or not to expand secret references in the fetched secrets. Read about [secret reference](/documentation/platform/secret-reference)
</ParamField>
</Expandable>
</ParamField>
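Putting the parameters above together, here is a short sketch that lists secrets recursively from a folder, including imported secrets, and prints each key. It assumes an authenticated `client` from the sections above, and that the returned secret type exposes `SecretKey` and `SecretValue` fields (an assumption for illustration, not something stated on this page).
```go
// Sketch: list secrets recursively under /foo/bar, including imported secrets.
secrets, err := client.Secrets().List(infisical.ListSecretsOptions{
  ProjectID:      "PROJECT_ID",
  Environment:    "dev",
  SecretPath:     "/foo/bar",
  Recursive:      true,
  IncludeImports: true,
})
if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
for _, s := range secrets {
  // SecretKey / SecretValue field names are assumed here.
  fmt.Printf("%s (%d characters)\n", s.SecretKey, len(s.SecretValue))
}
```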
### client.Secrets().Retrieve(options)
```go
secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
SecretKey: "API_KEY",
ProjectID: "PROJECT_ID",
Environment: "dev",
})
```
Retrieve a secret from Infisical.
By default, `Secrets().Retrieve()` fetches and returns a shared secret.
#### Parameters
<ParamField query="Parameters" type="object" optional>
<Expandable title="properties">
<ParamField query="SecretKey" type="string" required>
The key of the secret to retrieve.
</ParamField>
<ParamField query="ProjectID" type="string" required>
The project ID where the secret lives in.
</ParamField>
<ParamField query="Environment" type="string" required>
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
</ParamField>
<ParamField query="SecretPath" type="string" optional>
The path from where secret should be fetched from.
</ParamField>
<ParamField query="Type" type="string" optional>
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
</ParamField>
</Expandable>
</ParamField>
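For example, a short sketch of fetching a personal override instead of the shared value, using the `Type` parameter described above; the key and project values are placeholders, and an authenticated `client` is assumed.
```go
// Sketch: retrieve the personal version of API_KEY; omit Type for the shared default.
secret, err := client.Secrets().Retrieve(infisical.RetrieveSecretOptions{
  SecretKey:   "API_KEY",
  ProjectID:   "PROJECT_ID",
  Environment: "dev",
  SecretPath:  "/",
  Type:        "personal",
})
if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
fmt.Printf("%+v\n", secret)
```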
### client.Secrets().Create(options)
```go
secret, err := client.Secrets().Create(infisical.CreateSecretOptions{
ProjectID: "PROJECT_ID",
Environment: "dev",
SecretKey: "NEW_SECRET_KEY",
SecretValue: "NEW_SECRET_VALUE",
SecretComment: "This is a new secret",
})
```
Create a new secret in Infisical.
#### Parameters
<ParamField query="Parameters" type="object" optional>
<Expandable title="properties">
<ParamField query="SecretKey" type="string" required>
The key of the secret to create.
</ParamField>
<ParamField query="SecretValue" type="string" required>
The value of the secret.
</ParamField>
<ParamField query="SecretComment" type="string" optional>
A comment for the secret.
</ParamField>
<ParamField query="ProjectID" type="string" required>
The project ID where the secret lives in.
</ParamField>
<ParamField query="Environment" type="string" required>
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
</ParamField>
<ParamField query="SecretPath" type="string" optional>
The path from where secret should be created.
</ParamField>
<ParamField query="Type" type="string" optional>
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
</ParamField>
</Expandable>
</ParamField>
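As a usage sketch of the options above, the following creates a secret under a specific folder rather than the project root. The path, key, value, and comment are illustrative placeholders, and an authenticated `client` is assumed.
```go
// Sketch: create a secret at /backend instead of the root path.
secret, err := client.Secrets().Create(infisical.CreateSecretOptions{
  ProjectID:     "PROJECT_ID",
  Environment:   "dev",
  SecretPath:    "/backend",
  SecretKey:     "SMTP_PASSWORD",
  SecretValue:   "NEW_SECRET_VALUE",
  SecretComment: "Managed by the backend team",
})
if err != nil {
  fmt.Println(err)
  os.Exit(1)
}
fmt.Printf("created %+v\n", secret)
```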
### client.Secrets().Update(options)
```go
secret, err := client.Secrets().Update(infisical.UpdateSecretOptions{
ProjectID: "PROJECT_ID",
Environment: "dev",
SecretKey: "NEW_SECRET_KEY",
NewSecretValue: "NEW_SECRET_VALUE",
NewSkipMultilineEncoding: false,
})
```
Update an existing secret in Infisical.
#### Parameters
<ParamField query="Parameters" type="object" optional>
<Expandable title="properties">
<ParamField query="SecretKey" type="string" required>
The key of the secret to update.
</ParamField>
<ParamField query="NewSecretValue" type="string" required>
The new value of the secret.
</ParamField>
<ParamField query="NewSkipMultilineEncoding" type="boolean" default="false" optional>
Whether or not to skip multiline encoding for the new secret value.
</ParamField>
<ParamField query="ProjectID" type="string" required>
The project ID where the secret lives in.
</ParamField>
<ParamField query="Environment" type="string" required>
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
</ParamField>
<ParamField query="SecretPath" type="string" optional>
The path from where secret should be updated.
</ParamField>
<ParamField query="Type" type="string" optional>
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
</ParamField>
</Expandable>
</ParamField>
### client.Secrets().Delete(options)
```go
secret, err := client.Secrets().Delete(infisical.DeleteSecretOptions{
ProjectID: "PROJECT_ID",
Environment: "dev",
SecretKey: "SECRET_KEY",
})
```
Delete a secret in Infisical.
#### Parameters
<ParamField query="Parameters" type="object" optional>
<Expandable title="properties">
<ParamField query="SecretKey" type="string">
The key of the secret to update.
</ParamField>
<ParamField query="ProjectID" type="string" required>
The project ID where the secret lives in.
</ParamField>
<ParamField query="Environment" type="string" required>
The slug name (dev, prod, etc) of the environment from where secrets should be fetched from.
</ParamField>
<ParamField query="SecretPath" type="string" optional>
The path from where secret should be deleted.
</ParamField>
<ParamField query="Type" type="string" optional>
The type of the secret. Valid options are "shared" or "personal". If not specified, the default value is "shared".
</ParamField>
</Expandable>
</ParamField>
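To round the section off, a short sketch that deletes a personal secret at a specific path using the parameters above; omit `Type` to delete the shared secret instead. The path and key are placeholders, and an authenticated `client` from the earlier sections is assumed.
```go
// Sketch: delete the personal version of SMTP_PASSWORD under /backend.
if _, err := client.Secrets().Delete(infisical.DeleteSecretOptions{
  ProjectID:   "PROJECT_ID",
  Environment: "dev",
  SecretPath:  "/backend",
  SecretKey:   "SMTP_PASSWORD",
  Type:        "personal",
}); err != nil {
  fmt.Println(err)
  os.Exit(1)
}
```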

View File

@@ -48,44 +48,44 @@ The platform utilizes Postgres to persist all of its data and Redis for caching
Without email configuration, Infisical's core functions like sign-up/login and secret operations work, but this disables multi-factor authentication, email invites for projects, alerts for suspicious logins, and all other email-dependent features.
<Accordion title="Generic Configuration">
<ParamField query="SMTP_HOST" type="string" default="none" optional>
Hostname to connect to for establishing SMTP connections
</ParamField>
{" "}
<ParamField query="SMTP_USERNAME" type="string" default="none" optional>
Credential to connect to host (e.g. team@infisical.com)
<ParamField query="SMTP_HOST" type="string" default="none" optional>
Hostname to connect to for establishing SMTP connections
</ParamField>
{" "}
<ParamField query="SMTP_PASSWORD" type="string" default="none" optional>
Credential to connect to host
</ParamField>
{" "}
<ParamField query="SMTP_PORT" type="string" default="587" optional>
Port to connect to for establishing SMTP connections
</ParamField>
{" "}
<ParamField query="SMTP_SECURE" type="string" default="none" optional>
If true, use TLS when connecting to host. If false, TLS will be used if
STARTTLS is supported
<ParamField query="SMTP_USERNAME" type="string" default="none" optional>
Credential to connect to host (e.g. team@infisical.com)
</ParamField>
{" "}
<ParamField query="SMTP_PASSWORD" type="string" default="none" optional>
Credential to connect to host
</ParamField>
<ParamField query="SMTP_FROM_ADDRESS" type="string" default="none" optional>
Email address to be used for sending emails
</ParamField>
<ParamField query="SMTP_FROM_NAME" type="string" default="none" optional>
Name label to be used in From field (e.g. Team)
</ParamField>
<ParamField query="SMTP_FROM_NAME" type="string" default="none" optional>
Name label to be used in From field (e.g. Team)
</ParamField>
<ParamField query="SMTP_IGNORE_TLS" type="bool" default="false" optional>
If this is `true` and `SMTP_PORT` is not 465 then TLS is not used even if the
server supports STARTTLS extension.
</ParamField>
<ParamField query="SMTP_REQUIRE_TLS" type="bool" default="true" optional>
If this is `true` and `SMTP_PORT` is not 465 then Infisical tries to use
STARTTLS even if the server does not advertise support for it. If the
connection can not be encrypted then message is not sent.
</ParamField>
<ParamField query="SMTP_TLS_REJECT_UNAUTHORIZED" type="bool" default="true" optional>
If this is `true`, Infisical will validate the server's SSL/TLS certificate and reject the connection if the certificate is invalid or not trusted. If set to `false`, the client will accept the server's certificate regardless of its validity, which can be useful in development or testing environments but is not recommended for production use.
</ParamField>
</Accordion>
<Accordion title="Twilio SendGrid">
@@ -105,7 +105,6 @@ SMTP_HOST=smtp.sendgrid.net
SMTP_USERNAME=apikey
SMTP_PASSWORD=SG.rqFsfjxYPiqE1lqZTgD_lz7x8IVLx # your SendGrid API Key from step above
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails
SMTP_FROM_NAME=Infisical
```
@@ -128,7 +127,6 @@ SMTP_HOST=smtp.mailgun.org # obtained from credentials page
SMTP_USERNAME=postmaster@example.mailgun.org # obtained from credentials page
SMTP_PASSWORD=password # obtained from credentials page
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails
SMTP_FROM_NAME=Infisical
```
@@ -159,7 +157,6 @@ SMTP_FROM_NAME=Infisical
SMTP_USERNAME=xxx # your SMTP username
SMTP_PASSWORD=xxx # your SMTP password
SMTP_PORT=465
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails
SMTP_FROM_NAME=Infisical
```
@@ -187,7 +184,6 @@ SMTP_HOST=smtp.socketlabs.com
SMTP_USERNAME=username # obtained from your credentials
SMTP_PASSWORD=password # obtained from your credentials
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails
SMTP_FROM_NAME=Infisical
```
@@ -229,7 +225,6 @@ SMTP_HOST=smtp.resend.com
SMTP_USERNAME=resend
SMTP_PASSWORD=YOUR_API_KEY
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails
SMTP_FROM_NAME=Infisical
```
@@ -253,7 +248,6 @@ SMTP_HOST=smtp.gmail.com
SMTP_USERNAME=hey@gmail.com # your email
SMTP_PASSWORD=password # your password
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@gmail.com
SMTP_FROM_NAME=Infisical
```
@@ -277,7 +271,6 @@ SMTP_HOST=smtp.office365.com
SMTP_USERNAME=username@yourdomain.com # your username
SMTP_PASSWORD=password # your password
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=username@yourdomain.com
SMTP_FROM_NAME=Infisical
```
@@ -294,7 +287,6 @@ SMTP_HOST=smtp.zoho.com
SMTP_USERNAME=username # your email
SMTP_PASSWORD=password # your password
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@example.com # your personal Zoho email or domain-based email linked to Zoho Mail
SMTP_FROM_NAME=Infisical
```
@@ -318,6 +310,12 @@ SMTP_FROM_NAME=Infisical
By default, users can only log in via the email/password login method.
To log into Infisical with OAuth providers such as Google, configure the associated variables.
<ParamField query="DEFAULT_SAML_ORG_SLUG" type="string">
When set, all visits to the Infisical login page will automatically redirect users of your Infisical instance to the SAML identity provider associated with the specified organization slug.
</ParamField>
<Accordion title="Google">
Follow the detailed guide to configure [Google SSO](/documentation/platform/sso/google)
@@ -369,11 +367,6 @@ To login into Infisical with OAuth providers such as Google, configure the assoc
information.
</Accordion>
<ParamField query="NEXT_PUBLIC_SAML_ORG_SLUG" type="string">
Configure SAML organization slug to automatically redirect all users of your
Infisical instance to the identity provider.
</ParamField>
## Native secret integrations
To help you sync secrets from Infisical to services such as Github and Gitlab, Infisical provides native integrations out of the box.

View File

@@ -2,6 +2,7 @@ ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG NEXT_INFISICAL_PLATFORM_VERSION=next-infisical-platform-version
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:16-alpine AS deps
# Install dependencies only when needed. Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
@@ -31,6 +32,8 @@ ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -57,7 +60,9 @@ ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
ARG NEXT_INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION=$NEXT_INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --chown=nextjs:nodejs --chmod=555 scripts ./scripts
COPY --from=builder /app/public ./public
RUN chown nextjs:nodejs ./public/data

View File

@@ -1,13 +1,12 @@
const path = require("path");
const ContentSecurityPolicy = `
default-src 'self';
script-src 'self' https://app.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com 'unsafe-inline' 'unsafe-eval';
style-src 'self' https://rsms.me 'unsafe-inline';
script-src 'self' https://app.posthog.com https://js.stripe.com https://api.stripe.com https://widget.intercom.io https://js.intercomcdn.com https://hcaptcha.com https://*.hcaptcha.com 'unsafe-inline' 'unsafe-eval';
style-src 'self' https://rsms.me 'unsafe-inline' https://hcaptcha.com https://*.hcaptcha.com;
child-src https://api.stripe.com;
frame-src https://js.stripe.com/ https://api.stripe.com https://www.youtube.com/;
connect-src 'self' wss://nexus-websocket-a.intercom.io https://api-iam.intercom.io https://api.heroku.com/ https://id.heroku.com/oauth/authorize https://id.heroku.com/oauth/token https://checkout.stripe.com https://app.posthog.com https://api.stripe.com https://api.pwnedpasswords.com http://127.0.0.1:*;
frame-src https://js.stripe.com/ https://api.stripe.com https://www.youtube.com/ https://hcaptcha.com https://*.hcaptcha.com;
connect-src 'self' wss://nexus-websocket-a.intercom.io https://api-iam.intercom.io https://api.heroku.com/ https://id.heroku.com/oauth/authorize https://id.heroku.com/oauth/token https://checkout.stripe.com https://app.posthog.com https://api.stripe.com https://api.pwnedpasswords.com http://127.0.0.1:* https://hcaptcha.com https://*.hcaptcha.com;
img-src 'self' https://static.intercomassets.com https://js.intercomcdn.com https://downloads.intercomcdn.com https://*.stripe.com https://i.ytimg.com/ data:;
media-src https://js.intercomcdn.com;
font-src 'self' https://fonts.intercomcdn.com/ https://maxcdn.bootstrapcdn.com https://rsms.me https://fonts.gstatic.com;

View File

@@ -4,7 +4,6 @@
"requires": true,
"packages": {
"": {
"name": "frontend",
"dependencies": {
"@casl/ability": "^6.5.0",
"@casl/react": "^3.1.0",
@@ -19,6 +18,7 @@
"@fortawesome/free-regular-svg-icons": "^6.1.1",
"@fortawesome/free-solid-svg-icons": "^6.1.2",
"@fortawesome/react-fontawesome": "^0.2.0",
"@hcaptcha/react-hcaptcha": "^1.10.1",
"@headlessui/react": "^1.7.7",
"@hookform/resolvers": "^2.9.10",
"@octokit/rest": "^19.0.7",
@@ -3200,6 +3200,24 @@
"react": ">=16.3"
}
},
"node_modules/@hcaptcha/loader": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/@hcaptcha/loader/-/loader-1.2.4.tgz",
"integrity": "sha512-3MNrIy/nWBfyVVvMPBKdKrX7BeadgiimW0AL/a/8TohNtJqxoySKgTJEXOQvYwlHemQpUzFrIsK74ody7JiMYw=="
},
"node_modules/@hcaptcha/react-hcaptcha": {
"version": "1.10.1",
"resolved": "https://registry.npmjs.org/@hcaptcha/react-hcaptcha/-/react-hcaptcha-1.10.1.tgz",
"integrity": "sha512-P0en4gEZAecah7Pt3WIaJO2gFlaLZKkI0+Tfdg8fNqsDxqT9VytZWSkH4WAkiPRULK1QcGgUZK+J56MXYmPifw==",
"dependencies": {
"@babel/runtime": "^7.17.9",
"@hcaptcha/loader": "^1.2.1"
},
"peerDependencies": {
"react": ">= 16.3.0",
"react-dom": ">= 16.3.0"
}
},
"node_modules/@headlessui/react": {
"version": "1.7.18",
"resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.18.tgz",

View File

@@ -26,6 +26,7 @@
"@fortawesome/free-regular-svg-icons": "^6.1.1",
"@fortawesome/free-solid-svg-icons": "^6.1.2",
"@fortawesome/react-fontawesome": "^0.2.0",
"@hcaptcha/react-hcaptcha": "^1.10.1",
"@headlessui/react": "^1.7.7",
"@hookform/resolvers": "^2.9.10",
"@octokit/rest": "^19.0.7",

View File

@@ -4,7 +4,7 @@ scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_POSTHOG_API_KEY
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTERCOM_ID"
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG"
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY" "$NEXT_PUBLIC_CAPTCHA_SITE_KEY"
if [ "$TELEMETRY_ENABLED" != "false" ]; then
echo "Telemetry is enabled"

View File

@@ -6,6 +6,8 @@ scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTER
scripts/replace-variable.sh "$BAKED_NEXT_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG"
scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY" "$NEXT_PUBLIC_CAPTCHA_SITE_KEY"
if [ "$TELEMETRY_ENABLED" != "false" ]; then
echo "Telemetry is enabled"
scripts/set-telemetry.sh true

View File

@@ -30,11 +30,13 @@ export interface IsCliLoginSuccessful {
const attemptLogin = async ({
email,
password,
providerAuthToken
providerAuthToken,
captchaToken
}: {
email: string;
password: string;
providerAuthToken?: string;
captchaToken?: string;
}): Promise<IsCliLoginSuccessful> => {
const telemetry = new Telemetry().getInstance();
return new Promise((resolve, reject) => {
@@ -70,7 +72,8 @@ const attemptLogin = async ({
} = await login2({
email,
clientProof,
providerAuthToken
providerAuthToken,
captchaToken
});
if (mfaEnabled) {
// case: MFA is enabled

View File

@@ -22,11 +22,13 @@ interface IsLoginSuccessful {
const attemptLogin = async ({
email,
password,
providerAuthToken
providerAuthToken,
captchaToken
}: {
email: string;
password: string;
providerAuthToken?: string;
captchaToken?: string;
}): Promise<IsLoginSuccessful> => {
const telemetry = new Telemetry().getInstance();
// eslint-disable-next-line new-cap
@@ -58,6 +60,7 @@ const attemptLogin = async ({
iv,
tag
} = await login2({
captchaToken,
email,
clientProof,
providerAuthToken

View File

@@ -2,5 +2,6 @@ const ENV = process.env.NEXT_PUBLIC_ENV! || "development"; // investigate
const POSTHOG_API_KEY = process.env.NEXT_PUBLIC_POSTHOG_API_KEY!;
const POSTHOG_HOST = process.env.NEXT_PUBLIC_POSTHOG_HOST! || "https://app.posthog.com";
const INTERCOMid = process.env.NEXT_PUBLIC_INTERCOMid!;
const CAPTCHA_SITE_KEY = process.env.NEXT_PUBLIC_CAPTCHA_SITE_KEY!;
export { ENV, INTERCOMid, POSTHOG_API_KEY, POSTHOG_HOST };
export { CAPTCHA_SITE_KEY, ENV, INTERCOMid, POSTHOG_API_KEY, POSTHOG_HOST };

View File

@@ -78,7 +78,8 @@ export const SecretPathInput = ({
const validPaths = inputValue.split("/");
validPaths.pop();
const newValue = `${validPaths.join("/")}/${suggestions[selectedIndex]}/`;
// removed trailing slash
const newValue = `${validPaths.join("/")}/${suggestions[selectedIndex]}`;
onChange?.(newValue);
setInputValue(newValue);
setSecretPath(newValue);

View File

@@ -93,27 +93,29 @@ const initProjectHelper = async ({ projectName }: { projectName: string }) => {
});
try {
secrets?.forEach((secret) => {
createSecret({
workspaceId: project.id,
environment: secret.environment,
type: secret.type,
secretKey: secret.secretName,
secretKeyCiphertext: secret.secretKeyCiphertext,
secretKeyIV: secret.secretKeyIV,
secretKeyTag: secret.secretKeyTag,
secretValueCiphertext: secret.secretValueCiphertext,
secretValueIV: secret.secretValueIV,
secretValueTag: secret.secretValueTag,
secretCommentCiphertext: secret.secretCommentCiphertext,
secretCommentIV: secret.secretCommentIV,
secretCommentTag: secret.secretCommentTag,
secretPath: "/",
metadata: {
source: "signup"
}
});
});
await Promise.allSettled(
(secrets || []).map((secret) =>
createSecret({
workspaceId: project.id,
environment: secret.environment,
type: secret.type,
secretKey: secret.secretName,
secretKeyCiphertext: secret.secretKeyCiphertext,
secretKeyIV: secret.secretKeyIV,
secretKeyTag: secret.secretKeyTag,
secretValueCiphertext: secret.secretValueCiphertext,
secretValueIV: secret.secretValueIV,
secretValueTag: secret.secretValueTag,
secretCommentCiphertext: secret.secretCommentCiphertext,
secretCommentIV: secret.secretCommentIV,
secretCommentTag: secret.secretCommentTag,
secretPath: "/",
metadata: {
source: "signup"
}
})
)
);
} catch (err) {
console.error("Failed to upload secrets", err);
}

View File

@@ -30,6 +30,7 @@ export type Login1DTO = {
};
export type Login2DTO = {
captchaToken?: string;
email: string;
clientProof: string;
providerAuthToken?: string;

View File

@@ -17,6 +17,7 @@ export * from "./keys";
export * from "./ldapConfig";
export * from "./organization";
export * from "./projectUserAdditionalPrivilege";
export * from "./rateLimit";
export * from "./roles";
export * from "./scim";
export * from "./secretApproval";

View File

@@ -73,6 +73,9 @@ export const useCreateIntegration = () => {
}[];
kmsKeyId?: string;
shouldDisableDelete?: boolean;
shouldMaskSecrets?: boolean;
shouldProtectSecrets?: boolean;
shouldEnableDelete?: boolean;
};
}) => {
const {

View File

@@ -0,0 +1,2 @@
export { useUpdateRateLimit } from "./mutation";
export { useGetRateLimit } from "./queries";

View File

@@ -0,0 +1,21 @@
import { useMutation, useQueryClient } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
import { rateLimitQueryKeys } from "./queries";
import { TRateLimit } from "./types";
export const useUpdateRateLimit = () => {
const queryClient = useQueryClient();
return useMutation<TRateLimit, {}, TRateLimit>({
mutationFn: async (opt) => {
const { data } = await apiRequest.put<{ rateLimit: TRateLimit }>("/api/v1/rate-limit", opt);
return data.rateLimit;
},
onSuccess: (data) => {
queryClient.setQueryData(rateLimitQueryKeys.rateLimit(), data);
queryClient.invalidateQueries(rateLimitQueryKeys.rateLimit());
}
});
};

View File

@@ -0,0 +1,34 @@
import { useQuery, UseQueryOptions } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
import { TRateLimit } from "./types";
export const rateLimitQueryKeys = {
rateLimit: () => ["rate-limit"] as const
};
const fetchRateLimit = async () => {
const { data } = await apiRequest.get<{ rateLimit: TRateLimit }>("/api/v1/rate-limit");
return data.rateLimit;
};
export const useGetRateLimit = ({
options = {}
}: {
options?: Omit<
UseQueryOptions<
TRateLimit,
unknown,
TRateLimit,
ReturnType<typeof rateLimitQueryKeys.rateLimit>
>,
"queryKey" | "queryFn"
>;
} = {}) =>
useQuery({
queryKey: rateLimitQueryKeys.rateLimit(),
queryFn: fetchRateLimit,
...options,
enabled: options?.enabled ?? true
});

View File

@@ -0,0 +1,10 @@
export type TRateLimit = {
readRateLimit: number;
writeRateLimit: number;
secretsRateLimit: number;
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};

View File

@@ -264,13 +264,12 @@ export const useGetImportedSecretsAllEnvs = ({
});
const isImportedSecretPresentInEnv = useCallback(
(secPath: string, envSlug: string, secretName: string) => {
(envSlug: string, secretName: string) => {
const selectedEnvIndex = environments.indexOf(envSlug);
if (selectedEnvIndex !== -1) {
const isPresent = secretImports?.[selectedEnvIndex]?.data?.find(
({ secretPath, secrets }) =>
secretPath === secPath && secrets.some((s) => s.key === secretName)
const isPresent = secretImports?.[selectedEnvIndex]?.data?.find(({ secrets }) =>
secrets.some((s) => s.key === secretName)
);
return Boolean(isPresent);

View File

@@ -4,4 +4,5 @@ export type ServerStatus = {
emailConfigured: boolean;
secretScanningConfigured: boolean;
redisConfigured: boolean;
samlDefaultOrgSlug: boolean
};

Some files were not shown because too many files have changed in this diff.