Compare commits

...

237 Commits

Author SHA1 Message Date
8bab6d87bb Merge pull request #2424 from scott-ray-wilson/secrets-pagination-fix
Fix: Account for secret import count in secrets offset
2024-09-13 07:37:42 -07:00
39a49f12f5 fix: account for secret import count in secrets offset 2024-09-13 07:27:52 -07:00
cfd841ea08 Merge pull request #2419 from meetcshah19/meet/add-empty-value-log-gcp
chore: add log on empty value being pushed to gcp
2024-09-13 19:53:38 +05:30
4d67c03e3e Merge pull request #2423 from scott-ray-wilson/secrets-pagination
Feature: Secrets Overview Page Pagination/Optimizations
2024-09-13 09:56:48 -04:00
8826bc5d60 fix: include imports in secret pagination, and rectify tag/value search not working for secrets 2024-09-13 06:25:13 -07:00
03fdce67f1 Merge pull request #2417 from akhilmhdh/fix/saml-entra
fix: resolved entra failing
2024-09-13 09:08:07 -04:00
72f3f7980e Merge pull request #2414 from Infisical/misc/address-minor-cert-lint-issues
misc: addressed minor cert lint issues
2024-09-13 20:57:40 +08:00
f1aa2fbd84 chore: better log string 2024-09-13 15:34:12 +05:30
217de6250f feat: pagination for main secret page 2024-09-13 14:12:53 +05:30
f742bd01d9 refactor to useCallback select instead of queryFn 2024-09-12 22:47:23 -07:00
3fe53d5183 remove unused import 2024-09-12 22:08:16 -07:00
a5f5f803df feature: secret overview page pagination/optimizations 2024-09-12 21:44:38 -07:00
c37e3ba635 misc: addressed comments 2024-09-13 12:44:12 +08:00
55279e5e41 Merge pull request #2422 from Infisical/pki-docs-improvement
Update README (Expand on PKI / New Features)
2024-09-12 20:16:41 -07:00
88fb37e8c6 Made changes as per review 2024-09-12 20:14:25 -07:00
6271dcc25d Fix mint.json openapi link back 2024-09-12 20:02:40 -07:00
0f7faa6bfe Update README to include newer features, expand on PKI, separate PKI endpoints into separate section in API reference 2024-09-12 19:58:55 -07:00
4ace339d5b Update README to include newer features, expand on PKI, separate PKI endpoints into separate section in API reference 2024-09-12 19:57:37 -07:00
e8c0d1ece9 fix: resolved entra failing 2024-09-13 07:18:49 +05:30
bb1977976c Merge pull request #2421 from Infisical/maidful-edwdwqdhwjq
revert PR #2412
2024-09-12 20:43:38 -04:00
bb3da75870 Minor text updates 2024-09-12 17:26:56 -07:00
088e888560 Merge pull request #2420 from scott-ray-wilson/identity-pagination-fix
Fix: Apply Project Identity Pagination Prior to Left Join of Roles
2024-09-12 20:23:03 -04:00
180241fdf0 revert PR #2412 2024-09-13 00:15:26 +00:00
93f27a7ee8 improvement: make limit conditional 2024-09-12 16:19:22 -07:00
ed3bc8dd27 fix: apply project identity offset/limit separate from left joins 2024-09-12 16:11:58 -07:00
8dc4809ec8 Merge pull request #2416 from akhilmhdh/ui/combobox
UI/combobox
2024-09-12 18:50:43 -04:00
a55d64e430 chore: add log on empty value being pushed to gcp 2024-09-13 03:52:09 +05:30
02d54da74a resolve change requests 2024-09-12 15:22:05 -07:00
d660168700 fix: org invite check only when needed 2024-09-13 00:35:48 +05:30
1c75fc84f0 feat: added a temporary combobox for identity addition to project 2024-09-13 00:35:48 +05:30
f63da87c7f Merge remote-tracking branch 'origin/main' into misc/address-minor-cert-lint-issues 2024-09-13 01:46:00 +08:00
53b9fe2dec Merge pull request #2401 from Infisical/feat/add-key-usages-for-template-and-cert
feat: add support for configuring certificate key usage and extended key usage
2024-09-13 00:55:19 +08:00
87dc0eed7e fix: addressed tslint errors 2024-09-12 23:25:26 +08:00
f2dd6f94a4 Merge pull request #2409 from scott-ray-wilson/identity-pagination
Feature: Project and Org Identities Table Additions: Pagination, Search and Sort
2024-09-12 11:22:45 -04:00
ac26ae3893 misc: addressed minor cert lint issues 2024-09-12 23:16:49 +08:00
4c65e9910a resolve merge conflict 2024-09-12 08:03:10 -07:00
5150c102e6 Merge pull request #2380 from Infisical/daniel/invite-multiple-members-to-project
feat: invite multiple members to projects with role assignment
2024-09-12 11:16:41 +04:00
41c29d41e1 Update AddMemberModal.tsx 2024-09-12 11:13:39 +04:00
4de33190a9 Rebase fixes 2024-09-12 11:12:45 +04:00
7cfecb39e4 Update AddMemberModal.tsx 2024-09-12 11:08:25 +04:00
7524b83c29 Delete project-membership-fns.ts 2024-09-12 11:08:25 +04:00
7a41cdf51b Fix: type errors 2024-09-12 11:08:25 +04:00
17d99cb2cf fix: circular dependencies and query invalidation 2024-09-12 11:07:41 +04:00
bd0da0ff74 Update AddMemberModal.tsx 2024-09-12 11:03:20 +04:00
d2a54234f4 Rebase with Akhi 2024-09-12 11:03:20 +04:00
626262461a feat: assign roles when inviting members to project 2024-09-12 11:03:20 +04:00
93ba29e57f Feat: Invite multiple users to project with multiple roles 2024-09-12 11:03:20 +04:00
1581aa088d Update org-admin-service.ts 2024-09-12 11:03:20 +04:00
ceab951bca feat: remove project role from workspace user encryption computation 2024-09-12 11:03:20 +04:00
2e3dcc50ae API doc 2024-09-12 11:03:20 +04:00
a79087670e misc: addressed comments and doc changes 2024-09-12 13:27:39 +08:00
7b04c08fc7 Merge pull request #2412 from meetcshah19/meet/fix-org-selection
fix: redirect to selected org if already present
2024-09-12 10:14:56 +05:30
70842b8e5e Merge pull request #2411 from akhilmhdh/debug/entra-saml-logpoint
feat: debug added log points for entra failing saml
2024-09-11 19:00:36 -04:00
36e3e4c1b5 fix: redirect to selected org if already present 2024-09-12 03:37:55 +05:30
ce9b66ef14 address feedback suggestions 2024-09-11 12:40:27 -07:00
1384c8e855 feat: debug added log points for entra failing saml 2024-09-12 00:19:16 +05:30
f213c75ede Merge pull request #2410 from Infisical/misc/slack-integration-doc-and-ui-updates
misc: added cloud users guide for slack and channel dropdown fix
2024-09-11 14:36:15 -04:00
6ade708e19 misc: added cloud users guide for slack and other ui updates 2024-09-12 02:23:57 +08:00
ce3af41ebc Merge pull request #2388 from Infisical/daniel/permission-visualization
feat: user details page audit logs & groups visualization
2024-09-11 21:45:15 +04:00
e442f10fa5 Fix merge conflicts 2024-09-11 10:38:47 -07:00
2e8ad18285 Merge remote-tracking branch 'origin' into daniel/permission-visualization 2024-09-11 10:32:17 -07:00
f03ca7f916 Minor adjustments 2024-09-11 10:30:16 -07:00
bfa533e9d2 misc: api property description 2024-09-11 22:59:19 +08:00
a8759e7410 feat: added support for custom extended key usages 2024-09-11 22:38:36 +08:00
af1905a39e Merge pull request #2406 from meetcshah19/meet/fix-email-capitalization
Send lower case emails to backend
2024-09-11 20:07:14 +05:30
16182a9d1d feature: project and org identity pagination, search and sort 2024-09-11 07:22:08 -07:00
1321aa712f Merge pull request #2358 from Infisical/feat/native-slack-integration
feat: native slack integration
2024-09-11 09:36:25 -04:00
c1f61f2db4 feat: added custom key usages support for sign endpoint 2024-09-11 20:26:33 +08:00
5ad00130ea Merge pull request #2384 from akhilmhdh/feat/org-project-invite
Manage users without waiting for confirmation of mail
2024-09-11 13:06:28 +04:00
ea5e8e29e6 Requested changes 2024-09-11 12:45:14 +04:00
e7f89bdfef doc: add note for private channels 2024-09-11 13:50:40 +08:00
d23a7e41f3 misc: addressed comments 2024-09-11 13:29:43 +08:00
52a885716d feat: changes on review comments 2024-09-11 10:46:49 +05:30
3fc907f076 fix: send lower case emails to backend 2024-09-11 04:38:00 +05:30
eaf10483c0 Merge pull request #2405 from Infisical/fix-azure-saml-map-docs
Fix Stated Map for Azure SAML Attributes
2024-09-10 16:46:40 -04:00
dcd0234fb5 Fix stated map for azure saml attributes 2024-09-10 13:16:36 -07:00
4dda270e8e Requested changes 2024-09-10 23:29:23 +04:00
4e6b289e1b misc: integrated custom key usages for issue-cert endpoint 2024-09-11 01:57:16 +08:00
c1cb85b49f Merge pull request #2404 from akhilmhdh/fix/secret-reference-pass
Secret reference skip if not found
2024-09-10 13:17:56 -04:00
ed71e651f6 fix: secret reference skip if not found 2024-09-10 22:23:40 +05:30
6fab7d9507 Merge remote-tracking branch 'origin/main' into feat/add-key-usages-for-template-and-cert 2024-09-11 00:22:04 +08:00
1a11dd954b Merge pull request #2395 from Infisical/misc/allow-wildcard-san-value
misc: allow wildcard SAN domain value for certificates
2024-09-11 00:19:43 +08:00
5d3574d3f6 Merge pull request #2397 from Infisical/cert-template-enforcement
Certificate Template Enforcement Option + PKI UX Improvements
2024-09-10 09:19:37 -07:00
aa42aa05aa misc: updated docs 2024-09-11 00:13:44 +08:00
7a36badb23 misc: addressed review comments 2024-09-11 00:11:19 +08:00
9ce6fd3f8e Made required adjustments based on review 2024-09-10 08:18:31 -07:00
a549c8b9e3 Merge pull request #2353 from Infisical/daniel/cli-run-watch-mode
feat(cli): `run` command watch mode
2024-09-10 10:39:06 -04:00
1c749c84f2 misc: key usages setup 2024-09-10 21:42:41 +08:00
1bc1feb843 Merge pull request #2399 from sanyarajan/patch-1
Remove reference to Okta in Azure SAML setup
2024-09-10 08:46:36 -04:00
80ca115ccd Merge pull request #2396 from Infisical/daniel/cli-stale-session
fix: stale session after logging into CLI
2024-09-10 08:27:16 -04:00
5a6bb90870 Remove reference to Okta in Azure SAML setup 2024-09-10 12:25:11 +02:00
de7a693a6a Merge pull request #2391 from Infisical/daniel/rabbitmq-dynamic-secrets
feat(dynamic-secrets): Rabbit MQ
2024-09-10 12:54:56 +05:30
096417281e Update rabbit-mq.ts 2024-09-10 11:21:52 +04:00
763a96faf8 Update rabbit-mq.ts 2024-09-10 11:21:52 +04:00
870eaf9301 docs(dynamic-secrets): rabbit mq 2024-09-10 11:21:52 +04:00
10abf192a1 chore(docs): cleanup incorrectly formatted images 2024-09-10 11:21:52 +04:00
508f697bdd feat(dynamic-secrets): RabbitMQ 2024-09-10 11:21:52 +04:00
8ea8a6f72e Fix: ElasticSearch provider typo 2024-09-10 11:17:35 +04:00
54e6f4b607 Requested changes 2024-09-10 11:07:25 +04:00
ea3b3c5cec Merge pull request #2394 from Infisical/misc/update-kms-of-existing-params-for-integration
misc: ensure that selected kms key in aws param integration is followed
2024-09-10 12:51:06 +08:00
a8fd83652d Update docs for PKI issuer secret target output 2024-09-09 19:55:02 -07:00
45f3675337 Merge pull request #2389 from Infisical/misc/support-glob-patterns-oidc
misc: support glob patterns for OIDC
2024-09-09 18:22:51 -04:00
87a9a87dcd Show cert template ID on manage policies modal 2024-09-09 14:35:46 -07:00
0b882ece8c Update certificate / template docs 2024-09-09 14:22:26 -07:00
e005e94165 Merge remote-tracking branch 'origin' into cert-template-enforcement 2024-09-09 12:47:06 -07:00
0e07eaaa01 Fix cert template enforcement migration check 2024-09-09 12:45:33 -07:00
e10e313af3 Finish cert template enforcement 2024-09-09 12:42:56 -07:00
e6c0bbb25b fix: stale session after logging into CLI 2024-09-09 23:15:58 +04:00
2b39d9e6c4 Merge pull request #2386 from Infisical/pki-issuer-docs
Documentation for Infisical PKI Issuer for K8s Cert-Manager
2024-09-09 14:33:15 -04:00
cf42279e5b misc: allow wildcard san domain value for certificates 2024-09-10 01:20:31 +08:00
fbc4b47198 misc: ensure that selected kms key in aws param integration is applied 2024-09-09 22:23:22 +08:00
4baa6b1d3d Merge pull request #2390 from akhilmhdh/dynamic-secret/mongodb
Dynamic secret/mongodb
2024-09-09 19:50:03 +05:30
74ee77f41e Merge pull request #2392 from Infisical/misc/throw-saml-sso-errors-properly
misc: throw SAML or SSO errors properly
2024-09-09 08:57:57 -04:00
ee1b12173a misc: throw saml sso errors properly 2024-09-09 19:32:18 +08:00
1bfbc7047c Merge pull request #2382 from srijan-paul/patch-1
fix: small typo (`fasitfy` -> `fastify`)
2024-09-09 15:31:16 +04:00
a410d560a7 feat: removed an image 2024-09-09 16:40:14 +05:30
99e150cc1d feat: updated doc with requested changes 2024-09-09 16:32:49 +05:30
e7191c2f71 feat: made project role multi support for org invite 2024-09-09 16:17:59 +05:30
f6deb0969a feat: added atlas warning to doc 2024-09-09 15:24:30 +05:30
1163e41e64 docs: dynamic secret mongodb\ 2024-09-09 15:00:21 +05:30
a0f93f995e feat: dynamic secret mongodb ui 2024-09-09 15:00:01 +05:30
50fcf97a36 feat: dynamic secret api changes for mongodb 2024-09-09 14:59:34 +05:30
8e68d21115 misc: support glob patterns for oidc 2024-09-09 17:17:12 +08:00
372b6cbaea fix: audit log fixes 2024-09-09 10:42:39 +04:00
26add7bfd1 fix: remove delete project membership option 2024-09-09 10:42:10 +04:00
364302a691 Merge pull request #2387 from akhilmhdh/docs/fluent-bit-log-stream
feat: added doc for audit log stream via fluentbit
2024-09-08 15:08:46 -04:00
c8dc29d59b revise audit log stream PR 2024-09-08 15:04:30 -04:00
f3d207ab5c feat: better user visualization 2024-09-08 20:20:34 +04:00
e1cd632546 improvements to user group ui 2024-09-08 20:20:10 +04:00
655ee4f118 Update mutations.tsx 2024-09-08 20:19:50 +04:00
34a2452bf5 feat: fetch all user group memberships 2024-09-08 20:19:10 +04:00
7846a81636 chore: new group with project memberships type 2024-09-08 19:28:17 +04:00
6bdf3455f5 Update mutations.tsx 2024-09-08 19:27:31 +04:00
556ae168dd feat: fetch specific user group memberships 2024-09-08 19:25:48 +04:00
7b19d2aa6a feat: audit logs on organization-level support 2024-09-08 19:24:04 +04:00
bda9bb3d61 fix: rename list audit logs and include project 2024-09-08 19:21:17 +04:00
4b66a9343c feat: audit logs section 2024-09-08 19:20:32 +04:00
4930d7fc02 feat: user groups section 2024-09-08 19:20:18 +04:00
ad644db512 feat: audit logs on organization-level 2024-09-08 19:19:55 +04:00
3707b75349 feat: added doc for audit log stream via fluentbit 2024-09-08 20:33:47 +05:30
ffaf145317 misc: removed unused table usage 2024-09-08 17:04:41 +08:00
17b0d0081d misc: moved away from dedicated slack admin config 2024-09-08 17:00:50 +08:00
ecf177fecc misc: added root workflow integration structure 2024-09-08 13:49:32 +08:00
6112bc9356 Add certificate template field + warning to pki issuer docs 2024-09-07 19:23:11 -07:00
6c3156273c Add docs for infisical pki issuer 2024-09-07 16:28:28 -07:00
eb7c804bb9 feat(ui): made corresponding changes in api call made from frontend 2024-09-06 23:33:57 +05:30
9d7bfae519 feat: made default role on project invite as no access to org level 2024-09-06 23:33:12 +05:30
1292b5bf56 feat(api): manage users in org and project level without waiting for confirmation 2024-09-06 23:31:55 +05:30
f09e18a706 Merge pull request #2383 from Infisical/fix/resolve-cert-invalid-issue
fix: resolve cert invalid issue due to invalid root EKU
2024-09-07 01:09:24 +08:00
5d9a43a3fd fix: resolve cert invalid issue 2024-09-07 00:42:55 +08:00
12154c869f fix: small typo (fasitfy -> fastify) 2024-09-06 18:10:17 +05:30
8d66272ab2 Merge pull request #2366 from ThallesP/patch-1
docs: add mention of SITE_URL as being required
2024-09-05 16:06:49 -04:00
0e44e630cb Merge pull request #2377 from Infisical/daniel/refactor-circleci-integration
fix(integrations/circle-ci): Refactored Circle CI integration
2024-09-05 16:04:04 -04:00
49c4929c9c Update azure-key-vault.mdx 2024-09-05 15:13:42 -04:00
da561e37c5 Fix: Backwards compatibility and UI fixes 2024-09-05 21:43:10 +04:00
ebc584d36f Merge pull request #2379 from Infisical/fix/client-secret-patch
Update identity-ua-client-secret-dal.ts
2024-09-05 11:02:35 -04:00
656d979d7d Update identity-ua-client-secret-dal.ts 2024-09-05 20:29:18 +05:30
a29fb613b9 Requested changes 2024-09-05 18:48:20 +04:00
5382f3de2d Merge pull request #2378 from Infisical/vmatsiiako-patch-elasticsearch-1
Elasticsearch is one word
2024-09-05 09:11:18 -04:00
b2b858f7e8 Elasticsearch is one word 2024-09-05 09:07:23 -04:00
dbc5b5a3d1 doc: native slack integration 2024-09-05 18:28:38 +08:00
8f3d328b9a Update integration-sync-secret.ts 2024-09-05 13:38:31 +04:00
b7d683ee1b fix(integrations/circle-ci): Refactored Circle CI integration
The integration seemingly never worked in the first place due to improper project slugs. This PR resolves it.
2024-09-05 13:30:20 +04:00
9bd6ec19c4 revert "docs: add mention of SITE_URL as being required" 2024-09-04 18:04:25 -03:00
03fd0a1eb9 chore: add site url as required in kubernetes helm deployment 2024-09-04 18:03:18 -03:00
97023e7714 chore: add SITE_URL as required in docker installation 2024-09-04 17:58:42 -03:00
1d23ed0680 chore: add site url as required in envars docs 2024-09-04 17:56:38 -03:00
1bd66a614b misc: added channels count validator 2024-09-05 02:36:27 +08:00
802a9cf83c misc: formatting changes 2024-09-05 01:42:33 +08:00
9e95fdbb58 misc: added proper error message hints 2024-09-05 01:20:12 +08:00
803f56cfe5 misc: added placeholder 2024-09-05 00:46:00 +08:00
b163a6c5ad feat: integration to access request approval 2024-09-05 00:42:21 +08:00
ddc119ceb6 Merge remote-tracking branch 'origin/main' into feat/native-slack-integration 2024-09-05 00:36:44 +08:00
302e068c74 Merge pull request #2376 from Infisical/daniel/info-notif-for-secret-changes
fix(ui): show info notification when secret change is pending review
2024-09-04 20:09:58 +04:00
95b92caff3 Merge pull request #2375 from Infisical/daniel/fix-access-policy-creation
fix(access-requests): policy creation and edits
2024-09-04 20:00:04 +04:00
5d894b6d43 fix(ui): info notification when secret change is pending review 2024-09-04 19:57:32 +04:00
09e621539e misc: finalized labels 2024-09-04 23:54:19 +08:00
dab3e2efad fix(access-requests): policy creation and edits 2024-09-04 19:46:44 +04:00
5e0b78b104 Requested changes 2024-09-04 19:34:51 +04:00
27852607d1 Merge remote-tracking branch 'origin/main' into feat/native-slack-integration 2024-09-04 23:10:15 +08:00
956719f797 feat: admin slack configuration 2024-09-04 23:06:30 +08:00
04cbbccd25 Merge pull request #2374 from Infisical/revert-2362-bugfix/incorrect-alignment-of-logo-on-login-page
Revert "FIX : padding-and-alignment-login-page"
2024-09-04 19:16:08 +05:30
7f48e9d62e Revert "FIX : padding-and-alignment-login-page" 2024-09-04 19:12:58 +05:30
8a0018eff2 Merge pull request #2373 from Infisical/daniel/elastisearch-dynamic-secrets
feat(dynamic-secrets): elastic search support
2024-09-04 15:23:23 +04:00
e6a920caa3 Merge pull request #2362 from mukulpadwal/bugfix/incorrect-alignment-of-logo-on-login-page
FIX : padding-and-alignment-login-page
2024-09-04 16:15:36 +05:30
71b8c59050 feat: slack channel suggestions 2024-09-04 18:03:07 +08:00
11411ca4eb Requested changes 2024-09-04 13:47:35 +04:00
b7c79fa45b Requested changes 2024-09-04 13:47:35 +04:00
18951b99de Further doc fixes 2024-09-04 13:47:17 +04:00
bd05c440c3 Update elastic-search.ts 2024-09-04 13:47:17 +04:00
9ca5013a59 Update mint.json 2024-09-04 13:47:17 +04:00
b65b8bc362 docs(dynamic-secrets): Elastic Search documentation 2024-09-04 13:47:17 +04:00
f494c182ff Update aws-elasticache.mdx 2024-09-04 13:47:17 +04:00
2fae822e1f Fix docs for AWS ElastiCache 2024-09-04 13:47:17 +04:00
5df140cbd5 feat(dynamic-secrets): ElasticSearch support 2024-09-04 13:47:17 +04:00
d93cbb023d Update redis.ts 2024-09-04 13:47:17 +04:00
9056d1be0c feat(dynamic-secrets): ElasticSearch support 2024-09-04 13:47:17 +04:00
5f503949eb Installed elasticsearch SDK 2024-09-04 13:47:16 +04:00
15c5fe4095 misc: slack integration reinstall 2024-09-04 15:44:58 +08:00
91ebcca0fd Update run.go 2024-09-04 10:44:39 +04:00
9cf917de07 Merge pull request #2360 from Infisical/daniel/redirect-node-docs
feat(integrations): Add visibility support to Github Integration
2024-09-04 10:32:13 +04:00
0826b40e2a Fixes and requested changes 2024-09-04 10:18:17 +04:00
911b62c63a Update run.go 2024-09-04 10:05:57 +04:00
5343c7af00 misc: added auto redirect to workflow settings tab 2024-09-04 02:22:53 +08:00
8c03c160a9 misc: implemented secret approval request and project audit logs 2024-09-04 01:48:08 +08:00
604b0467f9 feat: finalized integration selection in project settings 2024-09-04 00:34:03 +08:00
a2b555dd81 feat: finished org-level integration management flow 2024-09-03 22:08:31 +08:00
ce7bb82f02 Merge pull request #2313 from akhilmhdh/feat/test-import
Feat/test import
2024-09-03 09:33:26 -04:00
7cd092c0cf Merge pull request #2368 from akhilmhdh/fix/audit-log-loop
Audit log queue looping
2024-09-03 08:32:04 -04:00
cbfb9af0b9 feat: moved log points inside each function respectively 2024-09-03 17:59:32 +05:30
ef236106b4 feat: added log points for resource clean up tasks 2024-09-03 17:37:14 +05:30
773a338397 fix: resolved looping in audit log resource queue 2024-09-03 17:33:38 +05:30
afb5820113 feat: added 1-N sink import pattern testing and fixed padding issue 2024-09-03 15:02:49 +05:30
5acc0fc243 Update build-staging-and-deploy-aws.yml 2024-09-02 23:56:24 -04:00
c56469ecdb Run integration tests build building gamma 2024-09-02 23:55:05 -04:00
23c362f9cd docs: add mention of SITE_URL as being required 2024-09-02 12:54:00 -03:00
9120367562 misc: audit logs for slack integration management 2024-09-02 23:15:00 +08:00
f509464947 slack integration reinstall 2024-09-02 21:05:30 +08:00
07fd489982 feat: slack integration deletion 2024-09-02 20:34:13 +08:00
f6d3831d6d feat: finished slack integration update 2024-09-02 20:13:01 +08:00
d604ef2480 feat: integrated secret approval request 2024-09-02 15:38:05 +08:00
45e471b16a FIX : padding-and-alignment-login-page 2024-08-31 16:25:54 +05:30
fe096772e0 feat: initial installation flow 2024-08-31 02:56:02 +08:00
7dbe8dd3c9 feat: patched lock file 2024-08-30 10:56:28 +05:30
0dec602729 feat: changed all licence type to license 2024-08-30 10:52:46 +05:30
66ded779fc feat: added secret version test with secret import 2024-08-30 10:52:46 +05:30
01d24291f2 feat: resolved type error 2024-08-30 10:52:46 +05:30
55b36b033e feat: changed expand secret factory to iterative solution 2024-08-30 10:52:46 +05:30
8f461bf50c feat: added test for checking secret reference expansion 2024-08-30 10:52:46 +05:30
1847491cb3 feat: implemented new secret reference strategy 2024-08-30 10:52:46 +05:30
541c7b63cd feat: added test for checking secrets from import via replication and non replication 2024-08-30 10:52:45 +05:30
7e5e177680 feat: vitest mocking by alias for license fns 2024-08-30 10:52:45 +05:30
40f552e4f1 feat: fixed typo in license function file name 2024-08-30 10:52:45 +05:30
ecb54ee3b3 feat: resolved migration down failing for secret approval policy change 2024-08-30 10:52:45 +05:30
35a63b8cc6 Fix: Fixed merge related changes 2024-08-29 22:54:49 +04:00
2a4596d415 Merge branch 'main' into daniel/cli-run-watch-mode 2024-08-29 22:37:35 +04:00
35e476d916 Fix: Runtime bugs 2024-08-29 22:35:21 +04:00
332 changed files with 15546 additions and 3041 deletions

View File

@@ -72,3 +72,6 @@ PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=
WORKFLOW_SLACK_CLIENT_ID=
WORKFLOW_SLACK_CLIENT_SECRET=

View File

@@ -6,9 +6,15 @@ permissions:
contents: read
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3

1 .gitignore vendored
View File

@@ -63,6 +63,7 @@ yarn-error.log*
# Editor specific
.vscode/*
.idea/*
frontend-build

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,35 @@
import { seedData1 } from "@app/db/seed-data";
const createPolicy = async (dto: { name: string; secretPath: string; approvers: string[]; approvals: number }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-approvals`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
secretPath: dto.secretPath,
approvers: dto.approvers,
approvals: dto.approvals
}
});
expect(res.statusCode).toBe(200);
return res.json().approval;
};
describe("Secret approval policy router", async () => {
test("Create policy", async () => {
const policy = await createPolicy({
secretPath: "/",
approvals: 1,
approvers: [seedData1.id],
name: "test-policy"
});
expect(policy.name).toBe("test-policy");
});
});

View File

@@ -1,73 +1,61 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
const createSecretImport = async (importPath: string, importEnv: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
const deleteSecretImport = async (id: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// check for default environments
const payload = await createSecretImport(importPath, importEnv);
const payload = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath,
importEnv
});
expect(payload).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
importPath,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.any(String),
slug: importEnv,
id: expect.any(String)
})
})
);
await deleteSecretImport(payload.id);
await deleteSecretImport({
id: payload.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
@@ -89,25 +77,60 @@ describe("Secret Import Router", async () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.any(String),
slug: "prod",
id: expect.any(String)
})
}),
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "staging",
id: expect.any(String)
})
})
])
);
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: prodImportDetails.path,
importEnv: prodImportDetails.envSlug
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: stagingImportDetails.path,
importEnv: stagingImportDetails.envSlug
});
const updateImportRes = await testServer.inject({
method: "PATCH",
@@ -161,22 +184,55 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const deletedImport = await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
// check for default environments
expect(deletedImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.any(String),
slug: "prod",
id: expect.any(String)
})
})
@@ -201,6 +257,552 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
await deleteSecretImport(createdImport2.id);
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
});
});
// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import waterfall pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import multiple destination to one source pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import one source to multiple destination pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const stageImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
const prodImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
return async () => {
await deleteSecretImport({
id: prodImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: stageImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const stagingSecret = await getSecretByNameV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
expect(stagingSecret.secretValue).toBe("stage-value");
const prodSecret = await getSecretByNameV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(prodSecret.secretKey).toBe("PROD_KEY");
expect(prodSecret.secretValue).toBe("prod-value");
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);

View File

@@ -0,0 +1,406 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
"Secret replication waterfall pattern testing - %secretPath",
({ secretPath: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret replication 1-N pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);

View File

@@ -0,0 +1,330 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret expansion", () => {
const projectId = seedData1.projectV3.id;
beforeAll(async () => {
const prodRootFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/",
name: "deep"
});
await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
name: "nested"
});
return async () => {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodRootFolder.id,
workspaceId: projectId,
environmentSlug: "prod"
});
};
});
test("Local secret reference", async () => {
const secrets = [
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "HELLO",
value: "world"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST",
// eslint-disable-next-line
value: "hello ${HELLO}"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST"
});
expect(expandedSecret.secretValue).toBe("hello world");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "TEST",
secretValue: "hello world"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Cross environment secret reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY"
});
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "KEY",
secretValue: "hello testing secret reference"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested"
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: "/deep/nested",
environment: "prod",
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
});
test(
"Replicated secret import secret expansion on local reference and nested reference",
async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested",
isReplication: true
});
// wait for 5 seconds for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
environment: seedData1.environment.slug,
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
},
{ timeout: 10000 }
);
});

View File

@ -8,6 +8,7 @@ type TRawSecret = {
secretComment?: string;
version: number;
};
const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,

View File

@ -0,0 +1,73 @@
type TFolder = {
id: string;
name: string;
};
export const createFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
name: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
name: dto.name,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const deleteFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
id: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const listFolders = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folders as TFolder[];
};
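For orientation, a minimal usage sketch of these folder helpers, assuming the same test context used in the secret tests above (projectId, jwtAuthToken, and seedData1 come from the surrounding e2e setup; the folder name is illustrative):
const folder = await createFolder({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  name: "nested",
  authToken: jwtAuthToken
});
const folders = await listFolders({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  authToken: jwtAuthToken
});
expect(folders.map((el) => el.name)).toContain("nested");
await deleteFolder({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  id: folder.id,
  authToken: jwtAuthToken
});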

View File

@ -0,0 +1,93 @@
type TSecretImport = {
id: string;
importEnv: {
name: string;
slug: string;
id: string;
};
importPath: string;
};
export const createSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
isReplication?: boolean;
secretPath: string;
importPath: string;
importEnv: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
isReplication: dto.isReplication,
path: dto.secretPath,
import: {
environment: dto.importEnv,
path: dto.importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const deleteSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
id: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const listSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
return payload.secretImports as TSecretImport[];
};
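A minimal sketch of the import lifecycle these helpers support, mirroring the secret-reference tests above (projectId, jwtAuthToken, and seedData1 are assumed from the surrounding test setup; the prod paths are illustrative):
const secretImport = await createSecretImport({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  importEnv: "prod",
  importPath: "/deep/nested",
  authToken: jwtAuthToken
});
const imports = await listSecretImport({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  authToken: jwtAuthToken
});
expect(imports.map((el) => el.importPath)).toContain("/deep/nested");
await deleteSecretImport({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  id: secretImport.id,
  authToken: jwtAuthToken
});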

View File

@ -0,0 +1,128 @@
import { SecretType } from "@app/db/schemas";
type TRawSecret = {
secretKey: string;
secretValue: string;
secretComment?: string;
version: number;
};
export const createSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
value: string;
comment?: string;
authToken: string;
type?: SecretType;
}) => {
const createSecretReqBody = {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
type: dto.type || SecretType.Shared,
secretPath: dto.secretPath,
secretKey: dto.key,
secretValue: dto.value,
secretComment: dto.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret as TRawSecret;
};
export const deleteSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret as TRawSecret;
};
export const getSecretByNameV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const response = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(response.statusCode).toBe(200);
const payload = JSON.parse(response.payload);
expect(payload).toHaveProperty("secret");
return payload.secret as TRawSecret;
};
export const getSecretsV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(getSecretsResponse.statusCode).toBe(200);
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
expect(getSecretsPayload).toHaveProperty("secrets");
expect(getSecretsPayload).toHaveProperty("imports");
return getSecretsPayload as {
secrets: TRawSecret[];
imports: {
secretPath: string;
environment: string;
folderId: string;
secrets: TRawSecret[];
}[];
};
};
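A minimal round-trip sketch with these raw-secret helpers (same assumed test context; the key and value are illustrative). Because both getters set expandSecretReferences, a value containing a ${OTHER_KEY} reference would come back expanded, as exercised in the tests above:
await createSecretV2({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  key: "EXAMPLE_KEY",
  value: "example-value",
  authToken: jwtAuthToken
});
const secret = await getSecretByNameV2({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  key: "EXAMPLE_KEY",
  authToken: jwtAuthToken
});
expect(secret.secretValue).toBe("example-value");
await deleteSecretV2({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  key: "EXAMPLE_KEY",
  authToken: jwtAuthToken
});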

View File

@ -11,10 +11,11 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@ -28,19 +29,31 @@ export default {
dbRootCert: cfg.DB_ROOT_CERT
});
const redis = new Redis(cfg.REDIS_URL);
await redis.flushdb("SYNC");
try {
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
const queue = queueServiceFactory(cfg.REDIS_URL);
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const server = await main({ db, smtp, logger, queue, keyStore });
// @ts-expect-error type
globalThis.testServer = server;
@ -58,10 +71,12 @@ export default {
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}
// custom setup
return {
async teardown() {
@ -80,6 +95,9 @@ export default {
},
true
);
await redis.flushdb("ASYNC");
redis.disconnect();
await db.destroy();
}
};

File diff suppressed because it is too large

View File

@ -103,7 +103,6 @@
"tsup": "^8.0.1",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.2.2"
},
"dependencies": {
@ -113,6 +112,7 @@
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
"@elastic/elasticsearch": "^8.15.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
@ -131,6 +131,8 @@
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
@ -158,6 +160,7 @@
"ldapjs": "^3.0.7",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",

View File

@ -70,12 +70,14 @@ import { TSecretReplicationServiceFactory } from "@app/services/secret-replicati
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "fastify" {
interface FastifyRequest {
@ -177,6 +179,8 @@ declare module "fastify" {
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
orgAdmin: TOrgAdminServiceFactory;
slack: TSlackServiceFactory;
workflowIntegration: TWorkflowIntegrationServiceFactory;
};
// this is exclusively used by middlewares in which we need to inject data
// everywhere else, access it via the service layer

View File

@ -193,6 +193,9 @@ import {
TProjectRolesUpdate,
TProjects,
TProjectsInsert,
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectsUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
@ -299,6 +302,9 @@ import {
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate,
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@ -322,7 +328,10 @@ import {
TUsersUpdate,
TWebhooks,
TWebhooksInsert,
TWebhooksUpdate
TWebhooksUpdate,
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import {
TSecretV2TagJunction,
@ -776,5 +785,20 @@ declare module "knex/types/tables" {
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate
>;
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate
>;
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate
>;
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
>;
}
}

View File

@ -115,7 +115,14 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.select("id")
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
@ -147,13 +154,27 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership)),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangeBy: knex(TableName.ProjectMembership)
.select("id")
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@ -177,8 +198,20 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.select("id")
.join(
TableName.SecretApprovalRequest,
`${TableName.SecretApprovalRequest}.id`,
`${TableName.SecretApprovalRequestReviewer}.requestId`
)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
tb.uuid("member").notNullable().alter();

View File

@ -0,0 +1,96 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("integration").notNullable();
tb.string("slug").notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.string("description");
tb.unique(["orgId", "slug"]);
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
}
if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).notNullable();
tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
tb.string("teamId").notNullable();
tb.string("teamName").notNullable();
tb.string("slackUserId").notNullable();
tb.string("slackAppId").notNullable();
tb.binary("encryptedBotAccessToken").notNullable();
tb.string("slackBotId").notNullable();
tb.string("slackBotUserId").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
}
if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("projectId").notNullable().unique();
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
tb.uuid("slackIntegrationId").notNullable();
tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("accessRequestChannels").notNullable().defaultTo("");
tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("secretRequestChannels").notNullable().defaultTo("");
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
}
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (!doesSuperAdminHaveSlackClientId) {
tb.binary("encryptedSlackClientId");
}
if (!doesSuperAdminHaveSlackClientSecret) {
tb.binary("encryptedSlackClientSecret");
}
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);
await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (doesSuperAdminHaveSlackClientId) {
tb.dropColumn("encryptedSlackClientId");
}
if (doesSuperAdminHaveSlackClientSecret) {
tb.dropColumn("encryptedSlackClientSecret");
}
});
}

View File

@ -0,0 +1,25 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
TableName.CertificateAuthority,
"requireTemplateForIssuance"
);
if (!hasRequireTemplateForIssuanceColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("requireTemplateForIssuance");
});
}
}

View File

@ -0,0 +1,85 @@
import { Knex } from "knex";
import { CertKeyUsage } from "@app/services/certificate/certificate-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// Certificate template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
if (!hasKeyUsagesCol) {
tb.specificType("keyUsages", "text[]");
}
if (!hasExtendedKeyUsagesCol) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
if (!hasKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!hasExtendedKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
extendedKeyUsages: []
});
}
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (tb) => {
if (!doesCertTableHaveKeyUsages) {
tb.specificType("keyUsages", "text[]");
}
if (!doesCertTableHaveExtendedKeyUsages) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
if (!doesCertTableHaveKeyUsages) {
await knex(TableName.Certificate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!doesCertTableHaveExtendedKeyUsages) {
await knex(TableName.Certificate).update({
extendedKeyUsages: []
});
}
}
export async function down(knex: Knex): Promise<void> {
// Certificate Template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
if (hasKeyUsagesCol) {
t.dropColumn("keyUsages");
}
if (hasExtendedKeyUsagesCol) {
t.dropColumn("extendedKeyUsages");
}
});
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (t) => {
if (doesCertTableHaveKeyUsages) {
t.dropColumn("keyUsages");
}
if (doesCertTableHaveExtendedKeyUsages) {
t.dropColumn("extendedKeyUsages");
}
});
}

View File

@ -28,7 +28,8 @@ export const CertificateAuthoritiesSchema = z.object({
keyAlgorithm: z.string(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional(),
activeCaCertId: z.string().uuid().nullable().optional()
activeCaCertId: z.string().uuid().nullable().optional(),
requireTemplateForIssuance: z.boolean().default(false)
});
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;

View File

@ -16,7 +16,9 @@ export const CertificateTemplatesSchema = z.object({
subjectAlternativeName: z.string(),
ttl: z.string(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional()
});
export type TCertificateTemplates = z.infer<typeof CertificateTemplatesSchema>;

View File

@ -22,7 +22,9 @@ export const CertificatesSchema = z.object({
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional(),
caCertId: z.string().uuid(),
certificateTemplateId: z.string().uuid().nullable().optional()
certificateTemplateId: z.string().uuid().nullable().optional(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

View File

@ -62,6 +62,7 @@ export * from "./project-environments";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
@ -101,6 +102,7 @@ export * from "./secret-versions-v2";
export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./super-admin";
export * from "./trusted-ips";
export * from "./user-actions";
@ -109,3 +111,4 @@ export * from "./user-encryption-keys";
export * from "./user-group-membership";
export * from "./users";
export * from "./webhooks";
export * from "./workflow-integrations";

View File

@ -114,7 +114,10 @@ export enum TableName {
InternalKms = "internal_kms",
InternalKmsKeyVersion = "internal_kms_key_version",
// @deprecated
KmsKeyVersion = "kms_key_versions"
KmsKeyVersion = "kms_key_versions",
WorkflowIntegrations = "workflow_integrations",
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

View File

@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSlackConfigsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
slackIntegrationId: z.string().uuid(),
isAccessRequestNotificationEnabled: z.boolean().default(false),
accessRequestChannels: z.string().default(""),
isSecretRequestNotificationEnabled: z.boolean().default(false),
secretRequestChannels: z.string().default(""),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectSlackConfigs = z.infer<typeof ProjectSlackConfigsSchema>;
export type TProjectSlackConfigsInsert = Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>;
export type TProjectSlackConfigsUpdate = Partial<Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>>;

View File

@ -21,8 +21,8 @@ export const SecretSharingSchema = z.object({
expiresAfterViews: z.number().nullable().optional(),
accessType: z.string().default("anyone"),
name: z.string().nullable().optional(),
password: z.string().nullable().optional(),
lastViewedAt: z.date().nullable().optional()
lastViewedAt: z.date().nullable().optional(),
password: z.string().nullable().optional()
});
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;

View File

@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SlackIntegrationsSchema = z.object({
id: z.string().uuid(),
teamId: z.string(),
teamName: z.string(),
slackUserId: z.string(),
slackAppId: z.string(),
encryptedBotAccessToken: zodBuffer,
slackBotId: z.string(),
slackBotUserId: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSlackIntegrations = z.infer<typeof SlackIntegrationsSchema>;
export type TSlackIntegrationsInsert = Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>;
export type TSlackIntegrationsUpdate = Partial<Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>>;

View File

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SuperAdminSchema = z.object({
@ -19,7 +21,9 @@ export const SuperAdminSchema = z.object({
trustLdapEmails: z.boolean().default(false).nullable().optional(),
trustOidcEmails: z.boolean().default(false).nullable().optional(),
defaultAuthOrgId: z.string().uuid().nullable().optional(),
enabledLoginMethods: z.string().array().nullable().optional()
enabledLoginMethods: z.string().array().nullable().optional(),
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -0,0 +1,22 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const WorkflowIntegrationsSchema = z.object({
id: z.string().uuid(),
integration: z.string(),
slug: z.string(),
orgId: z.string().uuid(),
description: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TWorkflowIntegrations = z.infer<typeof WorkflowIntegrationsSchema>;
export type TWorkflowIntegrationsInsert = Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>;
export type TWorkflowIntegrationsUpdate = Partial<Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>>;

View File

@ -17,11 +17,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
name: z.string().optional(),
secretPath: z.string().trim().default("/"),
environment: z.string(),
approverUserIds: z.string().array().min(1),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
})
.refine((data) => data.approvals <= data.approverUserIds.length, {
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
@ -127,11 +127,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.trim()
.optional()
.transform((val) => (val === "" ? "/" : val)),
approverUserIds: z.string().array().min(1),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
})
.refine((data) => data.approvals <= data.approverUserIds.length, {
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),

View File

@ -11,6 +11,30 @@ export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: readLimit
},
schema: {
description: "Get CRL in DER format (deprecated)",
params: z.object({
crlId: z.string().trim().describe(CA_CRLS.GET.crlId)
}),
response: {
200: z.instanceof(Buffer)
}
},
handler: async (req, res) => {
const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId);
res.header("Content-Type", "application/pkix-crl");
return Buffer.from(crl);
}
});
server.route({
method: "GET",
url: "/:crlId/der",
config: {
rateLimit: readLimit
},
schema: {
description: "Get CRL in DER format",
params: z.object({

View File

@ -1,6 +1,6 @@
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
// TODO(akhilmhdh): Fix this when licence service gets it type
// TODO(akhilmhdh): Fix this when license service gets its types
import { z } from "zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";

View File

@ -122,6 +122,10 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
.merge(
z.object({
project: z.object({
name: z.string(),
slug: z.string()
}),
event: z.object({
type: z.string(),
metadata: z.any()
@ -138,7 +142,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogs = await server.services.auditLog.listProjectAuditLogs({
const auditLogs = await server.services.auditLog.listAuditLogs({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,

View File

@ -100,17 +100,34 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
async (req, profile, cb) => {
try {
if (!profile) throw new BadRequestError({ message: "Missing profile" });
const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved
const email =
profile?.email ??
// entra sends data in this format
(profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ??
(profile?.emailAddress as string); // emailAddress is checked because in Rippling the field `email` is reserved
if (!email || !profile.firstName) {
throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
const firstName = (profile.firstName ??
// entra sends data in this format
profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/firstName"]) as string;
const lastName =
profile.lastName ?? profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/lastName"];
if (!email || !firstName) {
logger.info(
{
err: new Error("Invalid saml request. Missing email or first name"),
profile
},
`email: ${email} firstName: ${profile.firstName as string}`
);
}
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
externalId: profile.nameID,
email,
firstName: profile.firstName as string,
lastName: profile.lastName as string,
firstName,
lastName: lastName as string,
relayState: (req.body as { RelayState?: string }).RelayState,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string
@ -118,7 +135,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(null, {});
cb(error as Error);
}
},
() => {}

View File

@ -44,7 +44,7 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath,
actorAuthMethod,
approvals,
approverUserIds,
approvers,
projectSlug,
environment,
enforcementLevel
@ -52,7 +52,7 @@ export const accessApprovalPolicyServiceFactory = ({
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
if (approvals > approverUserIds.length)
if (approvals > approvers.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission } = await permissionService.getProjectPermission(
@ -76,7 +76,7 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath,
actorAuthMethod,
permissionService,
userIds: approverUserIds
userIds: approvers
});
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
@ -91,7 +91,7 @@ export const accessApprovalPolicyServiceFactory = ({
tx
);
await accessApprovalPolicyApproverDAL.insertMany(
approverUserIds.map((userId) => ({
approvers.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),
@ -128,7 +128,7 @@ export const accessApprovalPolicyServiceFactory = ({
const updateAccessApprovalPolicy = async ({
policyId,
approverUserIds,
approvers,
secretPath,
name,
actorId,
@ -161,7 +161,7 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
if (approverUserIds) {
if (approvers) {
await verifyApprovers({
projectId: accessApprovalPolicy.projectId,
orgId: actorOrgId,
@ -169,12 +169,12 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath: doc.secretPath!,
actorAuthMethod,
permissionService,
userIds: approverUserIds
userIds: approvers
});
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
await accessApprovalPolicyApproverDAL.insertMany(
approverUserIds.map((userId) => ({
approvers.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),

View File

@ -17,7 +17,7 @@ export type TCreateAccessApprovalPolicy = {
approvals: number;
secretPath: string;
environment: string;
approverUserIds: string[];
approvers: string[];
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
@ -26,7 +26,7 @@ export type TCreateAccessApprovalPolicy = {
export type TUpdateAccessApprovalPolicy = {
policyId: string;
approvals?: number;
approverUserIds?: string[];
approvers?: string[];
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;

View File

@ -5,9 +5,13 @@ import { ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal";
import { triggerSlackNotification } from "@app/services/slack/slack-fns";
import { SlackTriggerFeature } from "@app/services/slack/slack-types";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
@ -33,7 +37,10 @@ type TSecretApprovalRequestServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
accessApprovalPolicyApproverDAL: Pick<TAccessApprovalPolicyApproverDALFactory, "find">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus" | "findProjectBySlug">;
projectDAL: Pick<
TProjectDALFactory,
"checkProjectUpgradeStatus" | "findProjectBySlug" | "findProjectWithOrg" | "findById"
>;
accessApprovalRequestDAL: Pick<
TAccessApprovalRequestDALFactory,
| "create"
@ -56,6 +63,8 @@ type TSecretApprovalRequestServiceFactoryDep = {
TUserDALFactory,
"findUserByProjectMembershipId" | "findUsersByProjectMembershipIds" | "find" | "findById"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectSlackConfigDAL: Pick<TProjectSlackConfigDALFactory, "getIntegrationDetailsByProject">;
};
export type TAccessApprovalRequestServiceFactory = ReturnType<typeof accessApprovalRequestServiceFactory>;
@ -71,7 +80,9 @@ export const accessApprovalRequestServiceFactory = ({
accessApprovalPolicyApproverDAL,
additionalPrivilegeDAL,
smtpService,
userDAL
userDAL,
kmsService,
projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep) => {
const createAccessApprovalRequest = async ({
isTemporary,
@ -166,13 +177,36 @@ export const accessApprovalRequestServiceFactory = ({
tx
);
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;
await triggerSlackNotification({
projectId: project.id,
projectSlackConfigDAL,
projectDAL,
kmsService,
notification: {
type: SlackTriggerFeature.ACCESS_REQUEST,
payload: {
projectName: project.name,
requesterFullName,
isTemporary,
requesterEmail: requestedByUser.email as string,
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl
}
}
});
await smtpService.sendMail({
recipients: approverUsers.filter((approver) => approver.email).map((approver) => approver.email!),
subjectLine: "Access Approval Request",
substitutions: {
projectName: project.name,
requesterFullName: `${requestedByUser.firstName} ${requestedByUser.lastName}`,
requesterFullName,
requesterEmail: requestedByUser.email,
isTemporary,
...(isTemporary && {
@ -181,7 +215,7 @@ export const accessApprovalRequestServiceFactory = ({
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval`
approvalUrl
},
template: SmtpTemplates.AccessApprovalRequest
});

View File

@ -2,10 +2,11 @@ import { ForbiddenError } from "@casl/ability";
import { RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { validateLocalIps } from "@app/lib/validator";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
import { TLicenseServiceFactory } from "../license/license-service";
@ -44,6 +45,7 @@ export const auditLogStreamServiceFactory = ({
}: TCreateAuditLogStreamDTO) => {
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });
const appCfg = getConfig();
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams)
throw new BadRequestError({
@ -59,7 +61,9 @@ export const auditLogStreamServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);
validateLocalIps(url);
if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}
const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@ -131,7 +135,8 @@ export const auditLogStreamServiceFactory = ({
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
if (url) validateLocalIps(url);
const appCfg = getConfig();
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

View File

@ -1,10 +1,11 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { AuditLogsSchema, TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { ormify, selectAllTableCols, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
@ -32,23 +33,44 @@ export const auditLogDALFactory = (db: TDbClient) => {
.where(
stripUndefinedInWhere({
projectId,
orgId,
[`${TableName.AuditLog}.orgId`]: orgId,
eventType,
actor,
userAgentType
})
)
.leftJoin(TableName.Project, `${TableName.AuditLog}.projectId`, `${TableName.Project}.id`)
.select(selectAllTableCols(TableName.AuditLog))
.select(
db.ref("name").withSchema(TableName.Project).as("projectName"),
db.ref("slug").withSchema(TableName.Project).as("projectSlug")
)
.limit(limit)
.offset(offset)
.orderBy("createdAt", "desc");
.orderBy(`${TableName.AuditLog}.createdAt`, "desc");
if (actor) {
void sqlQuery.whereRaw(`"actorMetadata"->>'userId' = ?`, [actor]);
}
if (startDate) {
void sqlQuery.where("createdAt", ">=", startDate);
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, ">=", startDate);
}
if (endDate) {
void sqlQuery.where("createdAt", "<=", endDate);
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, "<=", endDate);
}
const docs = await sqlQuery;
return docs;
return docs.map((doc) => ({
...AuditLogsSchema.parse(doc),
project: {
name: doc.projectName,
slug: doc.projectSlug
}
}));
} catch (error) {
throw new DatabaseError({ error });
}
@ -62,7 +84,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
@ -84,7 +108,9 @@ export const auditLogDALFactory = (db: TDbClient) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
};
return { ...auditLogOrm, pruneAuditLog, find };

View File

@ -3,6 +3,7 @@ import { ForbiddenError } from "@casl/ability";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TAuditLogDALFactory } from "./audit-log-dal";
@ -11,7 +12,7 @@ import { EventType, TCreateAuditLogDTO, TListProjectAuditLogDTO } from "./audit-
type TAuditLogServiceFactoryDep = {
auditLogDAL: TAuditLogDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
auditLogQueue: TAuditLogQueueServiceFactory;
};
@ -22,7 +23,7 @@ export const auditLogServiceFactory = ({
auditLogQueue,
permissionService
}: TAuditLogServiceFactoryDep) => {
const listProjectAuditLogs = async ({
const listAuditLogs = async ({
userAgentType,
eventType,
offset,
@ -36,14 +37,33 @@ export const auditLogServiceFactory = ({
projectId,
auditLogActor
}: TListProjectAuditLogDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
if (projectId) {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
} else {
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
/**
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
* to the organization level
*/
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
}
// If project ID is not provided, then we need to return all the audit logs for the organization itself.
const auditLogs = await auditLogDAL.find({
startDate,
endDate,
@ -52,8 +72,9 @@ export const auditLogServiceFactory = ({
eventType,
userAgentType,
actor: auditLogActor,
projectId
...(projectId ? { projectId } : { orgId: actorOrgId })
});
return auditLogs.map(({ eventType: logEventType, actor: eActor, actorMetadata, eventMetadata, ...el }) => ({
...el,
event: { type: logEventType, metadata: eventMetadata },
@ -76,6 +97,6 @@ export const auditLogServiceFactory = ({
return {
createAuditLog,
listProjectAuditLogs
listAuditLogs
};
};

View File

@ -6,14 +6,14 @@ import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
export type TListProjectAuditLogDTO = {
auditLogActor?: string;
projectId: string;
projectId?: string;
eventType?: string;
startDate?: string;
endDate?: string;
userAgentType?: string;
limit?: number;
offset?: number;
} & TProjectPermission;
} & Omit<TProjectPermission, "projectId">;
export type TCreateAuditLogDTO = {
event: Event;
@ -140,6 +140,7 @@ export enum EventType {
GET_CA_CRLS = "get-certificate-authority-crls",
ISSUE_CERT = "issue-cert",
SIGN_CERT = "sign-cert",
GET_CA_CERTIFICATE_TEMPLATES = "get-ca-certificate-templates",
GET_CERT = "get-cert",
DELETE_CERT = "delete-cert",
REVOKE_CERT = "revoke-cert",
@ -169,7 +170,14 @@ export enum EventType {
GET_CERTIFICATE_TEMPLATE = "get-certificate-template",
CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "create-certificate-template-est-config",
UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "update-certificate-template-est-config",
GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config"
GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config",
ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration",
ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration",
GET_SLACK_INTEGRATION = "get-slack-integration",
UPDATE_SLACK_INTEGRATION = "update-slack-integration",
DELETE_SLACK_INTEGRATION = "delete-slack-integration",
GET_PROJECT_SLACK_CONFIG = "get-project-slack-config",
UPDATE_PROJECT_SLACK_CONFIG = "update-project-slack-config"
}
interface UserActorMetadata {
@ -1192,6 +1200,14 @@ interface SignCert {
};
}
interface GetCaCertificateTemplates {
type: EventType.GET_CA_CERTIFICATE_TEMPLATES;
metadata: {
caId: string;
dn: string;
};
}
interface GetCert {
type: EventType.GET_CERT;
metadata: {
@ -1446,6 +1462,63 @@ interface GetCertificateTemplateEstConfig {
};
}
interface AttemptCreateSlackIntegration {
type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION;
metadata: {
slug: string;
description?: string;
};
}
interface AttemptReinstallSlackIntegration {
type: EventType.ATTEMPT_REINSTALL_SLACK_INTEGRATION;
metadata: {
id: string;
};
}
interface UpdateSlackIntegration {
type: EventType.UPDATE_SLACK_INTEGRATION;
metadata: {
id: string;
slug: string;
description?: string;
};
}
interface DeleteSlackIntegration {
type: EventType.DELETE_SLACK_INTEGRATION;
metadata: {
id: string;
};
}
interface GetSlackIntegration {
type: EventType.GET_SLACK_INTEGRATION;
metadata: {
id: string;
};
}
interface UpdateProjectSlackConfig {
type: EventType.UPDATE_PROJECT_SLACK_CONFIG;
metadata: {
id: string;
slackIntegrationId: string;
isAccessRequestNotificationEnabled: boolean;
accessRequestChannels: string;
isSecretRequestNotificationEnabled: boolean;
secretRequestChannels: string;
};
}
interface GetProjectSlackConfig {
type: EventType.GET_PROJECT_SLACK_CONFIG;
metadata: {
id: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -1547,6 +1620,7 @@ export type Event =
| GetCaCrls
| IssueCert
| SignCert
| GetCaCertificateTemplates
| GetCert
| DeleteCert
| RevokeCert
@ -1576,4 +1650,11 @@ export type Event =
| DeleteCertificateTemplate
| CreateCertificateTemplateEstConfig
| UpdateCertificateTemplateEstConfig
| GetCertificateTemplateEstConfig;
| GetCertificateTemplateEstConfig
| AttemptCreateSlackIntegration
| AttemptReinstallSlackIntegration
| UpdateSlackIntegration
| DeleteSlackIntegration
| GetSlackIntegration
| UpdateProjectSlackConfig
| GetProjectSlackConfig;

View File

@ -0,0 +1,126 @@
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
...(providerInputs.ca && {
ssl: {
rejectUnauthorized: false,
ca: providerInputs.ca
}
})
},
auth: {
...(providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey
? {
apiKey: {
api_key: providerInputs.auth.apiKey,
id: providerInputs.auth.apiKeyId
}
}
: {
username: providerInputs.auth.username,
password: providerInputs.auth.password
})
}
});
return connection;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const infoResponse = await connection
.info()
.then(() => true)
.catch(() => false);
return infoResponse;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
await connection.security.putUser({
username,
password,
full_name: "Managed by Infisical.com",
roles: providerInputs.roles
});
await connection.close();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
await connection.security.deleteUser({
username: entityId
});
await connection.close();
return { entityId };
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
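A hedged sketch of how a dynamic secret provider like this one is driven (the host, role, and API key values are placeholders; the real call sites live in the dynamic secret lease service, which is not part of this hunk):
const provider = ElasticSearchProvider();
const inputs = {
  host: "https://es.example.internal", // placeholder host
  port: 9200,
  roles: ["superuser"], // placeholder role
  auth: {
    type: ElasticSearchAuthTypes.ApiKey,
    apiKey: "<api-key>", // placeholder
    apiKeyId: "<api-key-id>" // placeholder
  }
};
if (await provider.validateConnection(inputs)) {
  const lease = await provider.create(inputs); // provisions a random Elasticsearch user
  // lease.data.DB_USERNAME / lease.data.DB_PASSWORD hold the generated credentials
  await provider.revoke(inputs, lease.entityId); // deletes that user again
}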

View File

@ -1,8 +1,11 @@
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { DynamicSecretProviders } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SqlDatabaseProvider } from "./sql-database";
@ -12,5 +15,8 @@ export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
[DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
[DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider(),
[DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider()
[DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider(),
[DynamicSecretProviders.MongoDB]: MongoDBProvider(),
[DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
[DynamicSecretProviders.RabbitMq]: RabbitMqProvider()
});

View File

@ -7,12 +7,16 @@ export enum SqlProviders {
MsSQL = "mssql"
}
export enum ElasticSearchAuthTypes {
User = "user",
ApiKey = "api-key"
}
export const DynamicSecretRedisDBSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
username: z.string().trim(), // this is often "default".
password: z.string().trim().optional(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
@ -30,6 +34,48 @@ export const DynamicSecretAwsElastiCacheSchema = z.object({
ca: z.string().optional()
});
export const DynamicSecretElasticSearchSchema = z.object({
host: z.string().trim().min(1),
port: z.number(),
roles: z.array(z.string().trim().min(1)).min(1),
// two auth types: "user" and "api-key"
auth: z.discriminatedUnion("type", [
z.object({
type: z.literal(ElasticSearchAuthTypes.User),
username: z.string().trim(),
password: z.string().trim()
}),
z.object({
type: z.literal(ElasticSearchAuthTypes.ApiKey),
apiKey: z.string().trim(),
apiKeyId: z.string().trim()
})
]),
ca: z.string().optional()
});
export const DynamicSecretRabbitMqSchema = z.object({
host: z.string().trim().min(1),
port: z.number(),
tags: z.array(z.string().trim()).default([]),
username: z.string().trim().min(1),
password: z.string().trim().min(1),
ca: z.string().optional(),
virtualHost: z.object({
name: z.string().trim().min(1),
permissions: z.object({
read: z.string().trim().min(1),
write: z.string().trim().min(1),
configure: z.string().trim().min(1)
})
})
});
export const DynamicSecretSqlDBSchema = z.object({
client: z.nativeEnum(SqlProviders),
host: z.string().trim().toLowerCase(),
@ -104,13 +150,32 @@ export const DynamicSecretMongoAtlasSchema = z.object({
.array()
});
export const DynamicSecretMongoDBSchema = z.object({
host: z.string().min(1).trim().toLowerCase(),
port: z.number().optional(),
username: z.string().min(1).trim(),
password: z.string().min(1).trim(),
database: z.string().min(1).trim(),
ca: z.string().min(1).optional(),
roles: z
.string()
.array()
.min(1)
.describe(
'Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
)
});
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra",
AwsIam = "aws-iam",
Redis = "redis",
AwsElastiCache = "aws-elasticache",
MongoAtlas = "mongo-db-atlas"
MongoAtlas = "mongo-db-atlas",
ElasticSearch = "elastic-search",
MongoDB = "mongo-db",
RabbitMq = "rabbit-mq"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@ -119,7 +184,10 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema })
z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema })
]);
export type TDynamicProviderFns = {
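
The discriminated union above is what the API layer validates incoming provider configurations against. A hedged example of that parse step, with made-up Elasticsearch inputs:

// Hypothetical payload as it might arrive from the dynamic secret create endpoint.
const payload = {
  type: DynamicSecretProviders.ElasticSearch,
  inputs: {
    host: "https://es.internal.example",
    port: 9200,
    roles: ["superuser"],
    auth: {
      type: ElasticSearchAuthTypes.ApiKey,
      apiKey: "<api-key>",
      apiKeyId: "<api-key-id>"
    }
  }
};

// Throws a ZodError if `inputs` does not satisfy the schema selected by `type`.
const parsed = DynamicSecretProviderSchema.parse(payload);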

View File

@ -0,0 +1,116 @@
import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
if (
appCfg.isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
const client = new MongoClient(uri, {
auth: {
username: providerInputs.username,
password: providerInputs.password
},
directConnection: !isSrv,
ca: providerInputs.ca
});
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const isConnected = await client
.db(providerInputs.database)
.command({ ping: 1 })
.then(() => true);
await client.close();
return isConnected;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
const db = client.db(providerInputs.database);
await db.command({
createUser: username,
pwd: password,
roles: providerInputs.roles
});
await client.close();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = entityId;
const db = client.db(providerInputs.database);
await db.command({
dropUser: username
});
await client.close();
return { entityId: username };
};
const renew = async (_inputs: unknown, entityId: string) => {
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
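
One behavior worth noting in the provider above: when no port is configured, getClient switches to the mongodb+srv:// scheme and turns off directConnection, since SRV records resolve the actual hosts. A small illustrative sketch of just that branch (host names are placeholders):

// Illustrative only; mirrors the isSrv branch in getClient above.
const buildMongoUri = (host: string, port?: number) => {
  const isSrv = !port;
  return {
    uri: isSrv ? `mongodb+srv://${host}` : `mongodb://${host}:${port}`,
    directConnection: !isSrv
  };
};

console.log(buildMongoUri("cluster0.example.mongodb.net")); // SRV lookup, directConnection: false
console.log(buildMongoUri("mongo.internal.example", 27017)); // host:port, directConnection: true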

View File

@ -0,0 +1,172 @@
import axios, { Axios } from "axios";
import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
type TCreateRabbitMQUser = {
axiosInstance: Axios;
createUser: {
username: string;
password: string;
tags: string[];
};
virtualHost: {
name: string;
permissions: {
read: string;
write: string;
configure: string;
};
};
};
type TDeleteRabbitMqUser = {
axiosInstance: Axios;
usernameToDelete: string;
};
async function createRabbitMqUser({ axiosInstance, createUser, virtualHost }: TCreateRabbitMQUser): Promise<void> {
try {
// Create user
const userUrl = `/users/${createUser.username}`;
const userData = {
password: createUser.password,
tags: createUser.tags.join(",")
};
await axiosInstance.put(userUrl, userData);
// Set permissions for the virtual host
if (virtualHost) {
const permissionData = {
configure: virtualHost.permissions.configure,
write: virtualHost.permissions.write,
read: virtualHost.permissions.read
};
await axiosInstance.put(
`/permissions/${encodeURIComponent(virtualHost.name)}/${createUser.username}`,
permissionData
);
}
} catch (error) {
logger.error(error, "Error creating RabbitMQ user");
throw error;
}
}
async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRabbitMqUser) {
await axiosInstance.delete(`users/${usernameToDelete}`);
return { username: usernameToDelete };
}
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
username: providerInputs.username,
password: providerInputs.password
},
headers: {
"Content-Type": "application/json"
},
...(providerInputs.ca && {
httpsAgent: new https.Agent({ ca: providerInputs.ca, rejectUnauthorized: false })
})
});
return axiosInstance;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const infoResponse = await connection.get("/whoami").then(() => true);
return infoResponse;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
await createRabbitMqUser({
axiosInstance: connection,
virtualHost: providerInputs.virtualHost,
createUser: {
password,
username,
tags: [...(providerInputs.tags ?? []), "infisical-user"]
}
});
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
return { entityId };
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
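
For reference, the provider above talks to the RabbitMQ management HTTP API rather than AMQP. A hedged sketch of the two requests createRabbitMqUser issues, written against a bare axios instance; the endpoint, credentials, and permission patterns are placeholders:

import axios from "axios";

// Placeholder management endpoint and admin credentials (the default management port is 15672).
const api = axios.create({
  baseURL: "https://rabbitmq.internal.example:15672/api",
  auth: { username: "admin", password: "<admin-password>" }
});

const provisionUser = async () => {
  // 1. Create or update the user.
  await api.put("/users/generated-user", {
    password: "<generated-password>",
    tags: "infisical-user"
  });

  // 2. Grant read/write/configure on the target virtual host (the vhost name must be URI-encoded).
  await api.put(`/permissions/${encodeURIComponent("/")}/generated-user`, {
    configure: ".*",
    write: ".*",
    read: ".*"
  });
};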

View File

@ -1,4 +1,3 @@
/* eslint-disable no-console */
import handlebars from "handlebars";
import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";

View File

@ -41,10 +41,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
};
// special query
const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string, tx?: Knex) => {
try {
const usernameDocs: string[] = await db
.replicaNode()(TableName.UserGroupMembership)
const usernameDocs: string[] = await (tx || db.replicaNode())(TableName.UserGroupMembership)
.join(
TableName.GroupProjectMembership,
`${TableName.UserGroupMembership}.groupId`,

View File

@ -26,8 +26,10 @@ export const getDefaultOnPremFeatures = () => {
status: null,
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretApproval: true,
secretRotation: true,
caCrl: false
};
};
export const setupLicenseRequestWithStore = () => {};

View File

@ -49,15 +49,15 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
pkiEst: false
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
let token: string;
const licenceReq = axios.create({
const licenseReq = axios.create({
baseURL,
timeout: 35 * 1000
// signal: AbortSignal.timeout(60 * 1000)
});
const refreshLicence = async () => {
const refreshLicense = async () => {
const appCfg = getConfig();
const {
data: { token: authToken }
@ -75,7 +75,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
return token;
};
licenceReq.interceptors.request.use(
licenseReq.interceptors.request.use(
(config) => {
if (token && config.headers) {
// eslint-disable-next-line no-param-reassign
@ -86,7 +86,7 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
(err) => Promise.reject(err)
);
licenceReq.interceptors.response.use(
licenseReq.interceptors.response.use(
(response) => response,
async (err) => {
const originalRequest = (err as AxiosError).config;
@ -97,15 +97,15 @@ export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string
(originalRequest as any)._retry = true; // injected
// refresh
await refreshLicence();
await refreshLicense();
licenceReq.defaults.headers.common.Authorization = `Bearer ${token}`;
return licenceReq(originalRequest!);
licenseReq.defaults.headers.common.Authorization = `Bearer ${token}`;
return licenseReq(originalRequest!);
}
return Promise.reject(err);
}
);
return { request: licenceReq, refreshLicence };
return { request: licenseReq, refreshLicense };
};

View File

@ -16,8 +16,8 @@ import { TOrgDALFactory } from "@app/services/org/org-dal";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { getDefaultOnPremFeatures, setupLicenceRequestWithStore } from "./licence-fns";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
InstanceType,
TAddOrgPmtMethodDTO,
@ -64,13 +64,13 @@ export const licenseServiceFactory = ({
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
const appCfg = getConfig();
const licenseServerCloudApi = setupLicenceRequestWithStore(
const licenseServerCloudApi = setupLicenseRequestWithStore(
appCfg.LICENSE_SERVER_URL || "",
LICENSE_SERVER_CLOUD_LOGIN,
appCfg.LICENSE_SERVER_KEY || ""
);
const licenseServerOnPremApi = setupLicenceRequestWithStore(
const licenseServerOnPremApi = setupLicenseRequestWithStore(
appCfg.LICENSE_SERVER_URL || "",
LICENSE_SERVER_ON_PREM_LOGIN,
appCfg.LICENSE_KEY || ""
@ -79,7 +79,7 @@ export const licenseServiceFactory = ({
const init = async () => {
try {
if (appCfg.LICENSE_SERVER_KEY) {
const token = await licenseServerCloudApi.refreshLicence();
const token = await licenseServerCloudApi.refreshLicense();
if (token) instanceType = InstanceType.Cloud;
logger.info(`Instance type: ${InstanceType.Cloud}`);
isValidLicense = true;
@ -87,7 +87,7 @@ export const licenseServiceFactory = ({
}
if (appCfg.LICENSE_KEY) {
const token = await licenseServerOnPremApi.refreshLicence();
const token = await licenseServerOnPremApi.refreshLicense();
if (token) {
const {
data: { currentPlan }

View File

@ -47,6 +47,9 @@ import {
} from "@app/services/secret-v2-bridge/secret-v2-bridge-fns";
import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal";
import { triggerSlackNotification } from "@app/services/slack/slack-fns";
import { SlackTriggerFeature } from "@app/services/slack/slack-types";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
@ -89,7 +92,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
smtpService: Pick<TSmtpService, "sendMail">;
userDAL: Pick<TUserDALFactory, "find" | "findOne">;
userDAL: Pick<TUserDALFactory, "find" | "findOne" | "findById">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectDAL: Pick<
TProjectDALFactory,
@ -104,6 +107,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
secretApprovalPolicyDAL: Pick<TSecretApprovalPolicyDALFactory, "findById">;
projectSlackConfigDAL: Pick<TProjectSlackConfigDALFactory, "getIntegrationDetailsByProject">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@ -132,7 +136,8 @@ export const secretApprovalRequestServiceFactory = ({
secretV2BridgeDAL,
secretVersionV2BridgeDAL,
secretVersionTagV2BridgeDAL,
licenseService
licenseService,
projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@ -1069,6 +1074,25 @@ export const secretApprovalRequestServiceFactory = ({
return { ...doc, commits: approvalCommits };
});
const env = await projectEnvDAL.findOne({ id: policy.envId });
const user = await userDAL.findById(secretApprovalRequest.committerUserId);
await triggerSlackNotification({
projectId,
projectDAL,
kmsService,
projectSlackConfigDAL,
notification: {
type: SlackTriggerFeature.SECRET_APPROVAL,
payload: {
userEmail: user.email as string,
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
}
}
});
await sendApprovalEmailsFn({
projectDAL,
secretApprovalPolicyDAL,
@ -1331,6 +1355,25 @@ export const secretApprovalRequestServiceFactory = ({
return { ...doc, commits: approvalCommits };
});
const user = await userDAL.findById(secretApprovalRequest.committerUserId);
const env = await projectEnvDAL.findOne({ id: policy.envId });
await triggerSlackNotification({
projectId,
projectDAL,
kmsService,
projectSlackConfigDAL,
notification: {
type: SlackTriggerFeature.SECRET_APPROVAL,
payload: {
userEmail: user.email as string,
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
}
}
});
await sendApprovalEmailsFn({
projectDAL,
secretApprovalPolicyDAL,

View File

@ -16,6 +16,7 @@ import {
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;
@ -599,6 +600,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
const pruneExcessSnapshots = async () => {
const PRUNE_FOLDER_BATCH_SIZE = 10000;
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots started`);
try {
let uuidOffset = "00000000-0000-0000-0000-000000000000";
// cleanup snapshots from current folders
@ -714,6 +716,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
} catch (error) {
throw new DatabaseError({ error, name: "SnapshotPrune" });
}
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret snapshots completed`);
};
// special query for migration for secret v2

View File

@ -363,7 +363,12 @@ export const ORGANIZATIONS = {
membershipId: "The ID of the membership to delete."
},
LIST_IDENTITY_MEMBERSHIPS: {
orgId: "The ID of the organization to get identity memberships from."
orgId: "The ID of the organization to get identity memberships from.",
offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.",
limit: "The number of identity memberships to return.",
orderBy: "The column to order identity memberships by.",
orderDirection: "The direction identity memberships will be sorted in.",
search: "The text string that identity membership names will be filtered by."
},
GET_PROJECTS: {
organizationId: "The ID of the organization to get projects from."
@ -447,7 +452,9 @@ export const PROJECT_USERS = {
INVITE_MEMBER: {
projectId: "The ID of the project to invite the member to.",
emails: "A list of organization member emails to invite to the project.",
usernames: "A list of usernames to invite to the project."
usernames: "A list of usernames to invite to the project.",
roleSlugs:
"A list of role slugs to assign to the newly created project membership. If nothing is provided, it will default to the Member role."
},
REMOVE_MEMBER: {
projectId: "The ID of the project to remove the member from.",
@ -470,7 +477,12 @@ export const PROJECT_USERS = {
export const PROJECT_IDENTITIES = {
LIST_IDENTITY_MEMBERSHIPS: {
projectId: "The ID of the project to get identity memberships from."
projectId: "The ID of the project to get identity memberships from.",
offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.",
limit: "The number of identity memberships to return.",
orderBy: "The column to order identity memberships by.",
orderDirection: "The direction identity memberships will be sorted in.",
search: "The text string that identity membership names will be filtered by."
},
GET_IDENTITY_MEMBERSHIP_BY_ID: {
identityId: "The ID of the identity to get the membership for.",
@ -1037,14 +1049,18 @@ export const CERTIFICATE_AUTHORITIES = {
maxPathLength:
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
keyAlgorithm:
"The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA."
"The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA.",
requireTemplateForIssuance:
"Whether or not certificates for this CA can only be issued through certificate templates."
},
GET: {
caId: "The ID of the CA to get"
},
UPDATE: {
caId: "The ID of the CA to update",
status: "The status of the CA to update to. This can be one of active or disabled"
status: "The status of the CA to update to. This can be one of active or disabled",
requireTemplateForIssuance:
"Whether or not certificates for this CA can only be issued through certificate templates."
},
DELETE: {
caId: "The ID of the CA to delete"
@ -1067,6 +1083,10 @@ export const CERTIFICATE_AUTHORITIES = {
certificateChain: "The certificate chain of the CA",
serialNumber: "The serial number of the CA certificate"
},
GET_CERT_BY_ID: {
caId: "The ID of the CA to get the CA certificate from",
caCertId: "The ID of the CA certificate to get"
},
GET_CA_CERTS: {
caId: "The ID of the CA to get the CA certificates for",
certificate: "The certificate body of the CA certificate",
@ -1106,11 +1126,15 @@ export const CERTIFICATE_AUTHORITIES = {
issuingCaCertificate: "The certificate of the issuing CA",
certificateChain: "The certificate chain of the issued certificate",
privateKey: "The private key of the issued certificate",
serialNumber: "The serial number of the issued certificate"
serialNumber: "The serial number of the issued certificate",
keyUsages: "The key usage extension of the certificate",
extendedKeyUsages: "The extended key usage extension of the certificate"
},
SIGN_CERT: {
caId: "The ID of the CA to issue the certificate from",
pkiCollectionId: "The ID of the PKI collection to add the certificate to",
keyUsages: "The key usage extension of the certificate",
extendedKeyUsages: "The extended key usage extension of the certificate",
csr: "The pem-encoded CSR to sign with the CA to be used for certificate issuance",
friendlyName: "A friendly name for the certificate",
commonName: "The common name (CN) for the certificate",
@ -1160,7 +1184,10 @@ export const CERTIFICATE_TEMPLATES = {
name: "The name of the template",
commonName: "The regular expression string to use for validating common names",
subjectAlternativeName: "The regular expression string to use for validating subject alternative names",
ttl: "The max TTL for the template"
ttl: "The max TTL for the template",
keyUsages: "The key usage constraint or default value for when template is used during certificate issuance",
extendedKeyUsages:
"The extended key usage constraint or default value for when template is used during certificate issuance"
},
GET: {
certificateTemplateId: "The ID of the certificate template to get"
@ -1172,7 +1199,11 @@ export const CERTIFICATE_TEMPLATES = {
name: "The updated name of the template",
commonName: "The updated regular expression string for validating common names",
subjectAlternativeName: "The updated regular expression string for validating subject alternative names",
ttl: "The updated max TTL for the template"
ttl: "The updated max TTL for the template",
keyUsages:
"The updated key usage constraint or default value for when the template is used during certificate issuance",
extendedKeyUsages:
"The updated extended key usage constraint or default value for when the template is used during certificate issuance"
},
DELETE: {
certificateTemplateId: "The ID of the certificate template to delete"

View File

@ -146,7 +146,9 @@ const envSchema = z
PLAIN_API_KEY: zpStr(z.string().optional()),
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert")
SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"),
WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string()).optional(),
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string()).optional()
})
.transform((data) => ({
...data,

View File

@ -5,6 +5,7 @@ import nacl from "tweetnacl";
import tweetnacl from "tweetnacl-util";
import { TUserEncryptionKeys } from "@app/db/schemas";
import { UserEncryption } from "@app/services/user/user-types";
import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";
@ -36,12 +37,16 @@ export const srpCheckClientProof = async (
// Ghost user related:
// This functionality is intended for ghost user logic. This happens on the frontend when a user is being created.
// We replicate the same functionality on the backend when creating a ghost user.
export const generateUserSrpKeys = async (email: string, password: string) => {
export const generateUserSrpKeys = async (
email: string,
password: string,
customKeys?: { publicKey: string; privateKey: string }
) => {
const pair = nacl.box.keyPair();
const secretKeyUint8Array = pair.secretKey;
const publicKeyUint8Array = pair.publicKey;
const privateKey = tweetnacl.encodeBase64(secretKeyUint8Array);
const publicKey = tweetnacl.encodeBase64(publicKeyUint8Array);
const privateKey = customKeys?.privateKey || tweetnacl.encodeBase64(secretKeyUint8Array);
const publicKey = customKeys?.publicKey || tweetnacl.encodeBase64(publicKeyUint8Array);
// eslint-disable-next-line
const client = new jsrp.client();
@ -111,7 +116,7 @@ export const getUserPrivateKey = async (
| "encryptionVersion"
>
) => {
if (user.encryptionVersion === 1) {
if (user.encryptionVersion === UserEncryption.V1) {
return decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
@ -119,7 +124,12 @@ export const getUserPrivateKey = async (
key: password.slice(0, 32).padStart(32 + (password.slice(0, 32).length - new Blob([password]).size), "0")
});
}
if (user.encryptionVersion === 2 && user.protectedKey && user.protectedKeyIV && user.protectedKeyTag) {
if (
user.encryptionVersion === UserEncryption.V2 &&
user.protectedKey &&
user.protectedKeyIV &&
user.protectedKeyTag
) {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,

View File

@ -52,3 +52,8 @@ export enum SecretSharingAccessType {
Anyone = "anyone",
Organization = "organization"
}
export enum OrderByDirection {
ASC = "asc",
DESC = "desc"
}

View File

@ -1,2 +1,2 @@
export { isDisposableEmail } from "./validate-email";
export { validateLocalIps } from "./validate-url";
export { blockLocalAndPrivateIpAddresses } from "./validate-url";

View File

@ -1,10 +1,16 @@
import fs from "fs/promises";
import path from "path";
export const isDisposableEmail = async (email: string) => {
const emailDomain = email.split("@")[1];
export const isDisposableEmail = async (emails: string | string[]) => {
const disposableEmails = await fs.readFile(path.join(__dirname, "disposable_emails.txt"), "utf8");
if (Array.isArray(emails)) {
return emails.some((email) => {
const emailDomain = email.split("@")[1];
return disposableEmails.split("\n").includes(emailDomain);
});
}
const emailDomain = emails.split("@")[1];
if (disposableEmails.split("\n").includes(emailDomain)) return true;
return false;
};
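
The reworked helper above now accepts either a single address or a list, and for lists it returns true as soon as any address uses a disposable domain. A quick hedged usage sketch (domains are examples; the real check reads disposable_emails.txt at runtime):

const checkInvitees = async () => {
  // Single address: true if its domain appears in the disposable-domain list.
  const single = await isDisposableEmail("alice@tempmail.example");

  // Batch: true if ANY address in the list uses a disposable domain.
  const batch = await isDisposableEmail(["bob@company.example", "carol@tempmail.example"]);

  console.log({ single, batch });
};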

View File

@ -1,7 +1,7 @@
import { getConfig } from "../config/env";
import { BadRequestError } from "../errors";
export const validateLocalIps = (url: string) => {
export const blockLocalAndPrivateIpAddresses = (url: string) => {
const validUrl = new URL(url);
const appCfg = getConfig();
// on cloud local ips are not allowed

View File

@ -10,7 +10,7 @@ import fastifyFormBody from "@fastify/formbody";
import helmet from "@fastify/helmet";
import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
import ratelimiter from "@fastify/rate-limit";
import fasitfy from "fastify";
import fastify from "fastify";
import { Knex } from "knex";
import { Logger } from "pino";
@ -39,7 +39,7 @@ type TMain = {
// Run the server!
export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
const appCfg = getConfig();
const server = fasitfy({
const server = fastify({
logger: appCfg.NODE_ENV === "test" ? false : logger,
trustProxy: true,
connectionTimeout: 30 * 1000,

View File

@ -182,6 +182,9 @@ import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/
import { secretVersionV2TagBridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
import { serviceTokenDALFactory } from "@app/services/service-token/service-token-dal";
import { serviceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { projectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal";
import { slackIntegrationDALFactory } from "@app/services/slack/slack-integration-dal";
import { slackServiceFactory } from "@app/services/slack/slack-service";
import { TSmtpService } from "@app/services/smtp/smtp-service";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
@ -194,6 +197,8 @@ import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
import { userEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { webhookDALFactory } from "@app/services/webhook/webhook-dal";
import { webhookServiceFactory } from "@app/services/webhook/webhook-service";
import { workflowIntegrationDALFactory } from "@app/services/workflow-integration/workflow-integration-dal";
import { workflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
import { injectAuditLogInfo } from "../plugins/audit-log";
import { injectIdentity } from "../plugins/auth/inject-identity";
@ -322,6 +327,10 @@ export const registerRoutes = async (
const externalKmsDAL = externalKmsDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const slackIntegrationDAL = slackIntegrationDALFactory(db);
const projectSlackConfigDAL = projectSlackConfigDALFactory(db);
const workflowIntegrationDAL = workflowIntegrationDALFactory(db);
const permissionService = permissionServiceFactory({
permissionDAL,
orgRoleDAL,
@ -464,11 +473,13 @@ export const registerRoutes = async (
userAliasDAL,
orgMembershipDAL,
tokenService,
permissionService,
groupProjectDAL,
smtpService,
projectMembershipDAL
});
const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, tokenDAL: authTokenDAL });
const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL });
const passwordService = authPaswordServiceFactory({
tokenService,
smtpService,
@ -488,6 +499,7 @@ export const registerRoutes = async (
tokenService,
projectUserAdditionalPrivilegeDAL,
projectUserMembershipRoleDAL,
projectRoleDAL,
projectDAL,
projectMembershipDAL,
orgMembershipDAL,
@ -520,8 +532,10 @@ export const registerRoutes = async (
serverCfgDAL: superAdminDAL,
orgService,
keyStore,
licenseService
licenseService,
kmsService
});
const orgAdminService = orgAdminServiceFactory({
projectDAL,
permissionService,
@ -721,7 +735,9 @@ export const registerRoutes = async (
keyStore,
kmsService,
projectBotDAL,
certificateTemplateDAL
certificateTemplateDAL,
projectSlackConfigDAL,
slackIntegrationDAL
});
const projectEnvService = projectEnvServiceFactory({
@ -872,7 +888,8 @@ export const registerRoutes = async (
smtpService,
projectEnvDAL,
userDAL,
licenseService
licenseService,
projectSlackConfigDAL
});
const secretService = secretServiceFactory({
@ -922,7 +939,9 @@ export const registerRoutes = async (
projectEnvDAL,
userDAL,
smtpService,
accessApprovalPolicyApproverDAL
accessApprovalPolicyApproverDAL,
projectSlackConfigDAL,
kmsService
});
const secretReplicationService = secretReplicationServiceFactory({
@ -1150,6 +1169,18 @@ export const registerRoutes = async (
userDAL
});
const slackService = slackServiceFactory({
permissionService,
kmsService,
slackIntegrationDAL,
workflowIntegrationDAL
});
const workflowIntegrationService = workflowIntegrationServiceFactory({
permissionService,
workflowIntegrationDAL
});
await superAdminService.initServerCfg();
//
// setup the communication with license key server
@ -1231,7 +1262,9 @@ export const registerRoutes = async (
secretSharing: secretSharingService,
userEngagement: userEngagementService,
externalKms: externalKmsService,
orgAdmin: orgAdminService
orgAdmin: orgAdminService,
slack: slackService,
workflowIntegration: workflowIntegrationService
});
const cronJobs: CronJob[] = [];

View File

@ -21,7 +21,12 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
schema: {
response: {
200: z.object({
config: SuperAdminSchema.omit({ createdAt: true, updatedAt: true }).extend({
config: SuperAdminSchema.omit({
createdAt: true,
updatedAt: true,
encryptedSlackClientId: true,
encryptedSlackClientSecret: true
}).extend({
isMigrationModeOn: z.boolean(),
defaultAuthOrgSlug: z.string().nullable(),
isSecretScanningDisabled: z.boolean()
@ -62,7 +67,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
.optional()
.refine((methods) => !methods || methods.length > 0, {
message: "At least one login method should be enabled."
})
}),
slackClientId: z.string().optional(),
slackClientSecret: z.string().optional()
}),
response: {
200: z.object({
@ -123,6 +130,32 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/integrations/slack/config",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
clientId: z.string(),
clientSecret: z.string()
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async () => {
const adminSlackConfig = await server.services.superAdmin.getAdminSlackConfig();
return adminSlackConfig;
}
});
server.route({
method: "DELETE",
url: "/user-management/users/:userId",

View File

@ -1,13 +1,14 @@
/* eslint-disable @typescript-eslint/no-floating-promises */
import ms from "ms";
import { z } from "zod";
import { CertificateAuthoritiesSchema } from "@app/db/schemas";
import { CertificateAuthoritiesSchema, CertificateTemplatesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import { CaRenewalType, CaStatus, CaType } from "@app/services/certificate-authority/certificate-authority-types";
import {
validateAltNamesField,
@ -42,7 +43,11 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
.describe(CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm),
requireTemplateForIssuance: z
.boolean()
.default(false)
.describe(CERTIFICATE_AUTHORITIES.CREATE.requireTemplateForIssuance)
})
.refine(
(data) => {
@ -135,6 +140,33 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
}
});
// this endpoint will be used to serve the CA certificate when a client makes a request
// against the Authority Information Access CA Issuer URL
server.route({
method: "GET",
url: "/:caId/certificates/:caCertId/der",
config: {
rateLimit: readLimit
},
schema: {
description: "Get DER-encoded certificate of CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT_BY_ID.caId),
caCertId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT_BY_ID.caCertId)
}),
response: {
200: z.instanceof(Buffer)
}
},
handler: async (req, res) => {
const caCert = await server.services.certificateAuthority.getCaCertById(req.params);
res.header("Content-Type", "application/pkix-cert");
return Buffer.from(caCert.rawData);
}
});
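
Because the route above returns a raw DER Buffer with the application/pkix-cert content type (and, per the comment, is meant to be reachable from Authority Information Access lookups), a client has to read the body as binary. A hedged fetch sketch; the router mount prefix is not visible in this hunk, so the path below is assumed:

// Assumed path; substitute the real prefix, caId, and caCertId for your deployment.
const fetchCaCertDer = async () => {
  const res = await fetch(
    "https://app.infisical.example/api/v1/pki/ca/<caId>/certificates/<caCertId>/der"
  );
  return Buffer.from(await res.arrayBuffer()); // DER-encoded CA certificate bytes
};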
server.route({
method: "PATCH",
url: "/:caId",
@ -148,7 +180,11 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.UPDATE.caId)
}),
body: z.object({
status: z.enum([CaStatus.ACTIVE, CaStatus.DISABLED]).optional().describe(CERTIFICATE_AUTHORITIES.UPDATE.status)
status: z.enum([CaStatus.ACTIVE, CaStatus.DISABLED]).optional().describe(CERTIFICATE_AUTHORITIES.UPDATE.status),
requireTemplateForIssuance: z
.boolean()
.optional()
.describe(CERTIFICATE_AUTHORITIES.CREATE.requireTemplateForIssuance)
}),
response: {
200: z.object({
@ -565,7 +601,9 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.ttl),
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notBefore),
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter)
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter),
keyUsages: z.nativeEnum(CertKeyUsage).array().optional(),
extendedKeyUsages: z.nativeEnum(CertExtendedKeyUsage).array().optional()
})
.refine(
(data) => {
@ -645,7 +683,9 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.ttl),
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notBefore),
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter)
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter),
keyUsages: z.nativeEnum(CertKeyUsage).array().optional(),
extendedKeyUsages: z.nativeEnum(CertExtendedKeyUsage).array().optional()
})
.refine(
(data) => {
@ -700,6 +740,51 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/:caId/certificate-templates",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get list of certificate templates for the CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.caId)
}),
response: {
200: z.object({
certificateTemplates: CertificateTemplatesSchema.array()
})
}
},
handler: async (req) => {
const { certificateTemplates, ca } = await server.services.certificateAuthority.getCaCertificateTemplates({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA_CERTIFICATE_TEMPLATES,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
certificateTemplates
};
}
});
server.route({
method: "GET",
url: "/:caId/crls",

View File

@ -7,7 +7,7 @@ import { CERTIFICATE_AUTHORITIES, CERTIFICATES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CrlReason } from "@app/services/certificate/certificate-types";
import { CertExtendedKeyUsage, CertKeyUsage, CrlReason } from "@app/services/certificate/certificate-types";
import {
validateAltNamesField,
validateCaDateField
@ -86,7 +86,17 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.ttl),
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notBefore),
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter)
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter),
keyUsages: z
.nativeEnum(CertKeyUsage)
.array()
.optional()
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.optional()
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.extendedKeyUsages)
})
.refine(
(data) => {
@ -177,7 +187,17 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.ttl),
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notBefore),
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter)
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter),
keyUsages: z
.nativeEnum(CertKeyUsage)
.array()
.optional()
.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.optional()
.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.extendedKeyUsages)
})
.refine(
(data) => {

View File

@ -7,6 +7,7 @@ import { CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertExtendedKeyUsage, CertKeyUsage } from "@app/services/certificate/certificate-types";
import { sanitizedCertificateTemplate } from "@app/services/certificate-template/certificate-template-schema";
import { validateTemplateRegexField } from "@app/services/certificate-template/certificate-template-validators";
@ -74,7 +75,19 @@ export const registerCertificateTemplateRouter = async (server: FastifyZodProvid
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(CERTIFICATE_TEMPLATES.CREATE.ttl)
.describe(CERTIFICATE_TEMPLATES.CREATE.ttl),
keyUsages: z
.nativeEnum(CertKeyUsage)
.array()
.optional()
.default([CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT])
.describe(CERTIFICATE_TEMPLATES.CREATE.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.optional()
.default([])
.describe(CERTIFICATE_TEMPLATES.CREATE.extendedKeyUsages)
}),
response: {
200: sanitizedCertificateTemplate
@ -130,7 +143,13 @@ export const registerCertificateTemplateRouter = async (server: FastifyZodProvid
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(CERTIFICATE_TEMPLATES.UPDATE.ttl)
.describe(CERTIFICATE_TEMPLATES.UPDATE.ttl),
keyUsages: z.nativeEnum(CertKeyUsage).array().optional().describe(CERTIFICATE_TEMPLATES.UPDATE.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.optional()
.describe(CERTIFICATE_TEMPLATES.UPDATE.extendedKeyUsages)
}),
params: z.object({
certificateTemplateId: z.string().describe(CERTIFICATE_TEMPLATES.UPDATE.certificateTemplateId)

View File

@ -246,12 +246,13 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
}).array()
}).array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const identities = await server.services.identity.listOrgIdentities({
const { identityMemberships, totalCount } = await server.services.identity.listOrgIdentities({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
@ -259,7 +260,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
orgId: req.query.orgId
});
return { identities };
return { identities: identityMemberships, totalCount };
}
});
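
With totalCount now returned next to the memberships, callers can page through organization identities instead of fetching everything at once. A hedged sketch of that consumption pattern; listOrgIdentities here is a hypothetical client helper wrapping the endpoint with the offset/limit parameters documented earlier:

// Hypothetical helper; assumed to call the list endpoint with offset/limit query params.
declare function listOrgIdentities(args: {
  orgId: string;
  offset: number;
  limit: number;
}): Promise<{ identities: unknown[]; totalCount: number }>;

const fetchAllIdentities = async (orgId: string, limit = 50) => {
  const all: unknown[] = [];
  let offset = 0;
  let totalCount = Infinity;
  while (offset < totalCount) {
    const page = await listOrgIdentities({ orgId, offset, limit });
    all.push(...page.identities);
    totalCount = page.totalCount;
    offset += limit;
  }
  return all;
};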

View File

@ -29,11 +29,13 @@ import { registerSecretFolderRouter } from "./secret-folder-router";
import { registerSecretImportRouter } from "./secret-import-router";
import { registerSecretSharingRouter } from "./secret-sharing-router";
import { registerSecretTagRouter } from "./secret-tag-router";
import { registerSlackRouter } from "./slack-router";
import { registerSsoRouter } from "./sso-router";
import { registerUserActionRouter } from "./user-action-router";
import { registerUserEngagementRouter } from "./user-engagement-router";
import { registerUserRouter } from "./user-router";
import { registerWebhookRouter } from "./webhook-router";
import { registerWorkflowIntegrationRouter } from "./workflow-integration-router";
export const registerV1Routes = async (server: FastifyZodProvider) => {
await server.register(registerSsoRouter, { prefix: "/sso" });
@ -61,6 +63,14 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await server.register(registerSecretImportRouter, { prefix: "/secret-imports" });
await server.register(registerSecretFolderRouter, { prefix: "/folders" });
await server.register(
async (workflowIntegrationRouter) => {
await workflowIntegrationRouter.register(registerWorkflowIntegrationRouter);
await workflowIntegrationRouter.register(registerSlackRouter, { prefix: "/slack" });
},
{ prefix: "/workflow-integrations" }
);
await server.register(
async (projectRouter) => {
await projectRouter.register(registerProjectRouter);

View File

@ -18,9 +18,14 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
body: z.object({
inviteeEmails: z.array(z.string().trim().email()),
organizationId: z.string().trim(),
projectIds: z.array(z.string().trim()).optional(),
projectRoleSlug: z.nativeEnum(ProjectMembershipRole).optional(),
organizationRoleSlug: z.nativeEnum(OrgMembershipRole)
projects: z
.object({
id: z.string(),
projectRoleSlug: z.string().array().default([ProjectMembershipRole.Member])
})
.array()
.optional(),
organizationRoleSlug: z.string().default(OrgMembershipRole.Member)
}),
response: {
200: z.object({
@ -40,12 +45,12 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
handler: async (req) => {
if (req.auth.actor !== ActorType.USER) return;
const completeInviteLinks = await server.services.org.inviteUserToOrganization({
const { signupTokens: completeInviteLinks } = await server.services.org.inviteUserToOrganization({
orgId: req.body.organizationId,
userId: req.permission.id,
actor: req.permission.type,
actorId: req.permission.id,
inviteeEmails: req.body.inviteeEmails,
projectIds: req.body.projectIds,
projectRoleSlug: req.body.projectRoleSlug,
projects: req.body.projects,
organizationRoleSlug: req.body.organizationRoleSlug,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId

View File

@ -1,6 +1,7 @@
import { z } from "zod";
import {
AuditLogsSchema,
GroupsSchema,
IncidentContactsSchema,
OrganizationsSchema,
@ -8,7 +9,9 @@ import {
OrgRolesSchema,
UsersSchema
} from "@app/db/schemas";
import { ORGANIZATIONS } from "@app/lib/api-docs";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { AUDIT_LOGS, ORGANIZATIONS } from "@app/lib/api-docs";
import { getLastMidnightDateISO } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -62,6 +65,68 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/audit-logs",
config: {
rateLimit: readLimit
},
schema: {
description: "Get all audit logs for an organization",
querystring: z.object({
eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType),
userAgentType: z.nativeEnum(UserAgentType).optional().describe(AUDIT_LOGS.EXPORT.userAgentType),
startDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.startDate),
endDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.endDate),
offset: z.coerce.number().default(0).describe(AUDIT_LOGS.EXPORT.offset),
limit: z.coerce.number().default(20).describe(AUDIT_LOGS.EXPORT.limit),
actor: z.string().optional().describe(AUDIT_LOGS.EXPORT.actor)
}),
response: {
200: z.object({
auditLogs: AuditLogsSchema.omit({
eventMetadata: true,
eventType: true,
actor: true,
actorMetadata: true
})
.merge(
z.object({
project: z.object({
name: z.string(),
slug: z.string()
}),
event: z.object({
type: z.string(),
metadata: z.any()
}),
actor: z.object({
type: z.string(),
metadata: z.any()
})
})
)
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const auditLogs = await server.services.auditLog.listAuditLogs({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.query,
endDate: req.query.endDate,
startDate: req.query.startDate || getLastMidnightDateISO(),
auditLogActor: req.query.actor,
actor: req.permission.type
});
return { auditLogs };
}
});
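
A hedged sketch of calling the new organization-wide audit log endpoint. The mount prefix for this router is not shown in this hunk, so the URL is assumed; when startDate is omitted, the handler falls back to the last midnight:

const listOrgAuditLogs = async (token: string) => {
  const params = new URLSearchParams({ limit: "20", offset: "0" }); // eventType, actor, and dates are optional filters
  // Assumed path; adjust the prefix for your deployment.
  const res = await fetch(`https://app.infisical.example/api/v1/organization/audit-logs?${params}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  const { auditLogs } = (await res.json()) as { auditLogs: unknown[] };
  return auditLogs;
};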
server.route({
method: "GET",
url: "/:organizationId/users",

View File

@ -4,14 +4,17 @@ import {
IntegrationsSchema,
ProjectMembershipsSchema,
ProjectRolesSchema,
ProjectSlackConfigsSchema,
UserEncryptionKeysSchema,
UsersSchema
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { ProjectFilterType } from "@app/services/project/project-types";
import { validateSlackChannelsField } from "@app/services/slack/slack-auth-validators";
import { integrationAuthPubSchema, SanitizedProjectSchema } from "../sanitizedSchemas";
import { sanitizedServiceTokenSchema } from "../v2/service-token-router";
@ -542,4 +545,111 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
return { serviceTokenData };
}
});
server.route({
method: "GET",
url: "/:workspaceId/slack-config",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
workspaceId: z.string().trim()
}),
response: {
200: ProjectSlackConfigsSchema.pick({
id: true,
slackIntegrationId: true,
isAccessRequestNotificationEnabled: true,
accessRequestChannels: true,
isSecretRequestNotificationEnabled: true,
secretRequestChannels: true
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const slackConfig = await server.services.project.getProjectSlackConfig({
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId
});
if (slackConfig) {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.params.workspaceId,
event: {
type: EventType.GET_PROJECT_SLACK_CONFIG,
metadata: {
id: slackConfig.id
}
}
});
}
return slackConfig;
}
});
server.route({
method: "PUT",
url: "/:workspaceId/slack-config",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
workspaceId: z.string().trim()
}),
body: z.object({
slackIntegrationId: z.string(),
isAccessRequestNotificationEnabled: z.boolean(),
accessRequestChannels: validateSlackChannelsField,
isSecretRequestNotificationEnabled: z.boolean(),
secretRequestChannels: validateSlackChannelsField
}),
response: {
200: ProjectSlackConfigsSchema.pick({
id: true,
slackIntegrationId: true,
isAccessRequestNotificationEnabled: true,
accessRequestChannels: true,
isSecretRequestNotificationEnabled: true,
secretRequestChannels: true
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const slackConfig = await server.services.project.updateProjectSlackConfig({
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.params.workspaceId,
event: {
type: EventType.UPDATE_PROJECT_SLACK_CONFIG,
metadata: {
id: slackConfig.id,
slackIntegrationId: slackConfig.slackIntegrationId,
isAccessRequestNotificationEnabled: slackConfig.isAccessRequestNotificationEnabled,
accessRequestChannels: slackConfig.accessRequestChannels,
isSecretRequestNotificationEnabled: slackConfig.isSecretRequestNotificationEnabled,
secretRequestChannels: slackConfig.secretRequestChannels
}
}
});
return slackConfig;
}
});
};
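
To round this out, a hedged example of the body the PUT slack-config route above expects. The channel fields pass through validateSlackChannelsField, whose exact accepted shape is not shown in this diff, so plain channel-name strings are assumed here:

// Hypothetical body for PUT /:workspaceId/slack-config; IDs and channel names are placeholders.
const body = {
  slackIntegrationId: "<slack-integration-id>",
  isAccessRequestNotificationEnabled: true,
  accessRequestChannels: "access-requests",
  isSecretRequestNotificationEnabled: true,
  secretRequestChannels: "secret-approvals"
};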

View File

@ -0,0 +1,355 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { SlackIntegrationsSchema, WorkflowIntegrationsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { getConfig } from "@app/lib/config/env";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const sanitizedSlackIntegrationSchema = WorkflowIntegrationsSchema.pick({
id: true,
description: true,
slug: true,
integration: true
}).merge(
SlackIntegrationsSchema.pick({
teamName: true
})
);
export const registerSlackRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
server.route({
method: "GET",
url: "/install",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
querystring: z.object({
slug: z
.string()
.trim()
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
}),
description: z.string().optional()
}),
response: {
200: z.string()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const url = await server.services.slack.getInstallUrl({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION,
metadata: {
slug: req.query.slug,
description: req.query.description
}
}
});
return url;
}
});
server.route({
method: "GET",
url: "/reinstall",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
querystring: z.object({
id: z.string()
}),
response: {
200: z.string()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const url = await server.services.slack.getReinstallUrl({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.query.id
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ATTEMPT_REINSTALL_SLACK_INTEGRATION,
metadata: {
id: req.query.id
}
}
});
return url;
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
response: {
200: sanitizedSlackIntegrationSchema.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const slackIntegrations = await server.services.slack.getSlackIntegrationsByOrg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return slackIntegrations;
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string()
}),
response: {
200: sanitizedSlackIntegrationSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const deletedSlackIntegration = await server.services.slack.deleteSlackIntegration({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: deletedSlackIntegration.orgId,
event: {
type: EventType.DELETE_SLACK_INTEGRATION,
metadata: {
id: deletedSlackIntegration.id
}
}
});
return deletedSlackIntegration;
}
});
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string()
}),
response: {
200: sanitizedSlackIntegrationSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const slackIntegration = await server.services.slack.getSlackIntegrationById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: slackIntegration.orgId,
event: {
type: EventType.GET_SLACK_INTEGRATION,
metadata: {
id: slackIntegration.id
}
}
});
return slackIntegration;
}
});
server.route({
method: "GET",
url: "/:id/channels",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string()
}),
response: {
200: z
.object({
name: z.string(),
id: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const slackChannels = await server.services.slack.getSlackIntegrationChannels({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
return slackChannels;
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string()
}),
body: z.object({
slug: z
.string()
.trim()
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional(),
description: z.string().optional()
}),
response: {
200: sanitizedSlackIntegrationSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const slackIntegration = await server.services.slack.updateSlackIntegration({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: slackIntegration.orgId,
event: {
type: EventType.UPDATE_SLACK_INTEGRATION,
metadata: {
id: slackIntegration.id,
slug: slackIntegration.slug,
description: slackIntegration.description as string
}
}
});
return slackIntegration;
}
});
server.route({
method: "GET",
url: "/oauth_redirect",
config: {
rateLimit: readLimit
},
handler: async (req, res) => {
const installer = await server.services.slack.getSlackInstaller();
return installer.handleCallback(req.raw, res.raw, {
failureAsync: async () => {
return res.redirect(appCfg.SITE_URL as string);
},
successAsync: async (installation) => {
const metadata = JSON.parse(installation.metadata || "") as {
orgId: string;
};
return res.redirect(`${appCfg.SITE_URL}/org/${metadata.orgId}/settings?selectedTab=workflow-integrations`);
}
});
}
});
};
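Taken together, the Slack workflow routes above imply roughly the following client flow; the mount prefix is not visible in the diff and is assumed here, while /install, /oauth_redirect and /:id/channels are the relative paths defined above:
// Assumed prefix for registerSlackRouter; relative paths and response shapes come from the routes above.
const slackBase = "https://app.infisical.com/api/v1/workflow-integrations/slack";
const authHeaders = { Authorization: `Bearer ${process.env.INFISICAL_TOKEN ?? ""}` };
// 1. Request a Slack install URL for a new integration slug, then redirect the browser to it.
const installUrl: string = await fetch(`${slackBase}/install?slug=eng-alerts`, { headers: authHeaders }).then((r) => r.json());
// 2. After the user authorizes the app, Slack calls back to /oauth_redirect, which sends them to
//    /org/<orgId>/settings?selectedTab=workflow-integrations (see the successAsync handler above).
// 3. Later, list the channels available to an installed integration.
const channels: { id: string; name: string }[] = await fetch(`${slackBase}/<integration-id>/channels`, { headers: authHeaders }).then((r) => r.json());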

View File

@ -57,7 +57,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(null, false);
cb(error as Error, false);
}
}
)
@ -91,7 +91,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
return cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(null, false);
cb(error as Error, false);
}
}
)
@ -126,7 +126,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
return cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(null, false);
cb(error as Error, false);
}
}
)

View File

@ -134,4 +134,39 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
);
}
});
server.route({
method: "GET",
url: "/me/:username/groups",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
username: z.string().trim()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string(),
slug: z.string(),
orgId: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const groupMemberships = await server.services.user.listUserGroups({
username: req.params.username,
actorOrgId: req.permission.orgId,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type
});
return groupMemberships;
}
});
};

View File

@ -0,0 +1,42 @@
import { WorkflowIntegrationsSchema } from "@app/db/schemas";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const sanitizedWorkflowIntegrationSchema = WorkflowIntegrationsSchema.pick({
id: true,
description: true,
slug: true,
integration: true
});
export const registerWorkflowIntegrationRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
response: {
200: sanitizedWorkflowIntegrationSchema.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const workflowIntegrations = await server.services.workflowIntegration.getIntegrationsByOrg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return workflowIntegrations;
}
});
};

View File

@ -2,9 +2,11 @@ import { z } from "zod";
import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
import { ORGANIZATIONS } from "@app/lib/api-docs";
import { OrderByDirection } from "@app/lib/types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { OrgIdentityOrderBy } from "@app/services/identity/identity-types";
export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
server.route({
@ -24,6 +26,27 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
params: z.object({
orgId: z.string().trim().describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orgId)
}),
querystring: z.object({
offset: z.coerce.number().min(0).default(0).describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.offset).optional(),
limit: z.coerce
.number()
.min(1)
.max(20000) // TODO: temp limit until combobox added to add identity to project modal, reduce once added
.default(100)
.describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.limit)
.optional(),
orderBy: z
.nativeEnum(OrgIdentityOrderBy)
.default(OrgIdentityOrderBy.Name)
.describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderBy)
.optional(),
orderDirection: z
.nativeEnum(OrderByDirection)
.default(OrderByDirection.ASC)
.describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderDirection)
.optional(),
search: z.string().trim().describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.search).optional()
}),
response: {
200: z.object({
identityMemberships: IdentityOrgMembershipsSchema.merge(
@ -37,20 +60,26 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
})
).array()
).array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const identityMemberships = await server.services.identity.listOrgIdentities({
const { identityMemberships, totalCount } = await server.services.identity.listOrgIdentities({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.orgId
orgId: req.params.orgId,
limit: req.query.limit,
offset: req.query.offset,
orderBy: req.query.orderBy,
orderDirection: req.query.orderDirection,
search: req.query.search
});
return { identityMemberships };
return { identityMemberships, totalCount };
}
});
};
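For illustration, the expanded querystring makes paginated, searchable listing possible; the v2 organizations path below is an assumption, while the parameter names and the { identityMemberships, totalCount } response shape come from the schema above:
// Assumed endpoint path; query parameters mirror the querystring schema above.
const orgId = "<org-id>";
const res = await fetch(
  `https://app.infisical.com/api/v2/organizations/${orgId}/identity-memberships` +
    `?offset=0&limit=50&orderBy=name&orderDirection=asc&search=ci-`,
  { headers: { Authorization: `Bearer ${process.env.INFISICAL_TOKEN ?? ""}` } }
).then((r) => r.json());
// res.totalCount lets the client render page controls without fetching every identity.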

View File

@ -7,11 +7,13 @@ import {
ProjectMembershipRole,
ProjectUserMembershipRolesSchema
} from "@app/db/schemas";
import { PROJECT_IDENTITIES } from "@app/lib/api-docs";
import { ORGANIZATIONS, PROJECT_IDENTITIES } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { ProjectIdentityOrderBy } from "@app/services/identity-project/identity-project-types";
import { ProjectUserMembershipTemporaryMode } from "@app/services/project-membership/project-membership-types";
import { SanitizedProjectSchema } from "../sanitizedSchemas";
@ -214,6 +216,32 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
params: z.object({
projectId: z.string().trim().describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.projectId)
}),
querystring: z.object({
offset: z.coerce
.number()
.min(0)
.default(0)
.describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.offset)
.optional(),
limit: z.coerce
.number()
.min(1)
.max(20000) // TODO: temp limit until combobox added to add identity to project modal, reduce once added
.default(100)
.describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.limit)
.optional(),
orderBy: z
.nativeEnum(ProjectIdentityOrderBy)
.default(ProjectIdentityOrderBy.Name)
.describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderBy)
.optional(),
orderDirection: z
.nativeEnum(OrderByDirection)
.default(OrderByDirection.ASC)
.describe(ORGANIZATIONS.LIST_IDENTITY_MEMBERSHIPS.orderDirection)
.optional(),
search: z.string().trim().describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.search).optional()
}),
response: {
200: z.object({
identityMemberships: z
@ -239,19 +267,25 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
project: SanitizedProjectSchema.pick({ name: true, id: true })
})
.array()
.array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const identityMemberships = await server.services.identityProject.listProjectIdentities({
const { identityMemberships, totalCount } = await server.services.identityProject.listProjectIdentities({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.params.projectId
projectId: req.params.projectId,
limit: req.query.limit,
offset: req.query.offset,
orderBy: req.query.orderBy,
orderDirection: req.query.orderDirection,
search: req.query.search
});
return { identityMemberships };
return { identityMemberships, totalCount };
}
});

View File

@ -1,6 +1,6 @@
import { z } from "zod";
import { ProjectMembershipsSchema } from "@app/db/schemas";
import { OrgMembershipRole, ProjectMembershipRole, ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { PROJECT_USERS } from "@app/lib/api-docs";
import { writeLimit } from "@app/server/config/rateLimiter";
@ -26,7 +26,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
}),
body: z.object({
emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails),
usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames)
usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames),
roleSlugs: z.string().array().optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
}),
response: {
200: z.object({
@ -36,14 +37,21 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const memberships = await server.services.projectMembership.addUsersToProjectNonE2EE({
projectId: req.params.projectId,
const usernamesAndEmails = [...req.body.emails, ...req.body.usernames];
const { projectMemberships: memberships } = await server.services.org.inviteUserToOrganization({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
emails: req.body.emails,
usernames: req.body.usernames
inviteeEmails: usernamesAndEmails,
orgId: req.permission.orgId,
organizationRoleSlug: OrgMembershipRole.NoAccess,
projects: [
{
id: req.params.projectId,
projectRoleSlug: [ProjectMembershipRole.Member]
}
]
});
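// Illustrative request body for this endpoint (placeholder values, not from the diff):
//   { "emails": ["dev@example.com"], "usernames": [], "roleSlugs": ["member"] }
// With this change, invitees are invited to the organization with the no-access role and attached to
// the current project with the member role, rather than being added via addUsersToProjectNonE2EE.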
await server.services.auditLog.createAuditLog({

View File

@ -42,7 +42,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
organizationId: z.string().trim()
organizationId: z.string().trim(),
userAgent: z.enum(["cli"]).optional()
}),
response: {
200: z.object({
@ -53,7 +54,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
handler: async (req, res) => {
const cfg = getConfig();
const tokens = await server.services.login.selectOrganization({
userAgent: req.headers["user-agent"],
userAgent: req.body.userAgent ?? req.headers["user-agent"],
authJwtToken: req.headers.authorization,
organizationId: req.body.organizationId,
ipAddress: req.realIp

View File

@ -12,7 +12,6 @@ import { BadRequestError, DatabaseError, UnauthorizedError } from "@app/lib/erro
import { logger } from "@app/lib/logger";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { TTokenDALFactory } from "../auth-token/auth-token-dal";
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
import { TokenType } from "../auth-token/auth-token-types";
import { TOrgDALFactory } from "../org/org-dal";
@ -34,7 +33,6 @@ type TAuthLoginServiceFactoryDep = {
orgDAL: TOrgDALFactory;
tokenService: TAuthTokenServiceFactory;
smtpService: TSmtpService;
tokenDAL: TTokenDALFactory;
};
export type TAuthLoginFactory = ReturnType<typeof authLoginServiceFactory>;
@ -42,8 +40,7 @@ export const authLoginServiceFactory = ({
userDAL,
tokenService,
smtpService,
orgDAL,
tokenDAL
orgDAL
}: TAuthLoginServiceFactoryDep) => {
/*
* Private
@ -376,8 +373,6 @@ export const authLoginServiceFactory = ({
});
}
await tokenDAL.incrementTokenSessionVersion(user.id, decodedToken.tokenVersionId);
const tokens = await generateUserTokens({
authMethod: decodedToken.authMethod,
user,

View File

@ -1,15 +1,15 @@
import bcrypt from "bcrypt";
import jwt from "jsonwebtoken";
import { OrgMembershipStatus, TableName } from "@app/db/schemas";
import { OrgMembershipStatus, SecretKeyEncoding, TableName } from "@app/db/schemas";
import { convertPendingGroupAdditionsToGroupMemberships } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { getUserPrivateKey } from "@app/lib/crypto/srp";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp";
import { BadRequestError } from "@app/lib/errors";
import { isDisposableEmail } from "@app/lib/validator";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -17,14 +17,14 @@ import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
import { TokenMetadataType, TokenType, TTokenMetadata } from "../auth-token/auth-token-types";
import { TokenType } from "../auth-token/auth-token-types";
import { TOrgDALFactory } from "../org/org-dal";
import { TOrgServiceFactory } from "../org/org-service";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { addMembersToProject } from "../project-membership/project-membership-fns";
import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TUserDALFactory } from "../user/user-dal";
import { UserEncryption } from "../user/user-types";
import { TAuthDALFactory } from "./auth-dal";
import { validateProviderAuthToken, validateSignUpAuthorization } from "./auth-fns";
import { TCompleteAccountInviteDTO, TCompleteAccountSignupDTO } from "./auth-signup-type";
@ -67,8 +67,6 @@ export const authSignupServiceFactory = ({
smtpService,
orgService,
orgDAL,
projectMembershipDAL,
projectUserMembershipRoleDAL,
licenseService
}: TAuthSignupDep) => {
// first step of signup. create user and send email
@ -177,32 +175,88 @@ export const authSignupServiceFactory = ({
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
encryptionVersion: 2
encryptionVersion: UserEncryption.V2
});
const { tag, encoding, ciphertext, iv } = infisicalSymmetricEncypt(privateKey);
const updateduser = await authDAL.transaction(async (tx) => {
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
if (!us) throw new Error("User not found");
const userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
salt,
verifier,
publicKey,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
const systemGeneratedUserEncryptionKey = await userDAL.findUserEncKeyByUserId(us.id, tx);
let userEncKey;
// if the below condition is true, these are system generated credentials
// i.e. the private key was encrypted with a system generated password
// so we re-encrypt the system generated private key with the user's new password
// akhilmhdh: why keep this at all? we are moving away from e2ee and these are remnants of it;
// without a dummy key in place some things, including backward compatibility, would break. in 2025 we will remove all of this
if (
systemGeneratedUserEncryptionKey &&
!systemGeneratedUserEncryptionKey.hashedPassword &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding
) {
// get server generated password
const serverGeneratedPassword = infisicalSymmetricDecrypt({
iv: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV,
tag: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag,
ciphertext: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey,
keyEncoding: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
});
const serverGeneratedPrivateKey = await getUserPrivateKey(serverGeneratedPassword, {
...systemGeneratedUserEncryptionKey
});
const encKeys = await generateUserSrpKeys(email, password, {
publicKey: systemGeneratedUserEncryptionKey.publicKey,
privateKey: serverGeneratedPrivateKey
});
// now reencrypt server generated key with user provided password
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: UserEncryption.V2,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
publicKey: encKeys.publicKey,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
salt: encKeys.salt,
verifier: encKeys.verifier,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
} else {
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: UserEncryption.V2,
salt,
verifier,
publicKey,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
}
// If it's SAML Auth and the organization ID is present, we should check if the user has a pending invite for this org, and accept it
if (
(isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod as AuthMethod)) &&
@ -312,8 +366,7 @@ export const authSignupServiceFactory = ({
encryptedPrivateKey,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag,
authorization,
tokenMetadata
authorization
}: TCompleteAccountInviteDTO) => {
const user = await userDAL.findUserByUsername(email);
if (!user || (user && user.isAccepted)) {
@ -348,65 +401,76 @@ export const authSignupServiceFactory = ({
const updateduser = await authDAL.transaction(async (tx) => {
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
if (!us) throw new Error("User not found");
const userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
salt,
encryptionVersion: 2,
verifier,
publicKey,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
if (tokenMetadata) {
const metadataObj = jwt.verify(tokenMetadata, appCfg.AUTH_SECRET) as TTokenMetadata;
if (
metadataObj?.payload?.userId !== user.id ||
metadataObj?.payload?.orgId !== orgMembership.orgId ||
metadataObj?.type !== TokenMetadataType.InviteToProjects
) {
throw new UnauthorizedError({
message: "Malformed or invalid metadata token"
});
}
for await (const projectId of metadataObj.payload.projectIds) {
await addMembersToProject({
orgDAL,
projectDAL,
projectMembershipDAL,
projectKeyDAL,
userGroupMembershipDAL,
projectBotDAL,
projectUserMembershipRoleDAL,
smtpService
}).addMembersToNonE2EEProject(
{
emails: [user.email!],
usernames: [],
projectId,
projectMembershipRole: metadataObj.payload.projectRoleSlug,
sendEmails: false
},
{
tx,
throwOnProjectNotFound: false
}
);
}
const systemGeneratedUserEncryptionKey = await userDAL.findUserEncKeyByUserId(us.id, tx);
let userEncKey;
// this means these are system generated credentials,
// so replace the private key
if (
systemGeneratedUserEncryptionKey &&
!systemGeneratedUserEncryptionKey.hashedPassword &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding
) {
// get server generated password
const serverGeneratedPassword = infisicalSymmetricDecrypt({
iv: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV,
tag: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag,
ciphertext: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey,
keyEncoding: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
});
const serverGeneratedPrivateKey = await getUserPrivateKey(serverGeneratedPassword, {
...systemGeneratedUserEncryptionKey
});
const encKeys = await generateUserSrpKeys(email, password, {
publicKey: systemGeneratedUserEncryptionKey.publicKey,
privateKey: serverGeneratedPrivateKey
});
// now reencrypt server generated key with user provided password
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: 2,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
publicKey: encKeys.publicKey,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
salt: encKeys.salt,
verifier: encKeys.verifier,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
} else {
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: UserEncryption.V2,
salt,
verifier,
publicKey,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
}
const updatedMembersips = await orgDAL.updateMembership(

View File

@ -15,7 +15,7 @@ import {
/* eslint-disable no-bitwise */
export const createSerialNumber = () => {
const randomBytes = crypto.randomBytes(32);
const randomBytes = crypto.randomBytes(20);
randomBytes[0] &= 0x7f; // ensure the first bit is 0
return randomBytes.toString("hex");
};
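// Context for the size change above (not from the diff): RFC 5280 caps certificate serial numbers at
// 20 octets and requires a positive value, so 20 random bytes with the most significant bit of the
// first byte cleared stays compliant; createSerialNumber() therefore returns 40 hex characters.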

View File

@ -19,7 +19,13 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
import { TCertificateAuthorityCrlDALFactory } from "../../ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { CertKeyAlgorithm, CertStatus } from "../certificate/certificate-types";
import {
CertExtendedKeyUsage,
CertExtendedKeyUsageOIDToName,
CertKeyAlgorithm,
CertKeyUsage,
CertStatus
} from "../certificate/certificate-types";
import { TCertificateTemplateDALFactory } from "../certificate-template/certificate-template-dal";
import { validateCertificateDetailsAgainstTemplate } from "../certificate-template/certificate-template-fns";
import { TCertificateAuthorityCertDALFactory } from "./certificate-authority-cert-dal";
@ -41,6 +47,7 @@ import {
TCreateCaDTO,
TDeleteCaDTO,
TGetCaCertDTO,
TGetCaCertificateTemplatesDTO,
TGetCaCertsDTO,
TGetCaCsrDTO,
TGetCaDTO,
@ -64,7 +71,7 @@ type TCertificateAuthorityServiceFactoryDep = {
>;
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "create" | "findOne">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "create" | "findOne" | "update">;
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getById">;
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getById" | "find">;
certificateAuthorityQueue: TCertificateAuthorityQueueFactory; // TODO: Pick
certificateDAL: Pick<TCertificateDALFactory, "transaction" | "create" | "find">;
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
@ -108,6 +115,7 @@ export const certificateAuthorityServiceFactory = ({
notAfter,
maxPathLength,
keyAlgorithm,
requireTemplateForIssuance,
actorId,
actorAuthMethod,
actor,
@ -170,7 +178,8 @@ export const certificateAuthorityServiceFactory = ({
notBefore: notBeforeDate,
notAfter: notAfterDate,
serialNumber
})
}),
requireTemplateForIssuance
},
tx
);
@ -213,7 +222,6 @@ export const certificateAuthorityServiceFactory = ({
keys,
extensions: [
new x509.BasicConstraintsExtension(true, maxPathLength === -1 ? undefined : maxPathLength, true),
new x509.ExtendedKeyUsageExtension(["1.2.3.4.5.6.7", "2.3.4.5.6.7.8"], true),
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
await x509.SubjectKeyIdentifierExtension.create(keys.publicKey)
@ -303,7 +311,15 @@ export const certificateAuthorityServiceFactory = ({
* Update CA with id [caId].
* Note: Used to enable/disable CA
*/
const updateCaById = async ({ caId, status, actorId, actorAuthMethod, actor, actorOrgId }: TUpdateCaDTO) => {
const updateCaById = async ({
caId,
status,
requireTemplateForIssuance,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdateCaDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
@ -320,7 +336,7 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);
const updatedCa = await certificateAuthorityDAL.updateById(caId, { status });
const updatedCa = await certificateAuthorityDAL.updateById(caId, { status, requireTemplateForIssuance });
return updatedCa;
};
@ -496,7 +512,6 @@ export const certificateAuthorityServiceFactory = ({
ca.maxPathLength === -1 || !ca.maxPathLength ? undefined : ca.maxPathLength,
true
),
new x509.ExtendedKeyUsageExtension(["1.2.3.4.5.6.7", "2.3.4.5.6.7.8"], true),
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
await x509.SubjectKeyIdentifierExtension.create(caPublicKey)
@ -753,6 +768,39 @@ export const certificateAuthorityServiceFactory = ({
};
};
/**
* Return CA certificate object by ID
*/
const getCaCertById = async ({ caId, caCertId }: { caId: string; caCertId: string }) => {
const caCert = await certificateAuthorityCertDAL.findOne({
caId,
id: caCertId
});
if (!caCert) {
throw new NotFoundError({ message: "CA certificate not found" });
}
const ca = await certificateAuthorityDAL.findById(caId);
const keyId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: keyId
});
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
const caCertObj = new x509.X509Certificate(decryptedCaCert);
return caCertObj;
};
/**
* Issue certificate to be imported back in for intermediate CA
*/
@ -767,6 +815,7 @@ export const certificateAuthorityServiceFactory = ({
notAfter,
maxPathLength
}: TSignIntermediateDTO) => {
const appCfg = getConfig();
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
@ -841,7 +890,7 @@ export const certificateAuthorityServiceFactory = ({
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
}
const { caPrivateKey } = await getCaCredentials({
const { caPrivateKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
@ -850,6 +899,11 @@ export const certificateAuthorityServiceFactory = ({
});
const serialNumber = createSerialNumber();
const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
const intermediateCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
@ -869,7 +923,11 @@ export const certificateAuthorityServiceFactory = ({
),
new x509.BasicConstraintsExtension(true, maxPathLength === -1 ? undefined : maxPathLength, true),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey)
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
new x509.CRLDistributionPointsExtension([distributionPointUrl]),
new x509.AuthorityInfoAccessExtension({
caIssuers: new x509.GeneralName("url", caIssuerUrl)
})
]
});
@ -1043,7 +1101,9 @@ export const certificateAuthorityServiceFactory = ({
actorId,
actorAuthMethod,
actor,
actorOrgId
actorOrgId,
keyUsages,
extendedKeyUsages
}: TIssueCertFromCaDTO) => {
let ca: TCertificateAuthorities | undefined;
let certificateTemplate: TCertificateTemplates | undefined;
@ -1079,6 +1139,9 @@ export const certificateAuthorityServiceFactory = ({
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
@ -1156,16 +1219,70 @@ export const certificateAuthorityServiceFactory = ({
const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
const appCfg = getConfig();
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}`;
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
const extensions: x509.Extension[] = [
new x509.KeyUsagesExtension(x509.KeyUsageFlags.digitalSignature | x509.KeyUsageFlags.keyEncipherment, true),
new x509.BasicConstraintsExtension(false),
new x509.CRLDistributionPointsExtension([distributionPointUrl]),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey)
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
new x509.AuthorityInfoAccessExtension({
caIssuers: new x509.GeneralName("url", caIssuerUrl)
}),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy
];
// handle key usages
let selectedKeyUsages: CertKeyUsage[] = keyUsages ?? [];
if (keyUsages === undefined && !certificateTemplate) {
selectedKeyUsages = [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT];
}
if (keyUsages === undefined && certificateTemplate) {
selectedKeyUsages = (certificateTemplate.keyUsages ?? []) as CertKeyUsage[];
}
if (keyUsages?.length && certificateTemplate) {
const validKeyUsages = certificateTemplate.keyUsages || [];
if (keyUsages.some((keyUsage) => !validKeyUsages.includes(keyUsage))) {
throw new BadRequestError({
message: "Invalid key usage value based on template policy"
});
}
selectedKeyUsages = keyUsages;
}
const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0);
if (keyUsagesBitValue) {
extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true));
}
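// Illustrative only: the reduce above ORs the x509.KeyUsageFlags bit for each selected usage into one
// bitmask, e.g. [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT] yields
// x509.KeyUsageFlags.digitalSignature | x509.KeyUsageFlags.keyEncipherment, which becomes a critical
// KeyUsages extension; an empty selection pushes no key usage extension at all.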
// handle extended key usages
let selectedExtendedKeyUsages: CertExtendedKeyUsage[] = extendedKeyUsages ?? [];
if (extendedKeyUsages === undefined && certificateTemplate) {
selectedExtendedKeyUsages = (certificateTemplate.extendedKeyUsages ?? []) as CertExtendedKeyUsage[];
}
if (extendedKeyUsages?.length && certificateTemplate) {
const validExtendedKeyUsages = certificateTemplate.extendedKeyUsages || [];
if (extendedKeyUsages.some((eku) => !validExtendedKeyUsages.includes(eku))) {
throw new BadRequestError({
message: "Invalid extended key usage value based on template policy"
});
}
selectedExtendedKeyUsages = extendedKeyUsages;
}
if (selectedExtendedKeyUsages.length) {
extensions.push(
new x509.ExtendedKeyUsageExtension(
selectedExtendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku]),
true
)
);
}
let altNamesArray: {
type: "email" | "dns";
value: string;
@ -1247,7 +1364,9 @@ export const certificateAuthorityServiceFactory = ({
altNames,
serialNumber,
notBefore: notBeforeDate,
notAfter: notAfterDate
notAfter: notAfterDate,
keyUsages: selectedKeyUsages,
extendedKeyUsages: selectedExtendedKeyUsages
},
tx
);
@ -1296,6 +1415,7 @@ export const certificateAuthorityServiceFactory = ({
* Note: CSR is generated externally and submitted to Infisical.
*/
const signCertFromCa = async (dto: TSignCertFromCaDTO) => {
const appCfg = getConfig();
let ca: TCertificateAuthorities | undefined;
let certificateTemplate: TCertificateTemplates | undefined;
@ -1309,7 +1429,9 @@ export const certificateAuthorityServiceFactory = ({
altNames,
ttl,
notBefore,
notAfter
notAfter,
keyUsages,
extendedKeyUsages
} = dto;
let collectionId = pkiCollectionId;
@ -1349,6 +1471,9 @@ export const certificateAuthorityServiceFactory = ({
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
@ -1417,7 +1542,7 @@ export const certificateAuthorityServiceFactory = ({
message: "A common name (CN) is required in the CSR or as a parameter to this endpoint"
});
const { caPrivateKey } = await getCaCredentials({
const { caPrivateKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
@ -1425,13 +1550,115 @@ export const certificateAuthorityServiceFactory = ({
kmsService
});
const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
const extensions: x509.Extension[] = [
new x509.KeyUsagesExtension(x509.KeyUsageFlags.digitalSignature | x509.KeyUsageFlags.keyEncipherment, true),
new x509.BasicConstraintsExtension(false),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey)
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
new x509.CRLDistributionPointsExtension([distributionPointUrl]),
new x509.AuthorityInfoAccessExtension({
caIssuers: new x509.GeneralName("url", caIssuerUrl)
}),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy
];
// handle key usages
const csrKeyUsageExtension = csrObj.getExtension("2.5.29.15") as x509.KeyUsagesExtension;
let csrKeyUsages: CertKeyUsage[] = [];
if (csrKeyUsageExtension) {
csrKeyUsages = Object.values(CertKeyUsage).filter(
(keyUsage) => (x509.KeyUsageFlags[keyUsage] & csrKeyUsageExtension.usages) !== 0
);
}
let selectedKeyUsages: CertKeyUsage[] = keyUsages ?? [];
if (keyUsages === undefined && !certificateTemplate) {
if (csrKeyUsageExtension) {
selectedKeyUsages = csrKeyUsages;
} else {
selectedKeyUsages = [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT];
}
}
if (keyUsages === undefined && certificateTemplate) {
if (csrKeyUsageExtension) {
const validKeyUsages = certificateTemplate.keyUsages || [];
if (csrKeyUsages.some((keyUsage) => !validKeyUsages.includes(keyUsage))) {
throw new BadRequestError({
message: "Invalid key usage value based on template policy"
});
}
selectedKeyUsages = csrKeyUsages;
} else {
selectedKeyUsages = (certificateTemplate.keyUsages ?? []) as CertKeyUsage[];
}
}
if (keyUsages?.length && certificateTemplate) {
const validKeyUsages = certificateTemplate.keyUsages || [];
if (keyUsages.some((keyUsage) => !validKeyUsages.includes(keyUsage))) {
throw new BadRequestError({
message: "Invalid key usage value based on template policy"
});
}
selectedKeyUsages = keyUsages;
}
const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0);
if (keyUsagesBitValue) {
extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true));
}
// handle extended key usages
const csrExtendedKeyUsageExtension = csrObj.getExtension("2.5.29.37") as x509.ExtendedKeyUsageExtension;
let csrExtendedKeyUsages: CertExtendedKeyUsage[] = [];
if (csrExtendedKeyUsageExtension) {
csrExtendedKeyUsages = csrExtendedKeyUsageExtension.usages.map(
(ekuOid) => CertExtendedKeyUsageOIDToName[ekuOid as string]
);
}
let selectedExtendedKeyUsages: CertExtendedKeyUsage[] = extendedKeyUsages ?? [];
if (extendedKeyUsages === undefined && !certificateTemplate && csrExtendedKeyUsageExtension) {
selectedExtendedKeyUsages = csrExtendedKeyUsages;
}
if (extendedKeyUsages === undefined && certificateTemplate) {
if (csrExtendedKeyUsageExtension) {
const validExtendedKeyUsages = certificateTemplate.extendedKeyUsages || [];
if (csrExtendedKeyUsages.some((eku) => !validExtendedKeyUsages.includes(eku))) {
throw new BadRequestError({
message: "Invalid extended key usage value based on template policy"
});
}
selectedExtendedKeyUsages = csrExtendedKeyUsages;
} else {
selectedExtendedKeyUsages = (certificateTemplate.extendedKeyUsages ?? []) as CertExtendedKeyUsage[];
}
}
if (extendedKeyUsages?.length && certificateTemplate) {
const validExtendedKeyUsages = certificateTemplate.extendedKeyUsages || [];
if (extendedKeyUsages.some((keyUsage) => !validExtendedKeyUsages.includes(keyUsage))) {
throw new BadRequestError({
message: "Invalid extended key usage value based on template policy"
});
}
selectedExtendedKeyUsages = extendedKeyUsages;
}
if (selectedExtendedKeyUsages.length) {
extensions.push(
new x509.ExtendedKeyUsageExtension(
selectedExtendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku]),
true
)
);
}
let altNamesFromCsr: string = "";
let altNamesArray: {
type: "email" | "dns";
@ -1527,7 +1754,9 @@ export const certificateAuthorityServiceFactory = ({
altNames: altNamesFromCsr || altNames,
serialNumber,
notBefore: notBeforeDate,
notAfter: notAfterDate
notAfter: notAfterDate,
keyUsages: selectedKeyUsages,
extendedKeyUsages: selectedExtendedKeyUsages
},
tx
);
@ -1570,6 +1799,40 @@ export const certificateAuthorityServiceFactory = ({
};
};
/**
* Return list of certificate templates for CA with id [caId].
*/
const getCaCertificateTemplates = async ({
caId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TGetCaCertificateTemplatesDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateTemplates
);
const certificateTemplates = await certificateTemplateDAL.find({ caId });
return {
certificateTemplates,
ca
};
};
return {
createCa,
getCaById,
@ -1579,9 +1842,11 @@ export const certificateAuthorityServiceFactory = ({
renewCaCert,
getCaCerts,
getCaCert,
getCaCertById,
signIntermediate,
importCertToCa,
issueCertFromCa,
signCertFromCa
signCertFromCa,
getCaCertificateTemplates
};
};

View File

@ -4,7 +4,7 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TCertificateAuthorityCrlDALFactory } from "../../ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { CertKeyAlgorithm } from "../certificate/certificate-types";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "../certificate/certificate-types";
import { TCertificateAuthorityCertDALFactory } from "./certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
import { TCertificateAuthoritySecretDALFactory } from "./certificate-authority-secret-dal";
@ -38,6 +38,7 @@ export type TCreateCaDTO = {
notAfter?: string;
maxPathLength: number;
keyAlgorithm: CertKeyAlgorithm;
requireTemplateForIssuance: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TGetCaDTO = {
@ -47,6 +48,7 @@ export type TGetCaDTO = {
export type TUpdateCaDTO = {
caId: string;
status?: CaStatus;
requireTemplateForIssuance?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteCaDTO = {
@ -95,6 +97,8 @@ export type TIssueCertFromCaDTO = {
ttl: string;
notBefore?: string;
notAfter?: string;
keyUsages?: CertKeyUsage[];
extendedKeyUsages?: CertExtendedKeyUsage[];
} & Omit<TProjectPermission, "projectId">;
export type TSignCertFromCaDTO =
@ -110,6 +114,8 @@ export type TSignCertFromCaDTO =
ttl?: string;
notBefore?: string;
notAfter?: string;
keyUsages?: CertKeyUsage[];
extendedKeyUsages?: CertExtendedKeyUsage[];
}
| ({
isInternal: false;
@ -123,8 +129,14 @@ export type TSignCertFromCaDTO =
ttl: string;
notBefore?: string;
notAfter?: string;
keyUsages?: CertKeyUsage[];
extendedKeyUsages?: CertExtendedKeyUsage[];
} & Omit<TProjectPermission, "projectId">);
export type TGetCaCertificateTemplatesDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;
export type TDNParts = {
commonName?: string;
organization?: string;

View File

@ -7,7 +7,7 @@ const isValidDate = (dateString: string) => {
export const validateCaDateField = z.string().trim().refine(isValidDate, { message: "Invalid date format" });
export const hostnameRegex = /^(?!:\/\/)([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
export const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
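// Illustrative matches for the updated regex (assumed, not from the diff): "example.com" and
// "*.example.com" both pass, since the new optional (\*\.)? group admits a single leading wildcard
// label, while values carrying a scheme such as "https://example.com" still fail to match.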
export const validateAltNamesField = z
.string()
.trim()

View File

@ -9,7 +9,9 @@ export const sanitizedCertificateTemplate = CertificateTemplatesSchema.pick({
commonName: true,
subjectAlternativeName: true,
pkiCollectionId: true,
ttl: true
ttl: true,
keyUsages: true,
extendedKeyUsages: true
}).merge(
z.object({
projectId: z.string(),

View File

@ -57,7 +57,9 @@ export const certificateTemplateServiceFactory = ({
actorId,
actorAuthMethod,
actor,
actorOrgId
actorOrgId,
keyUsages,
extendedKeyUsages
}: TCreateCertTemplateDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) {
@ -86,7 +88,9 @@ export const certificateTemplateServiceFactory = ({
name,
commonName,
subjectAlternativeName,
ttl
ttl,
keyUsages,
extendedKeyUsages
},
tx
);
@ -113,7 +117,9 @@ export const certificateTemplateServiceFactory = ({
actorId,
actorAuthMethod,
actor,
actorOrgId
actorOrgId,
keyUsages,
extendedKeyUsages
}: TUpdateCertTemplateDTO) => {
const certTemplate = await certificateTemplateDAL.getById(id);
if (!certTemplate) {
@ -153,7 +159,9 @@ export const certificateTemplateServiceFactory = ({
commonName,
subjectAlternativeName,
name,
ttl
ttl,
keyUsages,
extendedKeyUsages
},
tx
);

View File

@ -1,4 +1,5 @@
import { TProjectPermission } from "@app/lib/types";
import { CertExtendedKeyUsage, CertKeyUsage } from "@app/services/certificate/certificate-types";
export type TCreateCertTemplateDTO = {
caId: string;
@ -7,6 +8,8 @@ export type TCreateCertTemplateDTO = {
commonName: string;
subjectAlternativeName: string;
ttl: string;
keyUsages: CertKeyUsage[];
extendedKeyUsages: CertExtendedKeyUsage[];
} & Omit<TProjectPermission, "projectId">;
export type TUpdateCertTemplateDTO = {
@ -17,6 +20,8 @@ export type TUpdateCertTemplateDTO = {
commonName?: string;
subjectAlternativeName?: string;
ttl?: string;
keyUsages?: CertKeyUsage[];
extendedKeyUsages?: CertExtendedKeyUsage[];
} & Omit<TProjectPermission, "projectId">;
export type TGetCertTemplateDTO = {

View File

@ -1,3 +1,5 @@
import * as x509 from "@peculiar/x509";
import { TProjectPermission } from "@app/lib/types";
export enum CertStatus {
@ -12,6 +14,36 @@ export enum CertKeyAlgorithm {
ECDSA_P384 = "EC_secp384r1"
}
export enum CertKeyUsage {
DIGITAL_SIGNATURE = "digitalSignature",
KEY_ENCIPHERMENT = "keyEncipherment",
NON_REPUDIATION = "nonRepudiation",
DATA_ENCIPHERMENT = "dataEncipherment",
KEY_AGREEMENT = "keyAgreement",
KEY_CERT_SIGN = "keyCertSign",
CRL_SIGN = "cRLSign",
ENCIPHER_ONLY = "encipherOnly",
DECIPHER_ONLY = "decipherOnly"
}
export enum CertExtendedKeyUsage {
CLIENT_AUTH = "clientAuth",
SERVER_AUTH = "serverAuth",
CODE_SIGNING = "codeSigning",
EMAIL_PROTECTION = "emailProtection",
TIMESTAMPING = "timeStamping",
OCSP_SIGNING = "ocspSigning"
}
export const CertExtendedKeyUsageOIDToName: Record<string, CertExtendedKeyUsage> = {
[x509.ExtendedKeyUsage.clientAuth]: CertExtendedKeyUsage.CLIENT_AUTH,
[x509.ExtendedKeyUsage.serverAuth]: CertExtendedKeyUsage.SERVER_AUTH,
[x509.ExtendedKeyUsage.codeSigning]: CertExtendedKeyUsage.CODE_SIGNING,
[x509.ExtendedKeyUsage.emailProtection]: CertExtendedKeyUsage.EMAIL_PROTECTION,
[x509.ExtendedKeyUsage.ocspSigning]: CertExtendedKeyUsage.OCSP_SIGNING,
[x509.ExtendedKeyUsage.timeStamping]: CertExtendedKeyUsage.TIMESTAMPING
};
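// Illustrative lookup (not from the diff): @peculiar/x509 exposes ExtendedKeyUsage values as OID
// strings, so a CSR carrying the serverAuth OID (1.3.6.1.5.5.7.3.1) resolves back to the enum name:
//   CertExtendedKeyUsageOIDToName[x509.ExtendedKeyUsage.serverAuth] === CertExtendedKeyUsage.SERVER_AUTH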
export enum CrlReason {
UNSPECIFIED = "UNSPECIFIED",
KEY_COMPROMISE = "KEY_COMPROMISE",

View File

@ -95,6 +95,30 @@ export const groupProjectDALFactory = (db: TDbClient) => {
}
};
const findByUserId = async (userId: string, orgId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.join(TableName.Groups, function () {
this.on(`${TableName.UserGroupMembership}.groupId`, "=", `${TableName.Groups}.id`).andOn(
`${TableName.Groups}.orgId`,
"=",
db.raw("?", [orgId])
);
})
.select(
db.ref("id").withSchema(TableName.Groups),
db.ref("name").withSchema(TableName.Groups),
db.ref("slug").withSchema(TableName.Groups),
db.ref("orgId").withSchema(TableName.Groups)
);
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindByUserId" });
}
};
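// Illustrative usage (assumed wiring, not shown in the diff): the new GET /me/:username/groups route
// presumably resolves a user's org-scoped groups through a helper like this, e.g.
//   const groups = await groupProjectDAL.findByUserId(userId, orgId); // [{ id, name, slug, orgId }, ...]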
// The GroupProjectMembership table has a reference to the project (projectId) AND the group (groupId).
// We need to join the GroupProjectMembership table with the Groups table to get the group name and slug.
// We also need to join the GroupProjectMembershipRole table to get the role of the group in the project.
@ -197,5 +221,5 @@ export const groupProjectDALFactory = (db: TDbClient) => {
return members;
};
return { ...groupProjectOrm, findByProjectId, findAllProjectGroupMembers };
return { ...groupProjectOrm, findByProjectId, findByUserId, findAllProjectGroupMembers };
};

View File

@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
export type TIdentityAccessTokenDALFactory = ReturnType<typeof identityAccessTokenDALFactory>;
@ -95,6 +97,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
};
const removeExpiredTokens = async (tx?: Knex) => {
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token started`);
try {
const docs = (tx || db)(TableName.IdentityAccessToken)
.where({
@ -131,7 +134,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
});
})
.delete();
return await docs;
await docs;
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired access token completed`);
} catch (error) {
throw new DatabaseError({ error, name: "IdentityAccessTokenPrune" });
}

View File

@ -0,0 +1,4 @@
import picomatch from "picomatch";
export const doesFieldValueMatchOidcPolicy = (fieldValue: string, policyValue: string) =>
policyValue === fieldValue || picomatch.isMatch(fieldValue, policyValue);
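A minimal sketch of what this helper permits, assuming standard picomatch glob semantics (the example subject strings are made up):
import { doesFieldValueMatchOidcPolicy } from "./identity-oidc-auth-fns";
// exact policy values still match
doesFieldValueMatchOidcPolicy("repo:acme/app:ref:refs/heads/main", "repo:acme/app:ref:refs/heads/main"); // true
// glob policies now match as well, e.g. any branch of the acme/app repository
doesFieldValueMatchOidcPolicy("repo:acme/app:ref:refs/heads/main", "repo:acme/app:ref:refs/heads/*"); // true
// unrelated subjects are still rejected
doesFieldValueMatchOidcPolicy("repo:other/app:ref:refs/heads/main", "repo:acme/app:ref:refs/heads/*"); // false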

View File

@ -28,6 +28,7 @@ import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identit
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
import { TOrgBotDALFactory } from "../org/org-bot-dal";
import { TIdentityOidcAuthDALFactory } from "./identity-oidc-auth-dal";
import { doesFieldValueMatchOidcPolicy } from "./identity-oidc-auth-fns";
import {
TAttachOidcAuthDTO,
TGetOidcAuthDTO,
@ -123,7 +124,7 @@ export const identityOidcAuthServiceFactory = ({
}) as Record<string, string>;
if (identityOidcAuth.boundSubject) {
if (tokenData.sub !== identityOidcAuth.boundSubject) {
if (!doesFieldValueMatchOidcPolicy(tokenData.sub, identityOidcAuth.boundSubject)) {
throw new ForbiddenRequestError({
message: "Access denied: OIDC subject not allowed."
});
@ -131,7 +132,11 @@ export const identityOidcAuthServiceFactory = ({
}
if (identityOidcAuth.boundAudiences) {
if (!identityOidcAuth.boundAudiences.split(", ").includes(tokenData.aud)) {
if (
!identityOidcAuth.boundAudiences
.split(", ")
.some((policyValue) => doesFieldValueMatchOidcPolicy(tokenData.aud, policyValue))
) {
throw new ForbiddenRequestError({
message: "Access denied: OIDC audience not allowed."
});
@ -142,7 +147,9 @@ export const identityOidcAuthServiceFactory = ({
Object.keys(identityOidcAuth.boundClaims).forEach((claimKey) => {
const claimValue = (identityOidcAuth.boundClaims as Record<string, string>)[claimKey];
// handle both single and multi-valued claims
if (!claimValue.split(", ").some((claimEntry) => tokenData[claimKey] === claimEntry)) {
if (
!claimValue.split(", ").some((claimEntry) => doesFieldValueMatchOidcPolicy(tokenData[claimKey], claimEntry))
) {
throw new ForbiddenRequestError({
message: "Access denied: OIDC claim not allowed."
});

View File

@ -1,9 +1,11 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { TableName, TIdentities } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, sqlNestRelationships } from "@app/lib/knex";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { ProjectIdentityOrderBy, TListProjectIdentityDTO } from "@app/services/identity-project/identity-project-types";
export type TIdentityProjectDALFactory = ReturnType<typeof identityProjectDALFactory>;
@ -107,12 +109,45 @@ export const identityProjectDALFactory = (db: TDbClient) => {
}
};
const findByProjectId = async (projectId: string, filter: { identityId?: string } = {}, tx?: Knex) => {
const findByProjectId = async (
projectId: string,
filter: { identityId?: string } & Pick<
TListProjectIdentityDTO,
"limit" | "offset" | "search" | "orderBy" | "orderDirection"
> = {},
tx?: Knex
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.IdentityProjectMembership)
// TODO: scott - optimize, there's redundancy here with project membership and the below query
const fetchIdentitySubquery = (tx || db.replicaNode())(TableName.Identity)
.where((qb) => {
if (filter.search) {
void qb.whereILike(`${TableName.Identity}.name`, `%${filter.search}%`);
}
})
.join(
TableName.IdentityProjectMembership,
`${TableName.IdentityProjectMembership}.identityId`,
`${TableName.Identity}.id`
)
.where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
.orderBy(
`${TableName.Identity}.${filter.orderBy ?? ProjectIdentityOrderBy.Name}`,
filter.orderDirection ?? OrderByDirection.ASC
)
.select(selectAllTableCols(TableName.Identity))
.as(TableName.Identity); // required for subqueries
if (filter.limit) {
void fetchIdentitySubquery.offset(filter.offset ?? 0).limit(filter.limit);
}
const query = (tx || db.replicaNode())(TableName.IdentityProjectMembership)
.where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
.join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Identity, `${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`)
.join<TIdentities, TIdentities>(fetchIdentitySubquery, (bd) => {
bd.on(`${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`);
})
.where((qb) => {
if (filter.identityId) {
void qb.where("identityId", filter.identityId);
@ -154,6 +189,19 @@ export const identityProjectDALFactory = (db: TDbClient) => {
db.ref("name").as("projectName").withSchema(TableName.Project)
);
// TODO: scott - joins seem to reorder identities so need to order again, for the sake of urgency will optimize at a later point
if (filter.orderBy) {
switch (filter.orderBy) {
case "name":
void query.orderBy(`${TableName.Identity}.${filter.orderBy}`, filter.orderDirection);
break;
default:
// do nothing
}
}
const docs = await query;
const members = sqlNestRelationships({
data: docs,
parentMapper: ({ identityId, identityName, identityAuthMethod, id, createdAt, updatedAt, projectName }) => ({
@ -208,9 +256,37 @@ export const identityProjectDALFactory = (db: TDbClient) => {
}
};
const getCountByProjectId = async (
projectId: string,
filter: { identityId?: string } & Pick<TListProjectIdentityDTO, "search"> = {},
tx?: Knex
) => {
try {
const identities = await (tx || db.replicaNode())(TableName.IdentityProjectMembership)
.where(`${TableName.IdentityProjectMembership}.projectId`, projectId)
.join(TableName.Project, `${TableName.IdentityProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Identity, `${TableName.IdentityProjectMembership}.identityId`, `${TableName.Identity}.id`)
.where((qb) => {
if (filter.identityId) {
void qb.where("identityId", filter.identityId);
}
if (filter.search) {
void qb.whereILike(`${TableName.Identity}.name`, `%${filter.search}%`);
}
})
.count();
return Number(identities[0].count);
} catch (error) {
throw new DatabaseError({ error, name: "GetCountByProjectId" });
}
};
return {
...identityProjectOrm,
findByIdentityId,
findByProjectId
findByProjectId,
getCountByProjectId
};
};
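The pagination fix applies limit/offset (plus search and ordering) to a subquery that selects the project's identities first, and only then joins that page back for the remaining joins. Because a single identity can expand into several rows once roles are left-joined, paginating after those joins would return fewer identities per page than requested. A minimal sketch of the pattern with illustrative table names (not the exact query above):

```ts
import { Knex } from "knex";

// Paginate a subquery over the identities first, then join it back, so later LEFT JOINs
// (which can fan one identity out into several rows) cannot shrink or reorder the page.
export const findIdentityPage = (db: Knex, projectId: string, limit: number, offset: number) => {
  const identityPage = db("identities")
    .join("project_memberships", "project_memberships.identityId", "identities.id")
    .where("project_memberships.projectId", projectId)
    .orderBy("identities.name", "asc")
    .offset(offset)
    .limit(limit)
    .select("identities.*")
    .as("identities"); // alias so the outer query can keep referring to "identities.*"

  return db("project_memberships")
    .where("project_memberships.projectId", projectId)
    .join(identityPage, "project_memberships.identityId", "identities.id")
    .leftJoin("project_membership_roles", "project_membership_roles.projectMembershipId", "project_memberships.id")
    .select("project_memberships.*", "identities.name", "project_membership_roles.role");
};
```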

View File

@ -268,7 +268,12 @@ export const identityProjectServiceFactory = ({
actor,
actorId,
actorAuthMethod,
actorOrgId
actorOrgId,
limit,
offset,
orderBy,
orderDirection,
search
}: TListProjectIdentityDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
@ -279,8 +284,17 @@ export const identityProjectServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
const identityMemberships = await identityProjectDAL.findByProjectId(projectId);
return identityMemberships;
const identityMemberships = await identityProjectDAL.findByProjectId(projectId, {
limit,
offset,
orderBy,
orderDirection,
search
});
const totalCount = await identityProjectDAL.getCountByProjectId(projectId, { search });
return { identityMemberships, totalCount };
};
const getProjectIdentityByIdentityId = async ({
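On the service side the pagination options are forwarded to the DAL, and the result now pairs the page with a total count from getCountByProjectId. A hedged sketch of a caller — the service method name and the surrounding request-context variables are assumptions, not shown in this diff:

```ts
import { OrderByDirection } from "@app/lib/types";
import { ProjectIdentityOrderBy } from "@app/services/identity-project/identity-project-types";

// Assumed caller shape; projectId/actor fields come from the request context.
const PAGE_SIZE = 20;

const { identityMemberships, totalCount } = await identityProjectService.listProjectIdentities({
  projectId,
  actor,
  actorId,
  actorAuthMethod,
  actorOrgId,
  limit: PAGE_SIZE,
  offset: PAGE_SIZE * 2, // third page
  orderBy: ProjectIdentityOrderBy.Name,
  orderDirection: OrderByDirection.ASC,
  search: "ci-"
});

const totalPages = Math.ceil(totalCount / PAGE_SIZE);
```

The count is computed with the same `search` filter as the page query, so the page math stays consistent with what the page actually contains.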

View File

@ -1,4 +1,4 @@
import { TProjectPermission } from "@app/lib/types";
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { ProjectUserMembershipTemporaryMode } from "../project-membership/project-membership-types";
@ -40,8 +40,18 @@ export type TDeleteProjectIdentityDTO = {
identityId: string;
} & TProjectPermission;
export type TListProjectIdentityDTO = TProjectPermission;
export type TListProjectIdentityDTO = {
limit?: number;
offset?: number;
orderBy?: ProjectIdentityOrderBy;
orderDirection?: OrderByDirection;
search?: string;
} & TProjectPermission;
export type TGetProjectIdentityByIdentityIdDTO = {
identityId: string;
} & TProjectPermission;
export enum ProjectIdentityOrderBy {
Name = "name"
}

View File

@ -5,6 +5,7 @@ import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
export type TIdentityUaClientSecretDALFactory = ReturnType<typeof identityUaClientSecretDALFactory>;
@ -30,7 +31,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
let deletedClientSecret: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret started`);
do {
try {
const findExpiredClientSecretQuery = (tx || db)(TableName.IdentityUaClientSecret)
@ -39,7 +42,7 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
})
.orWhere((qb) => {
void qb
.where("clientSecretNumUses", ">", 0)
.where("clientSecretNumUsesLimit", ">", 0)
.andWhere(
"clientSecretNumUses",
">=",
@ -71,7 +74,9 @@ export const identityUaClientSecretDALFactory = (db: TDbClient) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedClientSecret.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedClientSecret.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: remove expired universal auth client secret completed`);
};
return { ...uaClientSecretOrm, incrementUsage, removeExpiredClientSecrets };
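Two fixes are visible in this hunk. First, the usage-based expiry branch now keys off a configured `clientSecretNumUsesLimit > 0` rather than re-checking `clientSecretNumUses`. Second, the loop condition previously kept `numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE` active on every pass, so on a failure-free run (0 < MAX) the loop never terminated even after the table was drained; gating that clause on `isRetrying` makes it apply only after an actual error. A distilled sketch of the corrected loop, where the batch helper and retry limit are stand-ins rather than the real definitions:

```ts
const MAX_RETRY_ON_FAILURE = 2; // stand-in; the real constant is defined elsewhere in this file

export const pruneInBatches = async (deleteNextBatch: () => Promise<{ id: string }[]>) => {
  let deletedRows: { id: string }[] = [];
  let numberOfRetryOnFailure = 0;
  let isRetrying = false;

  do {
    try {
      deletedRows = await deleteNextBatch(); // one bounded DELETE ... RETURNING id
    } catch {
      numberOfRetryOnFailure += 1;
    } finally {
      await new Promise((resolve) => {
        setTimeout(resolve, 10); // time to breathe for db
      });
    }
    isRetrying = numberOfRetryOnFailure > 0;
    // exit once a failure-free pass deletes nothing; retry only up to the limit after an error
  } while (deletedRows.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
};
```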

View File

@ -4,6 +4,8 @@ import { TDbClient } from "@app/db";
import { TableName, TIdentityOrgMemberships } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { TListOrgIdentitiesByOrgIdDTO } from "@app/services/identity/identity-types";
export type TIdentityOrgDALFactory = ReturnType<typeof identityOrgDALFactory>;
@ -27,9 +29,20 @@ export const identityOrgDALFactory = (db: TDbClient) => {
}
};
const find = async (filter: Partial<TIdentityOrgMemberships>, tx?: Knex) => {
const find = async (
{
limit,
offset = 0,
orderBy,
orderDirection = OrderByDirection.ASC,
search,
...filter
}: Partial<TIdentityOrgMemberships> &
Pick<TListOrgIdentitiesByOrgIdDTO, "offset" | "limit" | "orderBy" | "orderDirection" | "search">,
tx?: Knex
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.IdentityOrgMembership)
const query = (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.where(filter)
.join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
@ -44,6 +57,30 @@ export const identityOrgDALFactory = (db: TDbClient) => {
.select(db.ref("id").as("identityId").withSchema(TableName.Identity))
.select(db.ref("name").as("identityName").withSchema(TableName.Identity))
.select(db.ref("authMethod").as("identityAuthMethod").withSchema(TableName.Identity));
if (limit) {
void query.offset(offset).limit(limit);
}
if (orderBy) {
switch (orderBy) {
case "name":
void query.orderBy(`${TableName.Identity}.${orderBy}`, orderDirection);
break;
case "role":
void query.orderBy(`${TableName.IdentityOrgMembership}.${orderBy}`, orderDirection);
break;
default:
// do nothing
}
}
if (search?.length) {
void query.whereILike(`${TableName.Identity}.name`, `%${search}%`);
}
const docs = await query;
return docs.map(
({
crId,
@ -79,5 +116,27 @@ export const identityOrgDALFactory = (db: TDbClient) => {
}
};
return { ...identityOrgOrm, find, findOne };
const countAllOrgIdentities = async (
{ search, ...filter }: Partial<TIdentityOrgMemberships> & Pick<TListOrgIdentitiesByOrgIdDTO, "search">,
tx?: Knex
) => {
try {
const query = (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.where(filter)
.join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`)
.count();
if (search?.length) {
void query.whereILike(`${TableName.Identity}.name`, `%${search}%`);
}
const identities = await query;
return Number(identities[0].count);
} catch (error) {
throw new DatabaseError({ error, name: "countAllOrgIdentities" });
}
};
return { ...identityOrgOrm, find, findOne, countAllOrgIdentities };
};
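The org-level DAL gets the same treatment: `find` now accepts limit/offset/ordering/search on top of the membership filter, and the companion `countAllOrgIdentities` returns the matching total (cast with `Number(...)` because the Postgres driver returns `count` as a string). A hedged usage sketch — the `orgId` filter column and the values are illustrative:

```ts
// Assumed call site: one page of org identities plus the total for the same search term.
const [memberships, totalCount] = await Promise.all([
  identityOrgDAL.find({ orgId, limit: 25, offset: 0, search: "deploy" }),
  identityOrgDAL.countAllOrgIdentities({ orgId, search: "deploy" })
]);
```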

Some files were not shown because too many files have changed in this diff.