Compare commits


147 Commits

Author SHA1 Message Date
edf6a37fe5 fix lint 2025-03-11 13:08:04 -04:00
f5749e326a remove regex and fix lint 2025-03-11 12:49:55 -04:00
75e0a68b68 remove password regex 2025-03-11 12:46:43 -04:00
6fa41a609b remove char and digit ranges and other requested changes/improvements 2025-03-11 12:28:48 -04:00
16d3bbb67a Add password requirements to dynamic secret
This adds a new accordion for specifying custom requirements for the generated password for DB drivers. We can reuse this pattern for other dynamic secrets too
2025-03-10 23:46:04 -04:00
3986df8e8a Merge pull request #3214 from akhilmhdh/fix/gateway-cert-error
feat: changed to permission check
2025-03-10 14:59:16 -04:00
3fcd84b592 Merge pull request #3198 from Infisical/daniel/reset-password-serverside
Daniel/reset password serverside
2025-03-10 22:31:22 +04:00
29e39b558b feat: changed to permission check 2025-03-10 23:59:17 +05:30
9458c8b04f Update auth-fns.ts 2025-03-10 22:15:30 +04:00
3b95c5d859 Merge pull request #3211 from Infisical/add-systemmd-service
add systemd service for gateway
2025-03-10 14:07:18 -04:00
de8f315211 Merge pull request #3201 from Infisical/feat/addMoreVisibilityToServerAdmins
Add is-admin filter to Server Admin Console and add a component to sh…
2025-03-10 14:06:08 -04:00
9960d58e1b Merge pull request #3213 from akhilmhdh/fix/gateway-cert-error
feat: removed ca pool from dialing
2025-03-10 13:02:34 -04:00
0057404562 feat: removed ca pool from dialing 2025-03-10 22:22:58 +05:30
47ca1b3011 Merge branch 'main' into feat/addMoreVisibilityToServerAdmins 2025-03-10 11:57:15 -03:00
716cd090c4 Merge pull request #3212 from Infisical/daniel/breaking-change-check-fix
fix: breaking change check fix
2025-03-10 18:55:30 +04:00
e870bb3ade Update check-api-for-breaking-changes.yml 2025-03-10 18:53:01 +04:00
98c9e98082 Merge pull request #3207 from Infisical/feat/allowProjectSlugEdition
Allow project slug editing
2025-03-10 11:32:29 -03:00
a814f459ab Add condition to hide Instance Admins on cloud instances 2025-03-10 10:58:39 -03:00
66817a40db Adjust modal width to match the rest of the modals 2025-03-10 08:31:19 -03:00
20bd2ca71c Improve slug description, regex and replace useState with watch 2025-03-10 08:18:43 -03:00
004a8b71a2 feat: refactored the systemd service to separate package file 2025-03-10 16:03:51 +05:30
f0fce3086e Merge pull request #3208 from Infisical/fix/TagsDeleteButtonNotWorking
Use slug to check tag on remove icon click
2025-03-09 22:32:36 -04:00
a9e7db6fc0 Merge pull request #3057 from akhilmhdh/fix/permission-scope
Permission boundary check
2025-03-09 22:25:16 -04:00
2bd681d58f add systemd service for gateway 2025-03-09 16:07:33 -04:00
51fef3ce60 Merge pull request #3210 from akhilmhdh/fix/gateway-patch-up
Gateway patch up
2025-03-09 14:03:21 -04:00
df9e7bf6ee feat: renamed timeout 2025-03-09 22:06:27 +05:30
04479bb70a fix: removed cert read to load 2025-03-09 21:37:28 +05:30
cdc90411e5 feat: updated gateway to use dtls 2025-03-09 21:15:10 +05:30
dcb05a3093 feat: resolved being unable to edit SQL form due to gateway change 2025-03-09 21:15:10 +05:30
b055cda64d feat: increased turn cred duration, and fixed gateway crashing 2025-03-09 21:15:10 +05:30
f68602280e Merge pull request #3197 from Infisical/gateway-arch
add gateway security docs
2025-03-07 20:15:49 -05:00
f9483afe95 Merge pull request #3204 from akoullick1/patch-13
Update meetings.mdx
2025-03-07 18:31:16 -05:00
d742534f6a Update meetings.mdx
ECD detail
2025-03-07 14:54:38 -08:00
99eb8eb8ed Use slug to check tag on remove icon click 2025-03-07 19:45:10 -03:00
1dea024880 Improvement on admin visibility UI components 2025-03-07 19:19:55 -03:00
699e03c1a9 Allow project slug editing and refactor frontend components to reduce duplicated code 2025-03-07 17:49:30 -03:00
f6372249b4 Merge pull request #3206 from Infisical/fix/removeInviteAllOnProjectCreation
Remove addAllMembers option from project creation modal
2025-03-07 17:16:12 -03:00
0f42fcd688 Remove addAllMembers option from project creation modal 2025-03-07 16:59:12 -03:00
2e02f8bea8 Merge pull request #3199 from akhilmhdh/feat/webhook-reminder
Added webhook trigger for secret reminder
2025-03-07 14:17:11 -05:00
8203158c63 Merge pull request #3195 from Infisical/feat/addSecretNameToSlackNotification
Feat/add secret name to slack notification
2025-03-07 15:39:06 -03:00
ada04ed4fc Update meetings.mdx
Added daily standup
2025-03-07 10:19:54 -08:00
cc9cc70125 Merge pull request #3203 from Infisical/misc/add-uncaught-exception-handler
misc: add uncaught exception handler
2025-03-08 00:36:08 +08:00
045debeaf3 misc: added unhandled rejection handler 2025-03-08 00:29:23 +08:00
3fb8ad2fac misc: add uncaught exception handler 2025-03-08 00:22:27 +08:00
795d9e4413 Update auth-password-service.ts 2025-03-07 20:15:30 +04:00
67f2e4671a requested changes 2025-03-07 19:59:29 +04:00
cbe3acde74 Merge pull request #3202 from Infisical/fix/address-unhandled-promise-rejects-causing-502
fix: address unhandled promise rejects causing 502s
2025-03-07 23:48:43 +08:00
de480b5771 Merge pull request #3181 from Infisical/daniel/id-get-secret
feat: get secret by ID
2025-03-07 19:35:52 +04:00
07b93c5cec Update secret-v2-bridge-service.ts 2025-03-07 19:26:18 +04:00
77431b4719 requested changes 2025-03-07 19:26:18 +04:00
50610945be feat: get secret by ID 2025-03-07 19:25:53 +04:00
57f54440d6 misc: added support for type 2025-03-07 23:15:05 +08:00
9711e73a06 fix: address unhandled promise rejects causing 502s 2025-03-07 23:05:47 +08:00
214f837041 Add is-admin filter to Server Admin Console and add a component to show the server admins on side panel 2025-03-07 11:42:15 -03:00
58ebebb162 Merge pull request #3191 from Infisical/feat/addActorToVersionHistory
Add actor to secret version history
2025-03-07 08:06:24 -03:00
65ddddb6de Change slack notification label from key to secret key 2025-03-07 08:03:02 -03:00
a55b26164a feat: updated doc 2025-03-07 15:14:09 +05:30
6cd448b8a5 feat: webhook on secret reminder trigger 2025-03-07 15:01:14 +05:30
c48c9ae628 cleanup 2025-03-07 04:55:18 +04:00
7003ad608a Update user-service.ts 2025-03-07 04:37:08 +04:00
104edca6f1 feat: reset password without emergency kit 2025-03-07 04:34:34 +04:00
75345d91c0 add gateway security docs 2025-03-06 18:49:57 -05:00
b7640f2d03 Lint fixes 2025-03-06 17:36:09 -03:00
2ee4d68fd0 Fix case for multiple projects messing with the joins 2025-03-06 17:04:01 -03:00
3ca931acf1 Add condition to query to only retrieve the actual project id 2025-03-06 16:38:49 -03:00
7f6715643d Change label from Secret to Key for consistency with the UI 2025-03-06 15:31:37 -03:00
8e311658d4 Improve query to only use one to retrieve all information 2025-03-06 15:15:52 -03:00
9116acd37b Fix linter issues 2025-03-06 13:07:03 -03:00
0513307d98 Improve code quality 2025-03-06 12:55:10 -03:00
28c2f1874e Add secret name to slack notification 2025-03-06 12:46:43 -03:00
efc3b6d474 Remove secret_version_v1 changes 2025-03-06 11:31:26 -03:00
07e1d1b130 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-06 10:56:54 -03:00
7f76779124 Fix frontend type errors 2025-03-06 09:17:55 -03:00
30bcf1f204 Fix linter and type issues, made a small fix for secret rotation platform events 2025-03-06 09:10:13 -03:00
706feafbf2 revert featureset changes 2025-03-06 00:20:08 -05:00
fc4e3f1f72 update relay health check 2025-03-05 23:50:11 -05:00
dcd5f20325 add example 2025-03-05 22:20:13 -05:00
58f3e116a3 add example 2025-03-05 22:19:56 -05:00
7bc5aad8ec fix infinite loop 2025-03-05 22:14:09 -05:00
a16dc3aef6 add windows stub to fix build issue 2025-03-05 18:29:29 -05:00
da7746c639 use forked pion 2025-03-05 17:54:23 -05:00
cd5b6da541 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-05 17:53:57 -03:00
2dda7180a9 Fix linter issue 2025-03-05 17:36:00 -03:00
30ccfbfc8e Add actor to secret version history 2025-03-05 17:20:57 -03:00
aa76924ee6 fix import 2025-03-05 14:48:36 -05:00
d8f679e72d Merge pull request #3189 from Infisical/revert-3128-daniel/view-secret-value-permission
Revert "feat(api/secrets): view secret value permission"
2025-03-05 14:15:16 -05:00
bf6cfbac7a Revert "feat(api/secrets): view secret value permission" 2025-03-05 14:15:02 -05:00
8e82813894 Merge pull request #3128 from Infisical/daniel/view-secret-value-permission
feat(api/secrets): view secret value permission
2025-03-05 22:57:25 +04:00
df21a1fb81 fix: types 2025-03-05 22:47:40 +04:00
bdbb6346cb fix: permission error instead of not found error on single secret import 2025-03-05 22:47:40 +04:00
ea9da6d2a8 fix: view secret value (requested changes) 2025-03-05 22:47:40 +04:00
3c2c70912f Update secret-service.ts 2025-03-05 22:47:40 +04:00
b607429b99 chore: minor ui improvements 2025-03-05 22:47:40 +04:00
16c1516979 fix: move permissions 2025-03-05 22:47:40 +04:00
f5dbbaf1fd Update SecretEditRow.tsx 2025-03-05 22:47:40 +04:00
2a292455ef chore: minor ui improvements 2025-03-05 22:47:40 +04:00
4d040706a9 Update SecretDetailSidebar.tsx 2025-03-05 22:47:40 +04:00
5183f76397 fix: pathing 2025-03-05 22:47:40 +04:00
4b3efb43b0 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
96046726b2 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
a86a951acc Update secret-snapshot-service.ts 2025-03-05 22:47:40 +04:00
5e70860160 fix: ui bug 2025-03-05 22:47:40 +04:00
abbd427ee2 minor lint fixes 2025-03-05 22:47:40 +04:00
8fd5fdbc6a chore: minor changes 2025-03-05 22:47:40 +04:00
77e1ccc8d7 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
711cc438f6 chore: better error 2025-03-05 22:47:40 +04:00
8447190bf8 fix: coderabbit requested changes 2025-03-05 22:47:40 +04:00
12b447425b chore: further cleanup 2025-03-05 22:47:40 +04:00
9cb1a31287 fix: allow Viewer role to read value 2025-03-05 22:47:40 +04:00
b00413817d fix: add service token read value permissions 2025-03-05 22:47:40 +04:00
2a8bd74e88 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
f28f4f7561 fix: requested changes 2025-03-05 22:47:40 +04:00
f0b05c683b fix: service token creation 2025-03-05 22:47:40 +04:00
3e8f02a4f9 Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
50ee60a3ea Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
21bdecdf2a Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
bf09461416 Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
1ff615913c fix: bulk secret create 2025-03-05 22:47:40 +04:00
281cedf1a2 fix: updated migration to support additional privileges 2025-03-05 22:47:39 +04:00
a8d847f139 chore: remove logs 2025-03-05 22:47:39 +04:00
2a0c0590f1 fix: cleanup and bug fixes 2025-03-05 22:47:39 +04:00
2e6d525d27 chore: cleanup 2025-03-05 22:47:39 +04:00
7fd4249d00 fix: frontend requested changes 2025-03-05 22:47:39 +04:00
90cfc44592 fix: personal secret support without read value permission 2025-03-05 22:47:39 +04:00
8c403780c2 chore: lint & ts 2025-03-05 22:47:39 +04:00
b69c091f2f Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:39 +04:00
4a66395ce6 feat(api): view secret value, WIP 2025-03-05 22:47:39 +04:00
8c18753e3f Merge pull request #3188 from Infisical/daniel/fix-breaking-check
fix: breaking changes check
2025-03-05 22:45:56 +04:00
85c5d69c36 chore: remove breaking change test 2025-03-05 22:42:29 +04:00
94fe577046 chore: test breaking change 2025-03-05 22:38:35 +04:00
a0a579834c fix: check docs endpoint instead of status 2025-03-05 22:36:43 +04:00
b5575f4c20 fix api endpoint 2025-03-05 22:31:01 +04:00
f98f212ecf Update check-api-for-breaking-changes.yml 2025-03-05 22:23:49 +04:00
b331a4a708 fix: breaking changes check 2025-03-05 22:17:16 +04:00
e351a16b5a Merge pull request #3184 from Infisical/feat/add-secret-approval-review-comment
feat: add secret approval review comment
2025-03-05 12:24:59 -05:00
2cfca823f2 Merge pull request #3187 from akhilmhdh/feat/connector
feat: added ca to cli
2025-03-05 10:13:27 -05:00
a8398a7009 feat: added ca to cli 2025-03-05 20:00:45 +05:30
8c054cedfc misc: added section for approval and rejections 2025-03-05 22:30:26 +08:00
d1ad605ac4 misc: address nit 2025-03-05 21:19:41 +08:00
9dd5857ff5 misc: minor UI 2025-03-05 19:32:26 +08:00
babbacdc96 feat: add secret approval review comment 2025-03-05 19:25:56 +08:00
c54eafc128 fix: resolved typo 2025-02-01 01:55:49 +05:30
757942aefc feat: resolved nits 2025-02-01 01:55:49 +05:30
1d57629036 feat: added unit test in github action 2025-02-01 01:55:49 +05:30
8061066e27 feat: added detailed description in UI notification 2025-02-01 01:55:48 +05:30
c993b1bbe3 feat: completed new permission boundary check 2025-02-01 01:55:48 +05:30
2cbf33ac14 feat: added new permission check 2025-02-01 01:55:11 +05:30
148 changed files with 4659 additions and 2272 deletions

View File

@ -35,7 +35,20 @@ jobs:
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api
echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"
docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api
echo "Container status right after creation:"
docker ps -a | grep infisical-api
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@ -49,29 +62,42 @@ jobs:
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
sleep 5
SECONDS=$((SECONDS+5))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
run: go install github.com/oasdiff/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker stop infisical-api || true
docker rm infisical-api || true

View File

@ -34,7 +34,10 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
run: npm run test:e2e
working-directory: backend
env:
@ -44,4 +47,5 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down

View File

@ -40,6 +40,7 @@
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.string("comment");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.dropColumn("comment");
});
}
}

View File

@ -0,0 +1,45 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (!hasSecretVersionV2UserActorId) {
t.uuid("userActorId");
t.foreign("userActorId").references("id").inTable(TableName.Users);
}
if (!hasSecretVersionV2IdentityActorId) {
t.uuid("identityActorId");
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
}
if (!hasSecretVersionV2ActorType) {
t.string("actorType");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (hasSecretVersionV2UserActorId) {
t.dropColumn("userActorId");
}
if (hasSecretVersionV2IdentityActorId) {
t.dropColumn("identityActorId");
}
if (hasSecretVersionV2ActorType) {
t.dropColumn("actorType");
}
});
}
}

View File

@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
reviewerUserId: z.string().uuid(),
comment: z.string().nullable().optional()
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;

View File

@ -25,7 +25,10 @@ export const SecretVersionsV2Schema = z.object({
folderId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
userActorId: z.string().uuid().nullable().optional(),
identityActorId: z.string().uuid().nullable().optional(),
actorType: z.string().nullable().optional()
});
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;

View File

@ -159,7 +159,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string()
}),
body: z.object({
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
comment: z.string().optional()
}),
response: {
200: z.object({
@ -175,8 +176,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status
status: req.body.status,
comment: req.body.comment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: review.projectId,
event: {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
metadata: {
secretApprovalRequestId: review.requestId,
reviewedBy: review.reviewerUserId,
status: review.status as ApprovalStatus,
comment: review.comment || ""
}
}
});
return { review };
}
});
@ -267,7 +285,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })

View File

@ -22,6 +22,7 @@ import {
} from "@app/services/secret-sync/secret-sync-types";
import { KmipPermission } from "../kmip/kmip-enum";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
export type TListProjectAuditLogDTO = {
filter: {
@ -165,6 +166,7 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
@ -1314,6 +1316,16 @@ interface SecretApprovalRequest {
};
}
interface SecretApprovalRequestReview {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
metadata: {
secretApprovalRequestId: string;
reviewedBy: string;
status: ApprovalStatus;
comment: string;
};
}
interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
@ -2482,4 +2494,5 @@ export type Event =
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| CreateSecretRequestEvent;
| CreateSecretRequestEvent
| SecretApprovalRequestReview;

View File

@ -1,5 +1,16 @@
import { z } from "zod";
export type PasswordRequirements = {
length: number;
required: {
lowercase: number;
uppercase: number;
digits: number;
symbols: number;
};
allowedSymbols?: string;
};
export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
@ -100,6 +111,28 @@ export const DynamicSecretSqlDBSchema = z.object({
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements"),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
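As a quick illustration of the new passwordRequirements field, the sketch below re-declares just that sub-schema with zod and parses a sample config; the key constraint is that the sum of required character counts must not exceed the total length, and the length itself is capped at 250. This is a standalone copy for illustration, not the schema exported by the codebase.

import { z } from "zod";

// Standalone copy of the passwordRequirements sub-schema from the diff above
// (illustrative; the real definition lives inside DynamicSecretSqlDBSchema).
const PasswordRequirementsSchema = z
  .object({
    length: z.number().min(1).max(250),
    required: z.object({
      lowercase: z.number().min(0),
      uppercase: z.number().min(0),
      digits: z.number().min(0),
      symbols: z.number().min(0)
    }),
    allowedSymbols: z.string().optional()
  })
  .refine(
    (data) => Object.values(data.required).reduce((sum, count) => sum + count, 0) <= data.length,
    "Sum of required characters cannot exceed the total length"
  );

// 1 + 1 + 1 + 0 = 3 required characters fits within length 32, so this parses.
const sample = PasswordRequirementsSchema.parse({
  length: 32,
  required: { lowercase: 1, uppercase: 1, digits: 1, symbols: 0 },
  allowedSymbols: "-_.~!*"
});

console.log(sample.length); // 32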

View File

@ -1,6 +1,6 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
@ -8,16 +8,99 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;
const DEFAULT_PASSWORD_REQUIREMENTS = {
length: 48,
required: {
lowercase: 1,
uppercase: 1,
digits: 1,
symbols: 0
},
allowedSymbols: "-_.~!*"
};
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
const ORACLE_PASSWORD_REQUIREMENTS = {
...DEFAULT_PASSWORD_REQUIREMENTS,
length: 30
};
const generatePassword = (provider: SqlProviders, requirements?: PasswordRequirements) => {
const defaultReqs = provider === SqlProviders.Oracle ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
const finalReqs = requirements || defaultReqs;
try {
const { length, required, allowedSymbols } = finalReqs;
const chars = {
lowercase: "abcdefghijklmnopqrstuvwxyz",
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
digits: "0123456789",
symbols: allowedSymbols || "-_.~!*"
};
const parts: string[] = [];
if (required.lowercase > 0) {
parts.push(
...Array(required.lowercase)
.fill(0)
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
);
}
if (required.uppercase > 0) {
parts.push(
...Array(required.uppercase)
.fill(0)
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
);
}
if (required.digits > 0) {
parts.push(
...Array(required.digits)
.fill(0)
.map(() => chars.digits[randomInt(chars.digits.length)])
);
}
if (required.symbols > 0) {
parts.push(
...Array(required.symbols)
.fill(0)
.map(() => chars.symbols[randomInt(chars.symbols.length)])
);
}
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
const remainingLength = Math.max(length - requiredTotal, 0);
const allowedChars = Object.entries(chars)
.filter(([key]) => required[key as keyof typeof required] > 0)
.map(([, value]) => value)
.join("");
parts.push(
...Array(remainingLength)
.fill(0)
.map(() => allowedChars[randomInt(allowedChars.length)])
);
// shuffle the array to mix up the characters
for (let i = parts.length - 1; i > 0; i -= 1) {
const j = randomInt(i + 1);
[parts[i], parts[j]] = [parts[j], parts[i]];
}
return parts.join("");
} catch (error: unknown) {
const message = error instanceof Error ? error.message : "Unknown error";
throw new Error(`Failed to generate password: ${message}`);
}
};
const generateUsername = (provider: SqlProviders) => {
@ -115,7 +198,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {

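To make the generation strategy above easier to follow in isolation, here is a self-contained sketch of the same fill-then-shuffle idea: satisfy each required character class first, top up from the allowed pool, then Fisher-Yates shuffle with crypto.randomInt. The function name, charsets, and counts are illustrative, not the provider's exported API.

import { randomInt } from "crypto";

// Illustrative re-implementation of the fill-then-shuffle approach used by
// generatePassword in the diff above (not the provider's exported function).
const sketchGeneratePassword = (
  length: number,
  required: Record<string, number>,
  charsets: Record<string, string>
): string => {
  const parts: string[] = [];

  // Guarantee each required character class is represented.
  for (const [cls, count] of Object.entries(required)) {
    const set = charsets[cls];
    for (let i = 0; i < count; i += 1) parts.push(set[randomInt(set.length)]);
  }

  // Top up to the requested length from the full allowed pool.
  const pool = Object.values(charsets).join("");
  while (parts.length < length) parts.push(pool[randomInt(pool.length)]);

  // Fisher-Yates shuffle so required characters are not clustered up front.
  for (let i = parts.length - 1; i > 0; i -= 1) {
    const j = randomInt(i + 1);
    [parts[i], parts[j]] = [parts[j], parts[i]];
  }

  return parts.join("");
};

console.log(
  sketchGeneratePassword(
    16,
    { lowercase: 1, uppercase: 1, digits: 1 },
    {
      lowercase: "abcdefghijklmnopqrstuvwxyz",
      uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
      digits: "0123456789"
    }
  )
);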
View File

@ -3,7 +3,7 @@ import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
@ -87,9 +87,14 @@ export const groupServiceFactory = ({
actorOrgId
);
const isCustomRole = Boolean(customRole);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const group = await groupDAL.transaction(async (tx) => {
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@ -156,9 +161,13 @@ export const groupServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredNewRolePermission)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
}
@ -329,9 +338,13 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to add user to more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
@ -396,9 +409,13 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete user from more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });

View File

@ -3,7 +3,7 @@ import { packRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@ -79,9 +79,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -161,9 +165,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (data?.slug) {
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
@ -239,9 +247,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id);
return {

View File

@ -3,7 +3,7 @@ import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@ -88,9 +88,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -172,9 +176,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -268,9 +276,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to edit more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to edit more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,

View File

@ -5,22 +5,6 @@ import { PermissionConditionOperators } from "@app/lib/casl";
export const PermissionConditionSchema = {
[PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$REGEX]: z
.string()
.min(1)
.refine(
(el) => {
try {
// eslint-disable-next-line no-new
new RegExp(el);
return true;
} catch {
return false;
}
},
{ message: "Invalid regex pattern" }
),
[PermissionConditionOperators.$EQ]: z.string().min(1),
[PermissionConditionOperators.$NEQ]: z.string().min(1),
[PermissionConditionOperators.$GLOB]: z

View File

@ -3,7 +3,7 @@ import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@ -76,9 +76,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
slug,
@ -163,9 +167,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (dto?.slug) {
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({

View File

@ -100,6 +100,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
@ -162,8 +163,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
reviewerEmail: email,
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
reviewerFirstName: firstName,
reviewerComment: comment
}) =>
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
},
{
key: "approverUserId",

View File

@ -320,6 +320,7 @@ export const secretApprovalRequestServiceFactory = ({
approvalId,
actor,
status,
comment,
actorId,
actorAuthMethod,
actorOrgId
@ -372,15 +373,18 @@ export const secretApprovalRequestServiceFactory = ({
return secretApprovalRequestReviewerDAL.create(
{
status,
comment,
requestId: secretApprovalRequest.id,
reviewerUserId: actorId
},
tx
);
}
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
});
return reviewStatus;
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
};
const updateApprovalStatus = async ({
@ -499,7 +503,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!hasMinApproval && !isSoftEnforcement)
throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
const { botKey, shouldUseSecretV2Bridge, project } = await projectBotService.getBotKey(projectId);
let mergeStatus;
if (shouldUseSecretV2Bridge) {
// this cycle if for bridged secrets
@ -857,7 +861,6 @@ export const secretApprovalRequestServiceFactory = ({
if (isSoftEnforcement) {
const cfg = getConfig();
const project = await projectDAL.findProjectById(projectId);
const env = await projectEnvDAL.findOne({ id: policy.envId });
const requestedByUser = await userDAL.findOne({ id: actorId });
const approverUsers = await userDAL.find({
@ -1152,7 +1155,8 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretName) ?? []))]
}
}
});
@ -1452,7 +1456,8 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretKey) ?? []))]
}
}
});

View File

@ -80,6 +80,7 @@ export type TStatusChangeDTO = {
export type TReviewRequestDTO = {
approvalId: string;
status: ApprovalStatus;
comment?: string;
} & Omit<TProjectPermission, "projectId">;
export type TApprovalRequestCountDTO = TProjectPermission;

View File

@ -13,6 +13,7 @@ import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@ -332,6 +333,7 @@ export const secretRotationQueueFactory = ({
await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
secretId: id
})),
tx

View File

@ -7,6 +7,7 @@ import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@ -370,7 +371,21 @@ export const secretSnapshotServiceFactory = ({
const secrets = await secretV2BridgeDAL.insertMany(
rollbackSnaps.flatMap(({ secretVersions, folderId }) =>
secretVersions.map(
({ latestSecretVersion, version, updatedAt, createdAt, secretId, envId, id, tags, ...el }) => ({
({
latestSecretVersion,
version,
updatedAt,
createdAt,
secretId,
envId,
id,
tags,
// exclude the bottom fields from the secret - they are for versioning only.
userActorId,
identityActorId,
actorType,
...el
}) => ({
...el,
id: secretId,
version: deletedTopLevelSecsGroupById[secretId] ? latestSecretVersion + 1 : latestSecretVersion,
@ -401,8 +416,18 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({ ...el, secretId: id })),
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
secretId: id,
userActorId,
identityActorId,
actorType
})),
tx
);
await secretVersionV2TagBridgeDAL.insertMany(

View File

@ -459,7 +459,8 @@ export const PROJECTS = {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project."
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)"
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."

View File

@ -0,0 +1,669 @@
import { createMongoAbility } from "@casl/ability";
import { PermissionConditionOperators } from ".";
import { validatePermissionBoundary } from "./boundary";
describe("Validate Permission Boundary Function", () => {
test.each([
{
title: "child with equal privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with less privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with more privilege",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: false,
missingPermissions: [{ action: "edit", subject: "secrets" }]
},
{
title: "parent with multiple and child with multiple",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([
{
action: ["create"],
subject: "members"
},
{
action: ["create"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Child with no access",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent and child disjoint set",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
expectValid: false,
missingPermissions: ["create", "edit", "delete", "read"].map((el) => ({
action: el,
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}))
},
{
title: "Parent with inverted rules",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent with inverted rules - child accessing invalid one",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]),
expectValid: false,
missingPermissions: [
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]
}
])("Check permission: $title", ({ parentPermission, childPermission, expectValid, missingPermissions }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
if (expectValid) {
expect(permissionBoundary.isValid).toBeTruthy();
} else {
expect(permissionBoundary.isValid).toBeFalsy();
expect(permissionBoundary.missingPermissions).toEqual(expect.arrayContaining(missingPermissions));
}
});
});
describe("Validate Permission Boundary: Checking Parent $eq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "staging" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $neq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/dev**" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $IN operator", () => {
const parentPermission = createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: `${PermissionConditionOperators.$IN} - 2`,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$NEQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $GLOB operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/hello/world", "/hello/world2"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**/world" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/print" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});

View File

@ -0,0 +1,249 @@
import { MongoAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import picomatch from "picomatch";
import { PermissionConditionOperators } from "./index";
type TMissingPermission = {
action: string;
subject: string;
conditions?: MongoQuery;
};
type TPermissionConditionShape = {
[PermissionConditionOperators.$EQ]: string;
[PermissionConditionOperators.$NEQ]: string;
[PermissionConditionOperators.$GLOB]: string;
[PermissionConditionOperators.$IN]: string[];
};
const getPermissionSetID = (action: string, subject: string) => `${action}:${subject}`;
const invertTheOperation = (shouldInvert: boolean, operation: boolean) => (shouldInvert ? !operation : operation);
const formatConditionOperator = (condition: TPermissionConditionShape | string) => {
return (
typeof condition === "string" ? { [PermissionConditionOperators.$EQ]: condition } : condition
) as TPermissionConditionShape;
};
const isOperatorsASubset = (parentSet: TPermissionConditionShape, subset: TPermissionConditionShape) => {
// compare each operator on the child (subset) side against each operator on the parent side
if (subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ];
const isInverted = !subset[PermissionConditionOperators.$EQ];
if (
parentSet[PermissionConditionOperators.$EQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$NEQ] === subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
invertTheOperation(isInverted, !parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue))
) {
return false;
}
// a child $ne cannot be bounded by a parent $glob, so fail outright
if (parentSet[PermissionConditionOperators.$GLOB] && isInverted) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], { strictSlashes: false })
) {
return false;
}
}
if (subset[PermissionConditionOperators.$IN]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$IN];
if (
parentSet[PermissionConditionOperators.$EQ] &&
(subsetOperatorValue.length !== 1 || subsetOperatorValue[0] !== parentSet[PermissionConditionOperators.$EQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
subsetOperatorValue.includes(parentSet[PermissionConditionOperators.$NEQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
!subsetOperatorValue.every((el) => parentSet[PermissionConditionOperators.$IN].includes(el))
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!subsetOperatorValue.every((el) =>
picomatch.isMatch(el, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
)
) {
return false;
}
}
if (subset[PermissionConditionOperators.$GLOB]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$GLOB];
const { isGlob } = picomatch.scan(subsetOperatorValue);
// if the child value is a real glob pattern, a fixed parent operator (e.g. $eq, $in) cannot contain it,
// because the glob matches a wider set of values
// example: parent $in [dev, prod] vs child glob dev** - the glob matches anything starting with dev, so it is broader
if (
isGlob &&
Object.keys(parentSet).some(
(el) => el !== PermissionConditionOperators.$GLOB && el !== PermissionConditionOperators.$NEQ
)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$EQ] &&
parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
picomatch.isMatch(parentSet[PermissionConditionOperators.$NEQ], subsetOperatorValue, {
strictSlashes: false
})
) {
return false;
}
// if the parent is $in, a child $glob only passes when its pattern appears literally in the parent list - anything else covers a bigger scope
if (
parentSet[PermissionConditionOperators.$IN] &&
!parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
) {
return false;
}
}
return true;
};
const isSubsetForSamePermissionSubjectAction = (
parentSetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
subsetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
appendToMissingPermission: (condition?: MongoQuery) => void
) => {
const isMissingConditionInParent = parentSetRules.every((el) => !el.conditions);
if (isMissingConditionInParent) return true;
// all subset rules must pass in comparison to the parent rules
return subsetRules.every((subsetRule) => {
const subsetRuleConditions = subsetRule.conditions as Record<string, TPermissionConditionShape | string>;
// compare subset rule with all parent rules
const isSubsetOfNonInvertedParentSet = parentSetRules
.filter((el) => !el.inverted)
.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<string, TPermissionConditionShape | string>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the child rule lacks a field the parent constrains, it can never be a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// normalize plain string conditions into the { $eq: value } shape
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
});
const invertedParentSetRules = parentSetRules.filter((el) => el.inverted);
const isNotSubsetOfInvertedParentSet = invertedParentSetRules.length
? !invertedParentSetRules.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<
string,
TPermissionConditionShape | string
>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the child rule lacks a field the parent constrains, it can never be a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// normalize plain string conditions into the { $eq: value } shape
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
})
: true;
const isSubset = isSubsetOfNonInvertedParentSet && isNotSubsetOfInvertedParentSet;
if (!isSubset) {
appendToMissingPermission(subsetRule.conditions);
}
return isSubset;
});
};
export const validatePermissionBoundary = (parentSetPermissions: MongoAbility, subsetPermissions: MongoAbility) => {
const checkedPermissionRules = new Set<string>();
const missingPermissions: TMissingPermission[] = [];
subsetPermissions.rules.forEach((subsetPermissionRules) => {
const subsetPermissionSubject = subsetPermissionRules.subject.toString();
let subsetPermissionActions: string[] = [];
// actions can be string or string[]
if (typeof subsetPermissionRules.action === "string") {
subsetPermissionActions.push(subsetPermissionRules.action);
} else {
subsetPermissionRules.action.forEach((subsetPermissionAction) => {
subsetPermissionActions.push(subsetPermissionAction);
});
}
// skip actions that have already been processed
subsetPermissionActions = subsetPermissionActions.filter(
(el) => !checkedPermissionRules.has(getPermissionSetID(el, subsetPermissionSubject))
);
if (!subsetPermissionActions.length) return;
subsetPermissionActions.forEach((subsetPermissionAction) => {
const parentSetRulesOfSubset = parentSetPermissions.possibleRulesFor(
subsetPermissionAction,
subsetPermissionSubject
);
const nonInvertedOnes = parentSetRulesOfSubset.filter((el) => !el.inverted);
if (!nonInvertedOnes.length) {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject });
return;
}
const subsetRules = subsetPermissions.possibleRulesFor(subsetPermissionAction, subsetPermissionSubject);
isSubsetForSamePermissionSubjectAction(parentSetRulesOfSubset, subsetRules, (conditions) => {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject, conditions });
});
});
subsetPermissionActions.forEach((el) =>
checkedPermissionRules.add(getPermissionSetID(el, subsetPermissionSubject))
);
});
if (missingPermissions.length) {
return { isValid: false as const, missingPermissions };
}
return { isValid: true };
};
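
For orientation, a minimal usage sketch of the helper exported above, mirroring the falsy test cases earlier in this diff; the abilities, condition values, and relative import paths are illustrative, not taken from the repository:

import { createMongoAbility } from "@casl/ability";
import { PermissionConditionOperators } from "./index";
import { validatePermissionBoundary } from "./boundary";

// Parent (boundary) permission: may edit secrets only in the "dev" environment.
const parentPermission = createMongoAbility([
  {
    action: ["edit"],
    subject: "secrets",
    conditions: { environment: { [PermissionConditionOperators.$EQ]: "dev" } }
  }
]);

// Child permission that also tries to reach "prod".
const childPermission = createMongoAbility([
  {
    action: ["edit"],
    subject: "secrets",
    conditions: { environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] } }
  }
]);

const boundary = validatePermissionBoundary(parentPermission, childPermission);
if (!boundary.isValid) {
  // missingPermissions lists the action/subject/conditions the child requested
  // but the parent does not cover (here: edit on secrets with environment $in ["dev", "prod"]).
  console.log(boundary.missingPermissions);
}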

View File

@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { buildMongoQueryMatcher, MongoAbility } from "@casl/ability";
import { buildMongoQueryMatcher } from "@casl/ability";
import { FieldCondition, FieldInstruction, JsInterpreter } from "@ucast/mongo2js";
import picomatch from "picomatch";
@ -20,45 +20,8 @@ const glob: JsInterpreter<FieldCondition<string>> = (node, object, context) => {
export const conditionsMatcher = buildMongoQueryMatcher({ $glob }, { glob });
/**
* Extracts and formats permissions from a CASL Ability object or a raw permission set.
*/
const extractPermissions = (ability: MongoAbility) => {
const permissions: string[] = [];
ability.rules.forEach((permission) => {
if (typeof permission.action === "string") {
permissions.push(`${permission.action}_${permission.subject as string}`);
} else {
permission.action.forEach((permissionAction) => {
permissions.push(`${permissionAction}_${permission.subject as string}`);
});
}
});
return permissions;
};
/**
* Compares two sets of permissions to determine if the first set is at least as privileged as the second set.
* The function checks if all permissions in the second set are contained within the first set and if the first set has equal or more permissions.
*
*/
export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2: MongoAbility) => {
const set1 = new Set(extractPermissions(permissions1));
const set2 = new Set(extractPermissions(permissions2));
for (const perm of set2) {
if (!set1.has(perm)) {
return false;
}
}
return set1.size >= set2.size;
};
export enum PermissionConditionOperators {
$IN = "$in",
$ALL = "$all",
$REGEX = "$regex",
$EQ = "$eq",
$NEQ = "$ne",
$GLOB = "$glob"

View File

@ -52,10 +52,18 @@ export class ForbiddenRequestError extends Error {
error: unknown;
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown } = {}) {
details?: unknown;
constructor({
name,
error,
message,
details
}: { message?: string; name?: string; error?: unknown; details?: unknown } = {}) {
super(message ?? "You are not allowed to access this resource");
this.name = name || "ForbiddenError";
this.error = error;
this.details = details;
}
}
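
A short, hedged sketch of how the extended constructor is meant to be used: the permission-boundary call sites later in this diff pass the missing permissions through details, and the error-handler change further down forwards error?.details on 403 responses so clients can see which permissions were missing. The values below are illustrative; the import path matches the one used elsewhere in this diff:

import { ForbiddenRequestError } from "@app/lib/errors";

const denyWithDetails = () => {
  throw new ForbiddenRequestError({
    name: "PermissionBoundaryError",
    message: "Failed to assign group to a more privileged role",
    details: {
      missingPermissions: [
        { action: "edit", subject: "secrets", conditions: { environment: { $in: ["prod"] } } }
      ]
    }
  });
};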

View File

@ -2,7 +2,7 @@
import crypto from "node:crypto";
import net from "node:net";
import * as quic from "@infisical/quic";
import quicDefault, * as quicModule from "@infisical/quic";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
@ -10,6 +10,8 @@ import { logger } from "../logger";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const quic = quicDefault || quicModule;
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
@ -94,6 +96,7 @@ export const pingGatewayAndVerify = async ({
error: err as Error
});
});
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
try {
const stream = quicClient.connection.newStream("bidi");
@ -106,17 +109,13 @@ export const pingGatewayAndVerify = async ({
const { value, done } = await reader.read();
if (done) {
throw new BadRequestError({
message: "Gateway closed before receiving PONG"
});
throw new Error("Gateway closed before receiving PONG");
}
const response = Buffer.from(value).toString();
if (response !== "PONG\n" && response !== "PONG") {
throw new BadRequestError({
message: `Failed to Ping. Unexpected response: ${response}`
});
throw new Error(`Failed to Ping. Unexpected response: ${response}`);
}
reader.releaseLock();
@ -144,6 +143,7 @@ interface TProxyServer {
server: net.Server;
port: number;
cleanup: () => Promise<void>;
getProxyError: () => string;
}
const setupProxyServer = async ({
@ -168,6 +168,7 @@ const setupProxyServer = async ({
error: err as Error
});
});
const proxyErrorMsg = [""];
return new Promise((resolve, reject) => {
const server = net.createServer();
@ -183,31 +184,33 @@ const setupProxyServer = async ({
const forwardWriter = stream.writable.getWriter();
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
forwardWriter.releaseLock();
/* eslint-disable @typescript-eslint/no-misused-promises */
// Set up bidirectional copy
const setupCopy = async () => {
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
try {
const writer = stream.writable.getWriter();
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", async (chunk) => {
await writer.write(chunk);
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
// Handle client connection close
clientConn.on("end", async () => {
await writer.close();
// Handle client connection close
clientConn.on("end", () => {
writer.close().catch((err) => {
logger.error(err);
});
});
clientConn.on("error", async (err) => {
await writer.abort(err);
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
} catch (err) {
clientConn.destroy();
}
});
})();
// QUIC to Client
@ -236,15 +239,18 @@ const setupProxyServer = async ({
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
await setupCopy();
//
setupCopy();
// Handle connection closure
clientConn.on("close", async () => {
await stream.destroy();
clientConn.on("close", () => {
stream.destroy().catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
const cleanup = async () => {
@ -252,13 +258,18 @@ const setupProxyServer = async ({
await stream.destroy();
};
clientConn.on("error", (err) => {
logger.error(err, "Client socket error");
void cleanup();
reject(err);
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientConn.on("end", cleanup);
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientConn.end();
@ -270,12 +281,12 @@ const setupProxyServer = async ({
reject(err);
});
server.on("close", async () => {
await quicClient?.destroy();
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
/* eslint-enable */
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
@ -291,7 +302,8 @@ const setupProxyServer = async ({
cleanup: async () => {
server.close();
await quicClient?.destroy();
}
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
@ -314,7 +326,7 @@ export const withGatewayProxy = async (
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
// Setup the proxy server
const { port, cleanup } = await setupProxyServer({
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
relayPort,
@ -328,8 +340,12 @@ export const withGatewayProxy = async (
// Execute the callback with the allocated port
await callback(port);
} catch (err) {
logger.error(err, "Failed to proxy");
throw new BadRequestError({ message: (err as Error)?.message });
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
throw new BadRequestError({ message: proxyErrorMessage || (err as Error)?.message });
} finally {
// Ensure cleanup happens regardless of success or failure
await cleanup();

View File

@ -1,6 +1,6 @@
import crypto from "node:crypto";
const TURN_TOKEN_TTL = 60 * 60 * 1000; // 24 hours in milliseconds
const TURN_TOKEN_TTL = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
export const getTurnCredentials = (id: string, authSecret: string, ttl = TURN_TOKEN_TTL) => {
const timestamp = Math.floor((Date.now() + ttl) / 1000);
const username = `${timestamp}:${id}`;
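
For reference, the previous constant evaluated to one hour even though its comment said 24 hours; a quick check of the corrected arithmetic (just the computation, not repository code):

// old: 60 * 60 * 1000      =  3,600,000 ms (1 hour)
// new: 24 * 60 * 60 * 1000 = 86,400,000 ms (24 hours)
const twentyFourHoursMs = 24 * 60 * 60 * 1000; // 86_400_000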

View File

@ -83,6 +83,14 @@ const run = async () => {
process.exit(0);
});
process.on("uncaughtException", (error) => {
logger.error(error, "CRITICAL ERROR: Uncaught Exception");
});
process.on("unhandledRejection", (error) => {
logger.error(error, "CRITICAL ERROR: Unhandled Promise Rejection");
});
await server.listen({
port: envConfig.PORT,
host: envConfig.HOST,

View File

@ -21,6 +21,7 @@ import {
TQueueSecretSyncSyncSecretsByIdDTO,
TQueueSendSecretSyncActionFailedNotificationsDTO
} from "@app/services/secret-sync/secret-sync-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
export enum QueueName {
SecretRotation = "secret-rotation",
@ -107,7 +108,7 @@ export type TQueueJobTypes = {
};
[QueueName.SecretWebhook]: {
name: QueueJobs.SecWebhook;
payload: { projectId: string; environment: string; secretPath: string; depth?: number };
payload: TWebhookPayloads;
};
[QueueName.AccessTokenStatusUpdate]:

View File

@ -122,7 +122,8 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
message: error.message,
error: error.name
error: error.name,
details: error?.details
});
} else if (error instanceof RateLimitError) {
void res.status(HttpStatusCodes.TooManyRequests).send({

View File

@ -111,7 +111,16 @@ export const secretRawSchema = z.object({
secretReminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
actor: z
.object({
actorId: z.string().nullable().optional(),
actorType: z.string().nullable().optional(),
name: z.string().nullable().optional(),
membershipId: z.string().nullable().optional()
})
.optional()
.nullable()
});
export const ProjectPermissionSchema = z.object({

View File

@ -118,7 +118,12 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
querystring: z.object({
searchTerm: z.string().default(""),
offset: z.coerce.number().default(0),
limit: z.coerce.number().max(100).default(20)
limit: z.coerce.number().max(100).default(20),
// TODO: remove this once z.coerce.boolean() is supported
adminsOnly: z
.string()
.transform((val) => val === "true")
.default("false")
}),
response: {
200: z.object({

View File

@ -6,6 +6,7 @@ import { authRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { validateSignUpAuthorization } from "@app/services/auth/auth-fns";
import { AuthMode } from "@app/services/auth/auth-type";
import { UserEncryption } from "@app/services/user/user-types";
export const registerPasswordRouter = async (server: FastifyZodProvider) => {
server.route({
@ -113,20 +114,16 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
message: z.string(),
user: UsersSchema,
token: z.string()
token: z.string(),
userEncryptionVersion: z.nativeEnum(UserEncryption)
})
}
},
handler: async (req) => {
const { token, user } = await server.services.password.verifyPasswordResetEmail(req.body.email, req.body.code);
const passwordReset = await server.services.password.verifyPasswordResetEmail(req.body.email, req.body.code);
return {
message: "Successfully verified email",
user,
token
};
return passwordReset;
}
});

View File

@ -307,7 +307,17 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.max(256, { message: "Description must be 256 or fewer characters" })
.optional()
.describe(PROJECTS.UPDATE.projectDescription),
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization),
slug: z
.string()
.trim()
.regex(
/^[a-z0-9]+(?:[_-][a-z0-9]+)*$/,
"Project slug can only contain lowercase letters and numbers, with optional single hyphens (-) or underscores (_) between words. Cannot start or end with a hyphen or underscore."
)
.max(64, { message: "Slug must be 64 characters or fewer" })
.optional()
.describe(PROJECTS.UPDATE.slug)
}),
response: {
200: z.object({
@ -325,7 +335,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
update: {
name: req.body.name,
description: req.body.description,
autoCapitalization: req.body.autoCapitalization
autoCapitalization: req.body.autoCapitalization,
slug: req.body.slug
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
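
As a quick sanity check of the slug regex added above (the sample values are illustrative, not from the repository):

const slugRegex = /^[a-z0-9]+(?:[_-][a-z0-9]+)*$/;

// accepted: lowercase words joined by single hyphens or underscores
["my-project", "api_keys", "project2"].every((s) => slugRegex.test(s)); // true

// rejected: uppercase, leading/trailing separators, doubled separators
["My-Project", "-project", "project-", "my--project"].some((s) => slugRegex.test(s)); // false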

View File

@ -3,6 +3,7 @@ import { registerIdentityOrgRouter } from "./identity-org-router";
import { registerIdentityProjectRouter } from "./identity-project-router";
import { registerMfaRouter } from "./mfa-router";
import { registerOrgRouter } from "./organization-router";
import { registerPasswordRouter } from "./password-router";
import { registerProjectMembershipRouter } from "./project-membership-router";
import { registerProjectRouter } from "./project-router";
import { registerServiceTokenRouter } from "./service-token-router";
@ -12,6 +13,7 @@ export const registerV2Routes = async (server: FastifyZodProvider) => {
await server.register(registerMfaRouter, { prefix: "/auth" });
await server.register(registerUserRouter, { prefix: "/users" });
await server.register(registerServiceTokenRouter, { prefix: "/service-token" });
await server.register(registerPasswordRouter, { prefix: "/password" });
await server.register(
async (orgRouter) => {
await orgRouter.register(registerOrgRouter);

View File

@ -0,0 +1,53 @@
import { z } from "zod";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { validatePasswordResetAuthorization } from "@app/services/auth/auth-fns";
import { ResetPasswordV2Type } from "@app/services/auth/auth-password-type";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerPasswordRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/password-reset",
config: {
rateLimit: authRateLimit
},
schema: {
body: z.object({
newPassword: z.string().trim()
})
},
handler: async (req) => {
const token = validatePasswordResetAuthorization(req.headers.authorization);
await server.services.password.resetPasswordV2({
type: ResetPasswordV2Type.Recovery,
newPassword: req.body.newPassword,
userId: token.userId
});
}
});
server.route({
method: "POST",
url: "/user/password-reset",
schema: {
body: z.object({
oldPassword: z.string().trim(),
newPassword: z.string().trim()
})
},
config: {
rateLimit: authRateLimit
},
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
handler: async (req) => {
await server.services.password.resetPasswordV2({
type: ResetPasswordV2Type.LoggedInReset,
userId: req.permission.id,
newPassword: req.body.newPassword,
oldPassword: req.body.oldPassword
});
}
});
};
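
A hedged sketch of calling the two new endpoints from a client, assuming the v2 router is mounted under /api/v2 so the /password prefix registered in the v2 routes index above resolves to /api/v2/password/...; the base URL, tokens, and password variables are placeholders:

// Recovery flow: the bearer token comes from the password-reset email verification step.
await fetch("https://app.infisical.com/api/v2/password/password-reset", {
  method: "POST",
  headers: { Authorization: `Bearer ${resetToken}`, "Content-Type": "application/json" },
  body: JSON.stringify({ newPassword })
});

// Logged-in reset: authenticated with a regular JWT, current password required.
await fetch("https://app.infisical.com/api/v2/password/user/password-reset", {
  method: "POST",
  headers: { Authorization: `Bearer ${jwtToken}`, "Content-Type": "application/json" },
  body: JSON.stringify({ oldPassword, newPassword })
});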

View File

@ -380,6 +380,48 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/raw/id/:secretId",
config: {
rateLimit: secretsLimit
},
schema: {
params: z.object({
secretId: z.string()
}),
response: {
200: z.object({
secret: secretRawSchema.extend({
secretPath: z.string(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional(),
secretMetadata: ResourceMetadataSchema.optional()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { secretId } = req.params;
const secret = await server.services.secret.getSecretByIdRaw({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
secretId
});
return { secret };
}
});
server.route({
method: "GET",
url: "/raw/:secretName",

View File

@ -45,6 +45,36 @@ export const validateSignUpAuthorization = (token: string, userId: string, valid
if (decodedToken.userId !== userId) throw new UnauthorizedError();
};
export const validatePasswordResetAuthorization = (token?: string) => {
if (!token) throw new UnauthorizedError();
const appCfg = getConfig();
const [AUTH_TOKEN_TYPE, AUTH_TOKEN_VALUE] = <[string, string]>token?.split(" ", 2) ?? [null, null];
if (AUTH_TOKEN_TYPE === null) {
throw new UnauthorizedError({ message: "Missing Authorization Header in the request header." });
}
if (AUTH_TOKEN_TYPE.toLowerCase() !== "bearer") {
throw new UnauthorizedError({
message: `The provided authentication type '${AUTH_TOKEN_TYPE}' is not supported.`
});
}
if (AUTH_TOKEN_VALUE === null) {
throw new UnauthorizedError({
message: "Missing Authorization Body in the request header"
});
}
const decodedToken = jwt.verify(AUTH_TOKEN_VALUE, appCfg.AUTH_SECRET) as AuthModeProviderSignUpTokenPayload;
if (decodedToken.authTokenType !== AuthTokenType.SIGNUP_TOKEN) {
throw new UnauthorizedError({
message: `The provided authentication token type is not supported.`
});
}
return decodedToken;
};
export const enforceUserLockStatus = (isLocked: boolean, temporaryLockDateEnd?: Date | null) => {
if (isLocked) {
throw new ForbiddenRequestError({

View File

@ -4,7 +4,10 @@ import jwt from "jsonwebtoken";
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
@ -12,10 +15,13 @@ import { TokenType } from "../auth-token/auth-token-types";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TTotpConfigDALFactory } from "../totp/totp-config-dal";
import { TUserDALFactory } from "../user/user-dal";
import { UserEncryption } from "../user/user-types";
import { TAuthDALFactory } from "./auth-dal";
import {
ResetPasswordV2Type,
TChangePasswordDTO,
TCreateBackupPrivateKeyDTO,
TResetPasswordV2DTO,
TResetPasswordViaBackupKeyDTO,
TSetupPasswordViaBackupKeyDTO
} from "./auth-password-type";
@ -114,26 +120,31 @@ export const authPaswordServiceFactory = ({
* Password reset flow via email. Step 1: send the reset email
*/
const sendPasswordResetEmail = async (email: string) => {
const user = await userDAL.findUserByUsername(email);
// silently ignore when the user is not found so an outside entity cannot enumerate registered Infisical accounts
if (!user || (user && !user.isAccepted)) return;
const sendEmail = async () => {
const user = await userDAL.findUserByUsername(email);
const cfg = getConfig();
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_PASSWORD_RESET,
userId: user.id
});
if (user && user.isAccepted) {
const cfg = getConfig();
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_PASSWORD_RESET,
userId: user.id
});
await smtpService.sendMail({
template: SmtpTemplates.ResetPassword,
recipients: [email],
subjectLine: "Infisical password reset",
substitutions: {
email,
token,
callback_url: cfg.SITE_URL ? `${cfg.SITE_URL}/password-reset` : ""
await smtpService.sendMail({
template: SmtpTemplates.ResetPassword,
recipients: [email],
subjectLine: "Infisical password reset",
substitutions: {
email,
token,
callback_url: cfg.SITE_URL ? `${cfg.SITE_URL}/password-reset` : ""
}
});
}
});
};
// note(daniel): run in background to prevent timing attacks
void sendEmail().catch((err) => logger.error(err, "Failed to send password reset email"));
};
/*
@ -142,6 +153,11 @@ export const authPaswordServiceFactory = ({
const verifyPasswordResetEmail = async (email: string, code: string) => {
const cfg = getConfig();
const user = await userDAL.findUserByUsername(email);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
if (!userEnc) throw new BadRequestError({ message: "Failed to find user encryption data" });
// silently ignore when the user is not found so an outside entity cannot enumerate registered Infisical accounts
if (!user || (user && !user.isAccepted)) {
throw new Error("Failed email verification for pass reset");
@ -162,8 +178,91 @@ export const authPaswordServiceFactory = ({
{ expiresIn: cfg.JWT_SIGNUP_LIFETIME }
);
return { token, user };
return { token, user, userEncryptionVersion: userEnc.encryptionVersion as UserEncryption };
};
const resetPasswordV2 = async ({ userId, newPassword, type, oldPassword }: TResetPasswordV2DTO) => {
const cfg = getConfig();
const user = await userDAL.findUserEncKeyByUserId(userId);
if (!user) {
throw new BadRequestError({ message: `User encryption key not found for user with ID '${userId}'` });
}
if (!user.hashedPassword) {
throw new BadRequestError({ message: "Unable to reset password, no password is set" });
}
if (!user.authMethods?.includes(AuthMethod.EMAIL)) {
throw new BadRequestError({ message: "Unable to reset password, no email authentication method is configured" });
}
// we check the old password if the user is resetting their password while logged in
if (type === ResetPasswordV2Type.LoggedInReset) {
if (!oldPassword) {
throw new BadRequestError({ message: "Current password is required." });
}
const isValid = await bcrypt.compare(oldPassword, user.hashedPassword);
if (!isValid) {
throw new BadRequestError({ message: "Incorrect current password." });
}
}
const newHashedPassword = await bcrypt.hash(newPassword, cfg.BCRYPT_SALT_ROUND);
// we need to get the original private key first for v2
let privateKey: string;
if (
user.serverEncryptedPrivateKey &&
user.serverEncryptedPrivateKeyTag &&
user.serverEncryptedPrivateKeyIV &&
user.serverEncryptedPrivateKeyEncoding &&
user.encryptionVersion === UserEncryption.V2
) {
privateKey = infisicalSymmetricDecrypt({
iv: user.serverEncryptedPrivateKeyIV,
tag: user.serverEncryptedPrivateKeyTag,
ciphertext: user.serverEncryptedPrivateKey,
keyEncoding: user.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
});
} else {
throw new BadRequestError({
message: "Cannot reset password without current credentials or recovery method",
name: "Reset password"
});
}
const encKeys = await generateUserSrpKeys(user.username, newPassword, {
publicKey: user.publicKey,
privateKey
});
const { tag, iv, ciphertext, encoding } = infisicalSymmetricEncypt(privateKey);
await userDAL.updateUserEncryptionByUserId(userId, {
hashedPassword: newHashedPassword,
// srp params
salt: encKeys.salt,
verifier: encKeys.verifier,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
serverEncryptedPrivateKey: ciphertext,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyEncoding: encoding
});
await tokenService.revokeAllMySessions(userId);
};
/*
* Reset password of a user via backup key
* */
@ -391,6 +490,7 @@ export const authPaswordServiceFactory = ({
createBackupPrivateKey,
getBackupPrivateKeyOfUser,
sendPasswordSetupEmail,
setupPassword
setupPassword,
resetPasswordV2
};
};

View File

@ -13,6 +13,18 @@ export type TChangePasswordDTO = {
password: string;
};
export enum ResetPasswordV2Type {
Recovery = "recovery",
LoggedInReset = "logged-in-reset"
}
export type TResetPasswordV2DTO = {
type: ResetPasswordV2Type;
userId: string;
newPassword: string;
oldPassword?: string;
};
export type TResetPasswordViaBackupKeyDTO = {
userId: string;
protectedKey: string;

View File

@ -772,6 +772,10 @@ export const importDataIntoInfisicalFn = async ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
tx
});
}

View File

@ -4,7 +4,7 @@ import ms from "ms";
import { ActionProjectType, ProjectMembershipRole, SecretKeyEncoding, TGroups } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@ -102,11 +102,13 @@ export const groupProjectServiceFactory = ({
project.id
);
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPrivileges) {
throw new ForbiddenRequestError({ message: "Failed to assign group to a more privileged role" });
}
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to assign group to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
// validate custom roles input
@ -267,12 +269,13 @@ export const groupProjectServiceFactory = ({
requestedRoleChange,
project.id
);
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPrivileges) {
throw new ForbiddenRequestError({ message: "Failed to assign group to a more privileged role" });
}
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to assign group to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
// validate custom roles input

View File

@ -7,7 +7,7 @@ import { IdentityAuthMethod } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@ -339,9 +339,12 @@ export const identityAwsAuthServiceFactory = ({
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke aws auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke aws auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {

View File

@ -5,7 +5,7 @@ import { IdentityAuthMethod } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@ -312,9 +312,12 @@ export const identityAzureAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke azure auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke azure auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {

View File

@ -5,7 +5,7 @@ import { IdentityAuthMethod } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@ -358,9 +358,12 @@ export const identityGcpAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke gcp auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke gcp auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {

View File

@ -7,7 +7,7 @@ import { IdentityAuthMethod, TIdentityJwtAuthsUpdate } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@ -508,11 +508,13 @@ export const identityJwtAuthServiceFactory = ({
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission)) {
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke JWT auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke jwt auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const revokedIdentityJwtAuth = await identityJwtAuthDAL.transaction(async (tx) => {
const deletedJwtAuth = await identityJwtAuthDAL.delete({ identityId }, tx);

View File

@ -7,7 +7,7 @@ import { IdentityAuthMethod, TIdentityKubernetesAuthsUpdate } from "@app/db/sche
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@ -487,9 +487,12 @@ export const identityKubernetesAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke kubernetes auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke kubernetes auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {

View File

@ -8,7 +8,7 @@ import { IdentityAuthMethod, TIdentityOidcAuthsUpdate } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@ -428,11 +428,13 @@ export const identityOidcAuthServiceFactory = ({
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission)) {
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke OIDC auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke oidc auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);

View File

@ -4,7 +4,7 @@ import ms from "ms";
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
@ -91,11 +91,13 @@ export const identityProjectServiceFactory = ({
projectId
);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges) {
throw new ForbiddenRequestError({ message: "Failed to change to a more privileged role" });
}
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to assign to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
// validate custom roles input
@ -185,9 +187,13 @@ export const identityProjectServiceFactory = ({
projectId
);
if (!isAtLeastAsPrivileged(permission, rolePermission)) {
throw new ForbiddenRequestError({ message: "Failed to change to a more privileged role" });
}
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to change to a more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
// validate custom roles input
@ -277,8 +283,13 @@ export const identityProjectServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
if (!isAtLeastAsPrivileged(permission, identityRolePermission))
throw new ForbiddenRequestError({ message: "Failed to delete more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to remove more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const [deletedIdentity] = await identityProjectDAL.delete({ identityId, projectId });
return deletedIdentity;

View File

@ -5,7 +5,7 @@ import { IdentityAuthMethod, TableName } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
@ -245,11 +245,13 @@ export const identityTokenAuthServiceFactory = ({
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission)) {
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke Token Auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke token auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const revokedIdentityTokenAuth = await identityTokenAuthDAL.transaction(async (tx) => {
const deletedTokenAuth = await identityTokenAuthDAL.delete({ identityId }, tx);
@ -295,10 +297,12 @@ export const identityTokenAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasPriviledge)
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to create token for identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to create token for identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityTokenAuth = await identityTokenAuthDAL.findOne({ identityId });
@ -415,10 +419,12 @@ export const identityTokenAuthServiceFactory = ({
actorAuthMethod,
actorOrgId
);
const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasPriviledge)
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to update token for identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to update token for identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const [token] = await identityAccessTokenDAL.update(

View File

@ -8,7 +8,7 @@ import { IdentityAuthMethod } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { checkIPAgainstBlocklist, extractIPDetails, isValidIpOrCidr, TIp } from "@app/lib/ip";
@ -367,9 +367,12 @@ export const identityUaServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke universal auth of identity with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke universal auth of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const revokedIdentityUniversalAuth = await identityUaDAL.transaction(async (tx) => {
@ -414,10 +417,12 @@ export const identityUaServiceFactory = ({
actorAuthMethod,
actorOrgId
);
const hasPriviledge = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasPriviledge)
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to add identity to project with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to create client secret for a more privileged identity.",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const appCfg = getConfig();
@ -475,9 +480,12 @@ export const identityUaServiceFactory = ({
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to add identity to project with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to get identity client secret with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityUniversalAuth = await identityUaDAL.findOne({
@ -524,9 +532,12 @@ export const identityUaServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to read identity client secret of project with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to read identity client secret of identity with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const clientSecret = await identityUaClientSecretDAL.findById(clientSecretId);
@ -566,10 +577,12 @@ export const identityUaServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!isAtLeastAsPrivileged(permission, rolePermission))
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: "Failed to revoke identity client secret with more privileged role"
name: "PermissionBoundaryError",
message: "Failed to revoke identity client secret with more privileged role",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const clientSecret = await identityUaClientSecretDAL.updateById(clientSecretId, {

View File

@ -4,7 +4,7 @@ import { OrgMembershipRole, TableName, TOrgRoles } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@ -58,9 +58,13 @@ export const identityServiceFactory = ({
orgId
);
const isCustomRole = Boolean(customRole);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const plan = await licenseService.getPlan(orgId);
@ -129,9 +133,13 @@ export const identityServiceFactory = ({
actorAuthMethod,
actorOrgId
);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to delete more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
let customRole: TOrgRoles | undefined;
if (role) {
@ -141,9 +149,13 @@ export const identityServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredNewRolePermission)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged identity" });
const appliedRolePermissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!appliedRolePermissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged identity",
details: { missingPermissions: appliedRolePermissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
}
@ -216,9 +228,13 @@ export const identityServiceFactory = ({
actorAuthMethod,
actorOrgId
);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to delete more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const deletedIdentity = await identityDAL.deleteById(id);

View File

@ -114,20 +114,27 @@ export const integrationAuthServiceFactory = ({
const listOrgIntegrationAuth = async ({ actorId, actor, actorOrgId, actorAuthMethod }: TGenericPermission) => {
const authorizations = await integrationAuthDAL.getByOrg(actorOrgId as string);
return Promise.all(
authorizations.filter(async (auth) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: auth.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
const filteredAuthorizations = await Promise.all(
authorizations.map(async (auth) => {
try {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: auth.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
return permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
return permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations) ? auth : null;
} catch (error) {
// user does not belong to the project that the integration auth belongs to
return null;
}
})
);
return filteredAuthorizations.filter((auth): auth is NonNullable<typeof auth> => auth !== null);
};
const getIntegrationAuth = async ({ actor, id, actorId, actorAuthMethod, actorOrgId }: TGetIntegrationAuthDTO) => {

View File

@ -7,7 +7,7 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TProjectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
@ -274,13 +274,13 @@ export const projectMembershipServiceFactory = ({
projectId
);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges) {
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
message: `Failed to change to a more privileged role ${requestedRoleChange}`
name: "PermissionBoundaryError",
message: `Failed to change to a more privileged role ${requestedRoleChange}`,
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
}
// validate custom roles input

View File

@ -563,11 +563,24 @@ export const projectServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Settings);
if (update.slug) {
const existingProject = await projectDAL.findOne({
slug: update.slug,
orgId: actorOrgId
});
if (existingProject && existingProject.id !== project.id) {
throw new BadRequestError({
message: `Failed to update project slug. The project "${existingProject.name}" with the slug "${existingProject.slug}" already exists in your organization. Please choose a unique slug for your project.`
});
}
}
const updatedProject = await projectDAL.updateById(project.id, {
name: update.name,
description: update.description,
autoCapitalization: update.autoCapitalization,
enforceCapitalization: update.autoCapitalization
enforceCapitalization: update.autoCapitalization,
slug: update.slug
});
return updatedProject;

View File

@ -82,6 +82,7 @@ export type TUpdateProjectDTO = {
name?: string;
description?: string;
autoCapitalization?: boolean;
slug?: string;
};
} & Omit<TProjectPermission, "projectId">;

View File

@ -613,6 +613,9 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
`${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
`${TableName.SecretTag}.id`
)
.leftJoin(TableName.SecretFolder, `${TableName.SecretV2}.folderId`, `${TableName.SecretFolder}.id`)
.leftJoin(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.leftJoin(TableName.ResourceMetadata, `${TableName.SecretV2}.id`, `${TableName.ResourceMetadata}.secretId`)
.select(selectAllTableCols(TableName.SecretV2))
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
@ -622,12 +625,13 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.ResourceMetadata).as("metadataId"),
db.ref("key").withSchema(TableName.ResourceMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.ResourceMetadata).as("metadataValue")
);
)
.select(db.ref("projectId").withSchema(TableName.Environment).as("projectId"));
const docs = sqlNestRelationships({
data: rawDocs,
key: "id",
parentMapper: (el) => ({ _id: el.id, ...SecretsV2Schema.parse(el) }),
parentMapper: (el) => ({ _id: el.id, projectId: el.projectId, ...SecretsV2Schema.parse(el) }),
childrenMapper: [
{
key: "tagId",

View File

@ -5,6 +5,7 @@ import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "../auth/auth-type";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { ResourceMetadataDTO } from "../resource-metadata/resource-metadata-schema";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
@ -62,6 +63,7 @@ export const fnSecretBulkInsert = async ({
resourceMetadataDAL,
secretTagDAL,
secretVersionTagDAL,
actor,
tx
}: TFnSecretBulkInsert) => {
const sanitizedInputSecrets = inputSecrets.map(
@ -90,6 +92,10 @@ export const fnSecretBulkInsert = async ({
})
);
const userActorId = actor && actor.type === ActorType.USER ? actor.actorId : undefined;
const identityActorId = actor && actor.type !== ActorType.USER ? actor.actorId : undefined;
const actorType = actor?.type || ActorType.PLATFORM;
const newSecrets = await secretDAL.insertMany(
sanitizedInputSecrets.map((el) => ({ ...el, folderId })),
tx
@ -106,6 +112,9 @@ export const fnSecretBulkInsert = async ({
sanitizedInputSecrets.map((el) => ({
...el,
folderId,
userActorId,
identityActorId,
actorType,
secretId: newSecretGroupedByKeyName[el.key][0].id
})),
tx
@ -157,8 +166,13 @@ export const fnSecretBulkUpdate = async ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
resourceMetadataDAL
resourceMetadataDAL,
actor
}: TFnSecretBulkUpdate) => {
const userActorId = actor && actor?.type === ActorType.USER ? actor?.actorId : undefined;
const identityActorId = actor && actor?.type !== ActorType.USER ? actor?.actorId : undefined;
const actorType = actor?.type || ActorType.PLATFORM;
const sanitizedInputSecrets = inputSecrets.map(
({
filter,
@ -216,7 +230,10 @@ export const fnSecretBulkUpdate = async ({
encryptedValue,
reminderRepeatDays,
folderId,
secretId
secretId,
userActorId,
identityActorId,
actorType
})
),
tx
@ -616,6 +633,12 @@ export const reshapeBridgeSecret = (
secret: Omit<TSecretsV2, "encryptedValue" | "encryptedComment"> & {
value: string;
comment: string;
userActorName?: string | null;
identityActorName?: string | null;
userActorId?: string | null;
identityActorId?: string | null;
membershipId?: string | null;
actorType?: string | null;
tags?: {
id: string;
slug: string;
@ -636,6 +659,14 @@ export const reshapeBridgeSecret = (
_id: secret.id,
id: secret.id,
user: secret.userId,
actor: secret.actorType
? {
actorType: secret.actorType,
actorId: secret.userActorId || secret.identityActorId,
name: secret.identityActorName || secret.userActorName,
membershipId: secret.membershipId
}
: undefined,
tags: secret.tags,
skipMultilineEncoding: secret.skipMultilineEncoding,
secretReminderRepeatDays: secret.reminderRepeatDays,

View File

@ -28,6 +28,7 @@ import { KmsDataKey } from "../kms/kms-types";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { TSecretQueueFactory } from "../secret/secret-queue";
import { TGetASecretByIdDTO } from "../secret/secret-types";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
import { fnSecretsV2FromImports } from "../secret-import/secret-import-fns";
@ -73,7 +74,13 @@ type TSecretV2BridgeServiceFactoryDep = {
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "findBySlugs">;
folderDAL: Pick<
TSecretFolderDALFactory,
"findBySecretPath" | "updateById" | "findById" | "findByManySecretPath" | "find" | "findBySecretPathMultiEnv"
| "findBySecretPath"
| "updateById"
| "findById"
| "findByManySecretPath"
| "find"
| "findBySecretPathMultiEnv"
| "findSecretPathByFolderIds"
>;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "findByFolderIds">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "handleSecretReminder" | "removeSecretReminder">;
@ -301,6 +308,10 @@ export const secretV2BridgeServiceFactory = ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
tx
})
);
@ -483,6 +494,10 @@ export const secretV2BridgeServiceFactory = ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
tx
})
);
@ -947,6 +962,73 @@ export const secretV2BridgeServiceFactory = ({
};
};
const getSecretById = async ({ actorId, actor, actorOrgId, actorAuthMethod, secretId }: TGetASecretByIdDTO) => {
const secret = await secretDAL.findOneWithTags({
[`${TableName.SecretV2}.id` as "id"]: secretId
});
if (!secret) {
throw new NotFoundError({
message: `Secret with ID '${secretId}' not found`,
name: "GetSecretById"
});
}
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(secret.projectId, [secret.folderId]);
if (!folderWithPath) {
throw new NotFoundError({
message: `Folder with id '${secret.folderId}' not found`,
name: "GetSecretById"
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: secret.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, {
environment: folderWithPath.environmentSlug,
secretPath: folderWithPath.path,
secretName: secret.key,
secretTags: secret.tags.map((i) => i.slug)
})
);
if (secret.type === SecretType.Personal && secret.userId !== actorId) {
throw new ForbiddenRequestError({
message: "You are not allowed to access this secret",
name: "GetSecretById"
});
}
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: secret.projectId
});
const secretValue = secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedValue }).toString()
: "";
const secretComment = secret.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: secret.encryptedComment }).toString()
: "";
return reshapeBridgeSecret(secret.projectId, folderWithPath.environmentSlug, folderWithPath.path, {
...secret,
value: secretValue,
comment: secretComment
});
};
const getSecretByName = async ({
actorId,
actor,
@ -1230,6 +1312,10 @@ export const secretV2BridgeServiceFactory = ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
tx
})
);
@ -1490,6 +1576,10 @@ export const secretV2BridgeServiceFactory = ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
resourceMetadataDAL
});
updatedSecrets.push(...bulkUpdatedSecrets.map((el) => ({ ...el, secretPath: folder.path })));
@ -1522,6 +1612,10 @@ export const secretV2BridgeServiceFactory = ({
secretVersionDAL,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
tx
});
updatedSecrets.push(...bulkInsertedSecrets.map((el) => ({ ...el, secretPath: folder.path })));
@ -1689,14 +1783,19 @@ export const secretV2BridgeServiceFactory = ({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});
const secretVersions = await secretVersionDAL.find({ secretId }, { offset, limit, sort: [["createdAt", "desc"]] });
return secretVersions.map((el) =>
reshapeBridgeSecret(folder.projectId, folder.environment.envSlug, "/", {
const secretVersions = await secretVersionDAL.findVersionsBySecretIdWithActors(secretId, folder.projectId, {
offset,
limit,
sort: [["createdAt", "desc"]]
});
return secretVersions.map((el) => {
return reshapeBridgeSecret(folder.projectId, folder.environment.envSlug, "/", {
...el,
value: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
comment: el.encryptedComment ? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString() : ""
})
);
});
});
};
// this is a backfilling API for secret references
@ -1956,6 +2055,10 @@ export const secretV2BridgeServiceFactory = ({
secretTagDAL,
resourceMetadataDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
type: doc.type,
@ -1982,6 +2085,10 @@ export const secretV2BridgeServiceFactory = ({
tx,
secretTagDAL,
secretVersionTagDAL,
actor: {
type: actor,
actorId
},
inputSecrets: locallyUpdatedSecrets.map((doc) => {
return {
filter: {
@ -2204,6 +2311,7 @@ export const secretV2BridgeServiceFactory = ({
getSecretsCountMultiEnv,
getSecretsMultiEnv,
getSecretReferenceTree,
getSecretsByFolderMappings
getSecretsByFolderMappings,
getSecretById
};
};

View File

@ -168,6 +168,10 @@ export type TFnSecretBulkInsert = {
secretVersionDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
actor?: {
type: string;
actorId: string;
};
};
type TRequireReferenceIfValue =
@ -192,6 +196,10 @@ export type TFnSecretBulkUpdate = {
secretVersionDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
actor?: {
type: string;
actorId: string;
};
tx?: Knex;
};

View File

@ -1,9 +1,10 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TSecretVersionsV2, TSecretVersionsV2Update } from "@app/db/schemas";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { ormify, selectAllTableCols, TFindOpt } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
@ -119,11 +120,67 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning secret version v2 completed`);
};
const findVersionsBySecretIdWithActors = async (
secretId: string,
projectId: string,
{ offset, limit, sort = [["createdAt", "desc"]] }: TFindOpt<TSecretVersionsV2> = {},
tx?: Knex
) => {
try {
const query = (tx || db)(TableName.SecretVersionV2)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SecretVersionV2}.userActorId`)
.leftJoin(
TableName.ProjectMembership,
`${TableName.ProjectMembership}.userId`,
`${TableName.SecretVersionV2}.userActorId`
)
.leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.SecretVersionV2}.identityActorId`)
.where((qb) => {
void qb.where(`${TableName.SecretVersionV2}.secretId`, secretId);
void qb.where(`${TableName.ProjectMembership}.projectId`, projectId);
})
.orWhere((qb) => {
void qb.where(`${TableName.SecretVersionV2}.secretId`, secretId);
void qb.whereNull(`${TableName.ProjectMembership}.projectId`);
})
.select(
selectAllTableCols(TableName.SecretVersionV2),
`${TableName.Users}.username as userActorName`,
`${TableName.Identity}.name as identityActorName`,
`${TableName.ProjectMembership}.id as membershipId`
);
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {
void query.orderBy(
sort.map(([column, order, nulls]) => ({
column: `${TableName.SecretVersionV2}.${column as string}`,
order,
nulls
}))
);
}
const docs: Array<
TSecretVersionsV2 & {
userActorName: string | undefined | null;
identityActorName: string | undefined | null;
membershipId: string | undefined | null;
}
> = await query;
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "FindVersionsBySecretIdWithActors" });
}
};
return {
...secretVersionV2Orm,
pruneExcessVersions,
findLatestVersionMany,
bulkUpdate,
findLatestVersionByFolderId
findLatestVersionByFolderId,
findVersionsBySecretIdWithActors
};
};

View File

@ -579,6 +579,7 @@ export const fnSecretBulkInsert = async ({
[`${TableName.Secret}Id` as const]: newSecretGroupByBlindIndex[secretBlindIndex as string][0].id
}))
);
const secretVersions = await secretVersionDAL.insertMany(
sanitizedInputSecrets.map((el) => ({
...el,

View File

@ -61,6 +61,7 @@ import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TUserDALFactory } from "../user/user-dal";
import { TWebhookDALFactory } from "../webhook/webhook-dal";
import { fnTriggerWebhook } from "../webhook/webhook-fns";
import { WebhookEvents } from "../webhook/webhook-types";
import { TSecretDALFactory } from "./secret-dal";
import { interpolateSecrets } from "./secret-fns";
import {
@ -623,7 +624,14 @@ export const secretQueueFactory = ({
await queueService.queue(
QueueName.SecretWebhook,
QueueJobs.SecWebhook,
{ environment, projectId, secretPath },
{
type: WebhookEvents.SecretModified,
payload: {
environment,
projectId,
secretPath
}
},
{
jobId: `secret-webhook-${environment}-${projectId}-${secretPath}`,
removeOnFail: { count: 5 },
@ -1055,6 +1063,8 @@ export const secretQueueFactory = ({
const organization = await orgDAL.findOrgByProjectId(projectId);
const project = await projectDAL.findById(projectId);
const secret = await secretV2BridgeDAL.findById(data.secretId);
const [folder] = await folderDAL.findSecretPathByFolderIds(project.id, [secret.folderId]);
if (!organization) {
logger.info(`secretReminderQueue.process: [secretDocument=${data.secretId}] no organization found`);
@ -1083,6 +1093,19 @@ export const secretQueueFactory = ({
organizationName: organization.name
}
});
await queueService.queue(QueueName.SecretWebhook, QueueJobs.SecWebhook, {
type: WebhookEvents.SecretReminderExpired,
payload: {
projectName: project.name,
projectId: project.id,
secretPath: folder?.path,
environment: folder?.environmentSlug || "",
reminderNote: data.note,
secretName: secret?.key,
secretId: data.secretId
}
});
});
const startSecretV2Migration = async (projectId: string) => {
@ -1490,14 +1513,17 @@ export const secretQueueFactory = ({
queueService.start(QueueName.SecretWebhook, async (job) => {
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: job.data.projectId
projectId: job.data.payload.projectId
});
await fnTriggerWebhook({
...job.data,
projectId: job.data.payload.projectId,
environment: job.data.payload.environment,
secretPath: job.data.payload.secretPath || "/",
projectEnvDAL,
webhookDAL,
projectDAL,
webhookDAL,
event: job.data,
secretManagerDecryptor: (value) => secretManagerDecryptor({ cipherTextBlob: value }).toString()
});
});

View File

@ -71,6 +71,7 @@ import {
TDeleteManySecretRawDTO,
TDeleteSecretDTO,
TDeleteSecretRawDTO,
TGetASecretByIdRawDTO,
TGetASecretDTO,
TGetASecretRawDTO,
TGetSecretAccessListDTO,
@ -95,7 +96,7 @@ type TSecretServiceFactoryDep = {
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
folderDAL: Pick<
TSecretFolderDALFactory,
"findBySecretPath" | "updateById" | "findById" | "findByManySecretPath" | "find"
"findBySecretPath" | "updateById" | "findById" | "findByManySecretPath" | "find" | "findSecretPathByFolderIds"
>;
secretV2BridgeService: TSecretV2BridgeServiceFactory;
secretBlindIndexDAL: TSecretBlindIndexDALFactory;
@ -1382,6 +1383,18 @@ export const secretServiceFactory = ({
};
};
const getSecretByIdRaw = async ({ secretId, actorId, actor, actorOrgId, actorAuthMethod }: TGetASecretByIdRawDTO) => {
const secret = await secretV2BridgeService.getSecretById({
secretId,
actorId,
actor,
actorOrgId,
actorAuthMethod
});
return secret;
};
const getSecretByNameRaw = async ({
type,
path,
@ -3088,6 +3101,7 @@ export const secretServiceFactory = ({
getSecretsRawMultiEnv,
getSecretReferenceTree,
getSecretsRawByFolderMappings,
getSecretAccessList
getSecretAccessList,
getSecretByIdRaw
};
};

View File

@ -121,6 +121,10 @@ export type TGetASecretDTO = {
version?: number;
} & TProjectPermission;
export type TGetASecretByIdDTO = {
secretId: string;
} & Omit<TProjectPermission, "projectId">;
export type TCreateBulkSecretDTO = {
path: string;
environment: string;
@ -213,6 +217,10 @@ export type TGetASecretRawDTO = {
projectId?: string;
} & Omit<TProjectPermission, "projectId">;
export type TGetASecretByIdRawDTO = {
secretId: string;
} & Omit<TProjectPermission, "projectId">;
export type TCreateSecretRawDTO = TProjectPermission & {
secretName: string;
secretPath: string;

View File

@ -50,6 +50,7 @@ const buildSlackPayload = (notification: TSlackNotification) => {
const messageBody = `A secret approval request has been opened by ${payload.userEmail}.
*Environment*: ${payload.environment}
*Secret path*: ${payload.secretPath || "/"}
*Secret Key${payload.secretKeys.length > 1 ? "s" : ""}*: ${payload.secretKeys.join(", ")}
View the complete details <${appCfg.SITE_URL}/secret-manager/${payload.projectId}/approval?requestId=${
payload.requestId

View File

@ -62,6 +62,7 @@ export type TSlackNotification =
secretPath: string;
requestId: string;
projectId: string;
secretKeys: string[];
};
}
| {

View File

@ -271,12 +271,13 @@ export const superAdminServiceFactory = ({
return { token, user: userInfo, organization };
};
const getUsers = ({ offset, limit, searchTerm }: TAdminGetUsersDTO) => {
const getUsers = ({ offset, limit, searchTerm, adminsOnly }: TAdminGetUsersDTO) => {
return userDAL.getUsersByFilter({
limit,
offset,
searchTerm,
sortBy: "username"
sortBy: "username",
adminsOnly
});
};

View File

@ -20,6 +20,7 @@ export type TAdminGetUsersDTO = {
offset: number;
limit: number;
searchTerm: string;
adminsOnly: boolean;
};
export enum LoginMethod {

View File

@ -23,15 +23,18 @@ export const userDALFactory = (db: TDbClient) => {
limit,
offset,
searchTerm,
sortBy
sortBy,
adminsOnly
}: {
limit: number;
offset: number;
searchTerm: string;
sortBy?: keyof TUsers;
adminsOnly: boolean;
}) => {
try {
let query = db.replicaNode()(TableName.Users).where("isGhost", "=", false);
if (searchTerm) {
query = query.where((qb) => {
void qb
@ -42,6 +45,10 @@ export const userDALFactory = (db: TDbClient) => {
});
}
if (adminsOnly) {
query = query.where("superAdmin", true);
}
if (sortBy) {
query = query.orderBy(sortBy);
}

View File

@ -11,7 +11,7 @@ import { logger } from "@app/lib/logger";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TWebhookDALFactory } from "./webhook-dal";
import { WebhookType } from "./webhook-types";
import { TWebhookPayloads, WebhookEvents, WebhookType } from "./webhook-types";
const WEBHOOK_TRIGGER_TIMEOUT = 15 * 1000;
@ -54,29 +54,64 @@ export const triggerWebhookRequest = async (
return req;
};
export const getWebhookPayload = (
eventName: string,
details: {
workspaceName: string;
workspaceId: string;
environment: string;
secretPath?: string;
type?: string | null;
export const getWebhookPayload = (event: TWebhookPayloads) => {
if (event.type === WebhookEvents.SecretModified) {
const { projectName, projectId, environment, secretPath, type } = event.payload;
switch (type) {
case WebhookType.SLACK:
return {
text: "A secret value has been added or modified.",
attachments: [
{
color: "#E7F256",
fields: [
{
title: "Project",
value: projectName,
short: false
},
{
title: "Environment",
value: environment,
short: false
},
{
title: "Secret Path",
value: secretPath,
short: false
}
]
}
]
};
case WebhookType.GENERAL:
default:
return {
event: event.type,
project: {
workspaceId: projectId,
projectName,
environment,
secretPath
}
};
}
}
) => {
const { workspaceName, workspaceId, environment, secretPath, type } = details;
const { projectName, projectId, environment, secretPath, type, reminderNote, secretName } = event.payload;
switch (type) {
case WebhookType.SLACK:
return {
text: "A secret value has been added or modified.",
text: "You have a secret reminder",
attachments: [
{
color: "#E7F256",
fields: [
{
title: "Project",
value: workspaceName,
value: projectName,
short: false
},
{
@ -88,6 +123,16 @@ export const getWebhookPayload = (
title: "Secret Path",
value: secretPath,
short: false
},
{
title: "Secret Name",
value: secretName,
short: false
},
{
title: "Reminder Note",
value: reminderNote,
short: false
}
]
}
@ -96,11 +141,14 @@ export const getWebhookPayload = (
case WebhookType.GENERAL:
default:
return {
event: eventName,
event: event.type,
project: {
workspaceId,
workspaceId: projectId,
projectName,
environment,
secretPath
secretPath,
secretName,
reminderNote
}
};
}
@ -110,6 +158,7 @@ export type TFnTriggerWebhookDTO = {
projectId: string;
secretPath: string;
environment: string;
event: TWebhookPayloads;
webhookDAL: Pick<TWebhookDALFactory, "findAllWebhooks" | "transaction" | "update" | "bulkUpdate">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectDAL: Pick<TProjectDALFactory, "findById">;
@ -124,8 +173,9 @@ export const fnTriggerWebhook = async ({
projectId,
webhookDAL,
projectEnvDAL,
projectDAL,
secretManagerDecryptor
event,
secretManagerDecryptor,
projectDAL
}: TFnTriggerWebhookDTO) => {
const webhooks = await webhookDAL.findAllWebhooks(projectId, environment);
const toBeTriggeredHooks = webhooks.filter(
@ -134,21 +184,20 @@ export const fnTriggerWebhook = async ({
);
if (!toBeTriggeredHooks.length) return;
logger.info({ environment, secretPath, projectId }, "Secret webhook job started");
const project = await projectDAL.findById(projectId);
let { projectName } = event.payload;
if (!projectName) {
const project = await projectDAL.findById(event.payload.projectId);
projectName = project.name;
}
const webhooksTriggered = await Promise.allSettled(
toBeTriggeredHooks.map((hook) =>
triggerWebhookRequest(
hook,
secretManagerDecryptor,
getWebhookPayload("secrets.modified", {
workspaceName: project.name,
workspaceId: projectId,
environment,
secretPath,
type: hook.type
})
)
)
toBeTriggeredHooks.map((hook) => {
const formattedEvent = {
type: event.type,
payload: { ...event.payload, type: hook.type, projectName }
} as TWebhookPayloads;
return triggerWebhookRequest(hook, secretManagerDecryptor, getWebhookPayload(formattedEvent));
})
);
// filter hooks by status

View File

@ -16,7 +16,8 @@ import {
TDeleteWebhookDTO,
TListWebhookDTO,
TTestWebhookDTO,
TUpdateWebhookDTO
TUpdateWebhookDTO,
WebhookEvents
} from "./webhook-types";
type TWebhookServiceFactoryDep = {
@ -144,12 +145,15 @@ export const webhookServiceFactory = ({
await triggerWebhookRequest(
webhook,
(value) => secretManagerDecryptor({ cipherTextBlob: value }).toString(),
getWebhookPayload("test", {
workspaceName: project.name,
workspaceId: webhook.projectId,
environment: webhook.environment.slug,
secretPath: webhook.secretPath,
type: webhook.type
getWebhookPayload({
type: "test" as WebhookEvents.SecretModified,
payload: {
projectName: project.name,
projectId: webhook.projectId,
environment: webhook.environment.slug,
secretPath: webhook.secretPath,
type: webhook.type
}
})
);
} catch (err) {

View File

@ -30,3 +30,36 @@ export enum WebhookType {
GENERAL = "general",
SLACK = "slack"
}
export enum WebhookEvents {
SecretModified = "secrets.modified",
SecretReminderExpired = "secrets.reminder-expired",
TestEvent = "test"
}
type TWebhookSecretModifiedEventPayload = {
type: WebhookEvents.SecretModified;
payload: {
projectName?: string;
projectId: string;
environment: string;
secretPath?: string;
type?: string | null;
};
};
type TWebhookSecretReminderEventPayload = {
type: WebhookEvents.SecretReminderExpired;
payload: {
projectName?: string;
projectId: string;
environment: string;
secretPath?: string;
type?: string | null;
secretName: string;
secretId: string;
reminderNote?: string | null;
};
};
export type TWebhookPayloads = TWebhookSecretModifiedEventPayload | TWebhookSecretReminderEventPayload;

View File

@ -0,0 +1,17 @@
import path from "path";
import { defineConfig } from "vitest/config";
export default defineConfig({
test: {
globals: true,
env: {
NODE_ENV: "test"
},
include: ["./src/**/*.test.ts"]
},
resolve: {
alias: {
"@app": path.resolve(__dirname, "./src")
}
}
});

View File

@ -0,0 +1,8 @@
public_ip: 127.0.0.1
auth_secret: example-auth-secret
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

View File

@ -1,3 +1,8 @@
public_ip: 127.0.0.1
auth_secret: changeThisOnProduction
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

View File

@ -28,9 +28,10 @@ require (
github.com/rs/zerolog v1.26.1
github.com/spf13/cobra v1.6.1
github.com/spf13/viper v1.8.1
github.com/stretchr/testify v1.9.0
golang.org/x/crypto v0.35.0
golang.org/x/term v0.29.0
github.com/stretchr/testify v1.10.0
golang.org/x/crypto v0.36.0
golang.org/x/sys v0.31.0
golang.org/x/term v0.30.0
gopkg.in/yaml.v2 v2.4.0
)
@ -114,9 +115,8 @@ require (
golang.org/x/mod v0.23.0 // indirect
golang.org/x/net v0.35.0 // indirect
golang.org/x/oauth2 v0.21.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/text v0.22.0 // indirect
golang.org/x/sync v0.12.0 // indirect
golang.org/x/text v0.23.0 // indirect
golang.org/x/time v0.6.0 // indirect
golang.org/x/tools v0.30.0 // indirect
google.golang.org/api v0.188.0 // indirect
@ -139,3 +139,5 @@ require (
)
replace github.com/zalando/go-keyring => github.com/Infisical/go-keyring v1.0.2
replace github.com/pion/turn/v4 => github.com/Infisical/turn/v4 v4.0.1

View File

@ -49,6 +49,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Infisical/go-keyring v1.0.2 h1:dWOkI/pB/7RocfSJgGXbXxLDcVYsdslgjEPmVhb+nl8=
github.com/Infisical/go-keyring v1.0.2/go.mod h1:LWOnn/sw9FxDW/0VY+jHFAfOFEe03xmwBVSfJnBowto=
github.com/Infisical/turn/v4 v4.0.1 h1:omdelNsnFfzS5cu86W5OBR68by68a8sva4ogR0lQQnw=
github.com/Infisical/turn/v4 v4.0.1/go.mod h1:pMMKP/ieNAG/fN5cZiN4SDuyKsXtNTr0ccN7IToA1zs=
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
@ -365,8 +367,6 @@ github.com/pion/stun/v3 v3.0.0 h1:4h1gwhWLWuZWOJIJR9s2ferRO+W3zA/b6ijOI6mKzUw=
github.com/pion/stun/v3 v3.0.0/go.mod h1:HvCN8txt8mwi4FBvS3EmDghW6aQJ24T+y+1TKjB5jyU=
github.com/pion/transport/v3 v3.0.7 h1:iRbMH05BzSNwhILHoBoAPxoB9xQgOaJk+591KC9P1o0=
github.com/pion/transport/v3 v3.0.7/go.mod h1:YleKiTZ4vqNxVwh77Z0zytYi7rXHl7j6uPLGhhz9rwo=
github.com/pion/turn/v4 v4.0.0 h1:qxplo3Rxa9Yg1xXDxxH8xaqcyGUtbHYw4QSCvmFWvhM=
github.com/pion/turn/v4 v4.0.0/go.mod h1:MuPDkm15nYSklKpN8vWJ9W2M0PlyQZqYt1McGuxG7mA=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@ -425,8 +425,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
@ -486,6 +486,8 @@ golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -592,6 +594,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -642,9 +646,13 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -656,6 +664,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

View File

@ -1,10 +1,6 @@
package cmd
import (
// "fmt"
// "github.com/Infisical/infisical-merge/packages/api"
// "github.com/Infisical/infisical-merge/packages/models"
"context"
"fmt"
"os"
@ -14,13 +10,8 @@ import (
"github.com/Infisical/infisical-merge/packages/gateway"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/rs/zerolog/log"
// "github.com/Infisical/infisical-merge/packages/visualize"
// "github.com/rs/zerolog/log"
// "github.com/go-resty/resty/v2"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
@ -40,6 +31,16 @@ var gatewayCmd = &cobra.Command{
util.HandleError(fmt.Errorf("Token not found"))
}
domain, err := cmd.Flags().GetString("domain")
if err != nil {
util.HandleError(err, "Unable to parse domain flag")
}
// Try to install systemd service if possible
if err := gateway.InstallGatewaySystemdService(token.Token, domain); err != nil {
log.Warn().Msgf("Failed to install systemd service: %v", err)
}
Telemetry.CaptureEvent("cli-command:gateway", posthog.NewProperties().Set("version", util.CLI_VERSION))
sigCh := make(chan os.Signal, 1)
@ -137,15 +138,10 @@ var gatewayRelayCmd = &cobra.Command{
}
func init() {
gatewayCmd.SetHelpFunc(func(command *cobra.Command, strings []string) {
command.Flags().MarkHidden("domain")
command.Parent().HelpFunc()(command, strings)
})
gatewayCmd.Flags().String("token", "", "Connect with Infisical using machine identity access token")
gatewayRelayCmd.Flags().String("config", "", "Relay config yaml file path")
gatewayCmd.AddCommand(gatewayRelayCmd)
rootCmd.AddCommand(gatewayCmd)
}

View File

@ -14,6 +14,7 @@ import (
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/systemd"
"github.com/go-resty/resty/v2"
"github.com/pion/dtls/v3"
"github.com/pion/logging"
"github.com/pion/turn/v4"
"github.com/rs/zerolog/log"
@ -54,26 +55,6 @@ func (g *Gateway) ConnectWithRelay() error {
return err
}
relayAddress, relayPort := strings.Split(relayDetails.TurnServerAddress, ":")[0], strings.Split(relayDetails.TurnServerAddress, ":")[1]
var conn net.Conn
// Dial TURN Server
if relayPort == "5349" {
log.Info().Msgf("Provided relay port %s. Using TLS", relayPort)
conn, err = tls.Dial("tcp", relayDetails.TurnServerAddress, &tls.Config{
ServerName: relayAddress,
})
} else {
log.Info().Msgf("Provided relay port %s. Using non TLS connection.", relayPort)
peerAddr, errPeer := net.ResolveTCPAddr("tcp", relayDetails.TurnServerAddress)
if errPeer != nil {
return fmt.Errorf("Failed to parse turn server address: %w", err)
}
conn, err = net.DialTCP("tcp", nil, peerAddr)
}
if err != nil {
return fmt.Errorf("Failed to connect with relay server: %w", err)
}
// Start a new TURN Client and wrap our net.Conn in a STUNConn
// This allows us to simulate datagram based communication over a net.Conn
@ -81,17 +62,42 @@ func (g *Gateway) ConnectWithRelay() error {
if os.Getenv("LOG_LEVEL") == "debug" {
logger.DefaultLogLevel = logging.LogLevelDebug
}
cfg := &turn.ClientConfig{
turnClientCfg := &turn.ClientConfig{
STUNServerAddr: relayDetails.TurnServerAddress,
TURNServerAddr: relayDetails.TurnServerAddress,
Conn: turn.NewSTUNConn(conn),
Username: relayDetails.TurnServerUsername,
Password: relayDetails.TurnServerPassword,
Realm: relayDetails.TurnServerRealm,
LoggerFactory: logger,
}
client, err := turn.NewClient(cfg)
turnAddr, err := net.ResolveUDPAddr("udp4", relayDetails.TurnServerAddress)
if err != nil {
return fmt.Errorf("Failed to parse turn server address: %w", err)
}
// Dial TURN Server
if relayPort == "5349" {
log.Info().Msgf("Provided relay port %s. Using TLS", relayPort)
conn, err := dtls.Dial("udp", turnAddr, &dtls.Config{
ServerName: relayAddress,
})
if err != nil {
return fmt.Errorf("Failed to connect with relay server: %w", err)
}
turnClientCfg.Conn = turn.NewSTUNConn(conn)
} else {
log.Info().Msgf("Provided relay port %s. Using non TLS connection.", relayPort)
conn, err := net.ListenPacket("udp4", turnAddr.String())
if err != nil {
return fmt.Errorf("Failed to connect with relay server: %w", err)
}
turnClientCfg.Conn = conn
}
client, err := turn.NewClient(turnClientCfg)
if err != nil {
return fmt.Errorf("Failed to create relay client: %w", err)
}
@ -168,7 +174,6 @@ func (g *Gateway) Listen(ctx context.Context) error {
ClientAuth: tls.RequireAndVerifyClientCert,
NextProtos: []string{"infisical-gateway"},
}
// Setup QUIC listener on the relayConn
quicConfig := &quic.Config{
EnableDatagrams: true,
@ -176,7 +181,6 @@ func (g *Gateway) Listen(ctx context.Context) error {
KeepAlivePeriod: 2 * time.Second,
}
g.registerRelayIsActive(ctx, relayUdpConnection.LocalAddr().String(), errCh)
quicListener, err := quic.Listen(relayUdpConnection, tlsConfig, quicConfig)
if err != nil {
return fmt.Errorf("Failed to listen for QUIC: %w", err)
@ -185,6 +189,8 @@ func (g *Gateway) Listen(ctx context.Context) error {
log.Printf("Listener started on %s", quicListener.Addr())
g.registerRelayIsActive(ctx, errCh)
log.Info().Msg("Gateway started successfully")
var wg sync.WaitGroup
@ -320,41 +326,31 @@ func (g *Gateway) createPermissionForStaticIps(staticIps string) error {
return nil
}
func (g *Gateway) registerRelayIsActive(ctx context.Context, relayAddress string, errCh chan error) error {
ticker := time.NewTicker(10 * time.Second)
func (g *Gateway) registerRelayIsActive(ctx context.Context, errCh chan error) error {
ticker := time.NewTicker(15 * time.Second)
maxFailures := 3
failures := 0
log.Info().Msg("Starting relay connection health check")
go func() {
time.Sleep(2 * time.Second)
time.Sleep(5 * time.Second)
for {
select {
case <-ctx.Done():
log.Info().Msg("Stopping relay connection health check")
return
case <-ticker.C:
// Configure TLS to skip verification
tlsConfig := &tls.Config{
InsecureSkipVerify: true,
NextProtos: []string{"infisical-gateway"},
}
quicConfig := &quic.Config{
EnableDatagrams: true,
}
func() {
checkCtx, cancel := context.WithTimeout(ctx, 3*time.Second)
defer cancel()
conn, err := quic.DialAddr(checkCtx, relayAddress, tlsConfig, quicConfig)
if err != nil {
failures++
log.Warn().Err(err).Int("failures", failures).Msg("Relay connection check failed")
if failures >= maxFailures {
errCh <- fmt.Errorf("relay connection check failed: %w", err)
}
log.Debug().Msg("Performing relay connection health check")
err := g.createPermissionForStaticIps(g.config.InfisicalStaticIp)
if err != nil && !strings.Contains(err.Error(), "tls:") {
failures++
log.Warn().Err(err).Int("failures", failures).Msg("Failed to refresh TURN permissions")
if failures >= maxFailures {
errCh <- fmt.Errorf("relay connection check failed: %w", err)
return
}
if conn != nil {
conn.CloseWithError(0, "closed")
}
}()
continue
}
}
}
}()

View File

@ -1,19 +1,23 @@
//go:build !windows
// +build !windows
package gateway
import (
"context"
"crypto/tls"
"crypto/x509"
"errors"
"fmt"
"net"
"os"
"os/signal"
"runtime"
// "runtime"
"strconv"
"syscall"
udplistener "github.com/Infisical/infisical-merge/packages/gateway/udp_listener"
"github.com/Infisical/infisical-merge/packages/systemd"
"github.com/pion/dtls/v3"
"github.com/pion/logging"
"github.com/pion/turn/v4"
"github.com/rs/zerolog/log"
@ -37,8 +41,10 @@ type GatewayRelayConfig struct {
RelayMaxPort uint16 `yaml:"relay_max_port"`
TlsCertPath string `yaml:"tls_cert_path"`
TlsPrivateKeyPath string `yaml:"tls_private_key_path"`
TlsCaPath string `yaml:"tls_ca_path"`
tls tls.Certificate
tlsCa string
isTlsEnabled bool
}
@ -79,19 +85,19 @@ func NewGatewayRelay(configFilePath string) (*GatewayRelay, error) {
return nil, errMissingTlsCert
}
tlsCertFile, err := os.ReadFile(cfg.TlsCertPath)
cert, err := tls.LoadX509KeyPair(cfg.TlsCertPath, cfg.TlsPrivateKeyPath)
if err != nil {
return nil, err
}
tlsPrivateKeyFile, err := os.ReadFile(cfg.TlsPrivateKeyPath)
if err != nil {
return nil, err
return nil, fmt.Errorf("Failed to read load server tls key pair: %w", err)
}
cert, err := tls.LoadX509KeyPair(string(tlsCertFile), string(tlsPrivateKeyFile))
if err != nil {
return nil, err
if cfg.TlsCaPath != "" {
ca, err := os.ReadFile(cfg.TlsCaPath)
if err != nil {
return nil, fmt.Errorf("Failed to read tls ca: %w", err)
}
cfg.tlsCa = string(ca)
}
cfg.tls = cert
cfg.isTlsEnabled = true
}
@ -102,7 +108,7 @@ func NewGatewayRelay(configFilePath string) (*GatewayRelay, error) {
}
func (g *GatewayRelay) Run() error {
addr, err := net.ResolveTCPAddr("tcp", "0.0.0.0:"+strconv.Itoa(g.Config.Port))
addr, err := net.ResolveUDPAddr("udp", "0.0.0.0:"+strconv.Itoa(g.Config.Port))
if err != nil {
return fmt.Errorf("Failed to parse server address: %s", err)
}
@ -111,13 +117,6 @@ func (g *GatewayRelay) Run() error {
// and process them yourself.
logger := logging.NewDefaultLeveledLoggerForScope("lt-creds", logging.LogLevelTrace, os.Stdout)
// Create `numThreads` UDP listeners to pass into pion/turn
// pion/turn itself doesn't allocate any UDP sockets, but lets the user pass them in
// this allows us to add logging, storage or modify inbound/outbound traffic
// UDP listeners share the same local address:port with setting SO_REUSEPORT and the kernel
// will load-balance received packets per the IP 5-tuple
listenerConfig := udplistener.SetupListenerConfig()
publicIP := g.Config.PublicIP
relayAddressGenerator := &turn.RelayAddressGeneratorPortRange{
RelayAddress: net.ParseIP(publicIP), // Claim that we are listening on IP passed by user
@ -126,45 +125,54 @@ func (g *GatewayRelay) Run() error {
MaxPort: g.Config.RelayMaxPort,
}
threadNum := runtime.NumCPU()
listenerConfigs := make([]turn.ListenerConfig, threadNum)
var connAddress string
for i := 0; i < threadNum; i++ {
conn, listErr := listenerConfig.Listen(context.Background(), addr.Network(), addr.String())
if listErr != nil {
return fmt.Errorf("Failed to allocate TCP listener at %s:%s %s", addr.Network(), addr.String(), listErr)
}
listenerConfigs[i] = turn.ListenerConfig{
RelayAddressGenerator: relayAddressGenerator,
}
if g.Config.isTlsEnabled {
listenerConfigs[i].Listener = tls.NewListener(conn, &tls.Config{
Certificates: []tls.Certificate{g.Config.tls},
})
} else {
listenerConfigs[i].Listener = conn
}
connAddress = conn.Addr().String()
}
loggerF := logging.NewDefaultLoggerFactory()
loggerF.DefaultLogLevel = logging.LogLevelDebug
caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM([]byte(g.Config.tlsCa))
listenerConfigs := make([]turn.ListenerConfig, 0)
packetConfigs := make([]turn.PacketConnConfig, 0)
if g.Config.isTlsEnabled {
caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM([]byte(g.Config.tlsCa))
dtlsServer, err := dtls.Listen("udp", addr, &dtls.Config{
Certificates: []tls.Certificate{g.Config.tls},
ClientCAs: caCertPool,
})
if err != nil {
return fmt.Errorf("Failed to start dtls server: %w", err)
}
listenerConfigs = append(listenerConfigs, turn.ListenerConfig{
RelayAddressGenerator: relayAddressGenerator,
Listener: dtlsServer,
})
} else {
udpListener, err := net.ListenPacket("udp4", "0.0.0.0:"+strconv.Itoa(g.Config.Port))
if err != nil {
return fmt.Errorf("Failed to relay udp listener: %w", err)
}
packetConfigs = append(packetConfigs, turn.PacketConnConfig{
RelayAddressGenerator: relayAddressGenerator,
PacketConn: udpListener,
})
}
server, err := turn.NewServer(turn.ServerConfig{
Realm: g.Config.Realm,
AuthHandler: turn.LongTermTURNRESTAuthHandler(g.Config.AuthSecret, logger),
// PacketConnConfigs is a list of UDP Listeners and the configuration around them
ListenerConfigs: listenerConfigs,
LoggerFactory: loggerF,
ListenerConfigs: listenerConfigs,
PacketConnConfigs: packetConfigs,
LoggerFactory: loggerF,
})
if err != nil {
return fmt.Errorf("Failed to start server: %w", err)
}
log.Info().Msgf("Relay listening on %s\n", connAddress)
log.Info().Msgf("Relay listening on %d\n", g.Config.Port)
// make this compatible with systemd notify mode
systemd.SdNotify(false, systemd.SdNotifyReady)

View File

@ -0,0 +1,37 @@
//go:build windows
// +build windows
package gateway
import (
"errors"
)
var (
errMissingTlsCert = errors.New("Missing TLS files")
errWindowsNotSupported = errors.New("Relay is not supported on Windows")
)
type GatewayRelay struct {
Config *GatewayRelayConfig
}
type GatewayRelayConfig struct {
PublicIP string
Port int
Realm string
AuthSecret string
RelayMinPort uint16
RelayMaxPort uint16
TlsCertPath string
TlsPrivateKeyPath string
TlsCaPath string
}
func NewGatewayRelay(configFilePath string) (*GatewayRelay, error) {
return nil, errWindowsNotSupported
}
func (g *GatewayRelay) Run() error {
return errWindowsNotSupported
}

View File

@ -0,0 +1,82 @@
package gateway
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"runtime"
"github.com/rs/zerolog/log"
)
const systemdServiceTemplate = `[Unit]
Description=Infisical Gateway Service
After=network.target
[Service]
Type=simple
EnvironmentFile=/etc/infisical/gateway.conf
ExecStart=/usr/local/bin/infisical gateway
Restart=on-failure
InaccessibleDirectories=/home
PrivateTmp=yes
LimitCORE=infinity
LimitNOFILE=1000000
LimitNPROC=60000
LimitRTPRIO=infinity
LimitRTTIME=7000000
[Install]
WantedBy=multi-user.target
`
func InstallGatewaySystemdService(token string, domain string) error {
if runtime.GOOS != "linux" {
log.Info().Msg("Skipping systemd service installation - not on Linux")
return nil
}
if os.Geteuid() != 0 {
log.Info().Msg("Skipping systemd service installation - not running as root/sudo")
return nil
}
configDir := "/etc/infisical"
if err := os.MkdirAll(configDir, 0755); err != nil {
return fmt.Errorf("failed to create config directory: %v", err)
}
configContent := fmt.Sprintf("INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN=%s\n", token)
if domain != "" {
configContent += fmt.Sprintf("INFISICAL_API_URL=%s\n", domain)
} else {
configContent += "INFISICAL_API_URL=\n"
}
configPath := filepath.Join(configDir, "gateway.conf")
if err := os.WriteFile(configPath, []byte(configContent), 0600); err != nil {
return fmt.Errorf("failed to write config file: %v", err)
}
servicePath := "/etc/systemd/system/infisical-gateway.service"
if _, err := os.Stat(servicePath); err == nil {
log.Info().Msg("Systemd service file already exists")
return nil
}
if err := os.WriteFile(servicePath, []byte(systemdServiceTemplate), 0644); err != nil {
return fmt.Errorf("failed to write systemd service file: %v", err)
}
reloadCmd := exec.Command("systemctl", "daemon-reload")
if err := reloadCmd.Run(); err != nil {
return fmt.Errorf("failed to reload systemd: %v", err)
}
log.Info().Msg("Successfully installed systemd service")
log.Info().Msg("To start the service, run: sudo systemctl start infisical-gateway")
log.Info().Msg("To enable the service on boot, run: sudo systemctl enable infisical-gateway")
return nil
}

View File

@ -10,6 +10,10 @@ Being a remote-first company, we try to be as async as possible. When an issue a
In other words, we have almost no (recurring) meetings and prefer written communication or quick Slack huddles.
## Daily Standup
Towards the end of each day, everyone on the Engineering and GTM teams should document their progress in the respective Slack standup channels, ensuring the team stays informed of important updates. On the engineering side, if you are working on something that takes longer than 1-2 days, please add an estimated completion date (ECD) for that item in standup specifying when it will be pushed to production.
## Weekly All-hands
All-hands is the single recurring meeting that we run every Monday at 8:30am PT. Typically, we would discuss everything important that happened during the previous week and plan out the week ahead. This is also an opportunity to bring up any important topics in front of the whole company (but feel free to post those in Slack too).
All-hands is the single recurring meeting that we run every Monday at 8:00am PT. Typically, we would discuss everything important that happened during the previous week and plan out the week ahead. This is also an opportunity to bring up any important topics in front of the whole company (but feel free to post those in Slack too).

View File

@ -0,0 +1,110 @@
---
title: "Gateway Security Architecture"
sidebarTitle: "Architecture"
description: "Understand the security model and tenant isolation of Infisical's Gateway"
---
# Gateway Security Architecture
The Infisical Gateway enables Infisical Cloud to securely interact with private resources. It uses mutual TLS authentication and a private PKI (Public Key Infrastructure) system to ensure secure, isolated communication between multiple tenants.
This document explains the internal security architecture and how tenant isolation is maintained.
## Security Model Overview
### Private PKI System
Each organization (tenant) in Infisical has its own private PKI system consisting of:
1. **Root CA**: The ultimate trust anchor for the organization
2. **Intermediate CAs**:
- Client CA: Issues certificates for cloud components
- Gateway CA: Issues certificates for gateway instances
This hierarchical structure ensures complete isolation between organizations as each has its own independent certificate chain.
### Certificate Hierarchy
```
Root CA (Organization Specific)
├── Client CA
│ └── Client Certificates (Cloud Components)
└── Gateway CA
└── Gateway Certificates (Gateway Instances)
```
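To make the hierarchy concrete, here is a minimal sketch of how a certificate issued under such a per-organization chain could be validated with Go's standard `crypto/x509` package. The function names and the idea of passing PEM-encoded inputs are illustrative assumptions, not Infisical's actual implementation.
```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"errors"
	"fmt"
)

// parseCert decodes a single PEM-encoded certificate.
func parseCert(pemBytes []byte) (*x509.Certificate, error) {
	block, _ := pem.Decode(pemBytes)
	if block == nil || block.Type != "CERTIFICATE" {
		return nil, errors.New("no certificate PEM block found")
	}
	return x509.ParseCertificate(block.Bytes)
}

// verifyLeaf checks that a gateway or client certificate chains back to the
// organization's root CA through its intermediate (Gateway or Client) CA.
func verifyLeaf(leafPEM, intermediatePEM, rootPEM []byte) error {
	leaf, err := parseCert(leafPEM)
	if err != nil {
		return fmt.Errorf("parse leaf: %w", err)
	}

	intermediates := x509.NewCertPool()
	if !intermediates.AppendCertsFromPEM(intermediatePEM) {
		return errors.New("invalid intermediate CA PEM")
	}
	roots := x509.NewCertPool()
	if !roots.AppendCertsFromPEM(rootPEM) {
		return errors.New("invalid root CA PEM")
	}

	// Verification only succeeds if the chain terminates at this
	// organization's root, which is what keeps tenants isolated.
	_, err = leaf.Verify(x509.VerifyOptions{
		Roots:         roots,
		Intermediates: intermediates,
	})
	return err
}

func main() {
	// With empty inputs the check fails, just as it would for a certificate
	// issued by another organization's CA.
	fmt.Println(verifyLeaf(nil, nil, nil))
}
```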
## Communication Security
### 1. Gateway Registration
When a gateway is first deployed, it:
1. Establishes an initial connection using a machine identity token
2. Allocates a relay address for communication
3. Exchanges certificates through a secure handshake:
- The gateway receives a unique certificate signed by the organization's Gateway CA, along with the certificate chain used for verification
### 2. Mutual TLS Authentication
All communication between the gateway and the cloud uses mutual TLS (mTLS); a configuration sketch follows the lists below:
- **Gateway Authentication**:
- Presents certificate signed by organization's Gateway CA
- Certificate contains unique identifiers (Organization ID, Gateway ID)
- Cloud validates complete certificate chain
- **Cloud Authentication**:
- Presents certificate signed by organization's Client CA
- Certificate includes required organizational unit ("gateway-client")
- Gateway validates certificate chain back to organization's root CA
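Below is a minimal sketch, under assumptions, of a gateway-side `tls.Config` that enforces these checks: mutual TLS against the organization's CA bundle plus the documented "gateway-client" organizational unit. File paths, variable names, and how certificates are loaded are illustrative; only the OU value comes from this document.
```go
package main

import (
	"crypto/tls"
	"crypto/x509"
	"errors"
	"log"
	"os"
)

func gatewayTLSConfig(certFile, keyFile, clientCAFile string) (*tls.Config, error) {
	// The gateway's own certificate, issued by the organization's Gateway CA.
	gatewayCert, err := tls.LoadX509KeyPair(certFile, keyFile)
	if err != nil {
		return nil, err
	}

	// Trust only this organization's CA chain for incoming cloud connections.
	caPEM, err := os.ReadFile(clientCAFile)
	if err != nil {
		return nil, err
	}
	clientCAs := x509.NewCertPool()
	if !clientCAs.AppendCertsFromPEM(caPEM) {
		return nil, errors.New("invalid client CA bundle")
	}

	return &tls.Config{
		Certificates: []tls.Certificate{gatewayCert},
		ClientCAs:    clientCAs,
		ClientAuth:   tls.RequireAndVerifyClientCert, // mutual TLS
		MinVersion:   tls.VersionTLS12,               // minimum version is an assumption
		// After chain validation, additionally require the documented
		// organizational unit on the cloud client's leaf certificate.
		VerifyPeerCertificate: func(_ [][]byte, chains [][]*x509.Certificate) error {
			for _, chain := range chains {
				for _, ou := range chain[0].Subject.OrganizationalUnit {
					if ou == "gateway-client" {
						return nil
					}
				}
			}
			return errors.New("client certificate missing gateway-client OU")
		},
	}, nil
}

func main() {
	if _, err := gatewayTLSConfig("gateway.crt", "gateway.key", "client-ca.pem"); err != nil {
		log.Println("expected to fail without real certificate files:", err)
	}
}
```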
### 3. Relay Communication
The relay system provides secure tunneling:
1. **Connection Establishment**:
- Uses QUIC protocol over UDP for efficient, secure communication
- Provides built-in encryption, congestion control, and multiplexing
- Enables faster connection establishment and reduced latency
- Each organization's traffic is isolated using separate relay sessions
2. **Traffic Isolation**:
- Each gateway gets unique relay credentials
- Traffic is end-to-end encrypted using QUIC's TLS 1.3
- An organization's private keys never leave its environment
## Tenant Isolation
### Certificate-Based Isolation
- Each organization has unique root CA and intermediate CAs
- Certificates contain organization-specific identifiers
- Cross-tenant communication is cryptographically impossible
### Gateway-Project Mapping
- Gateways are explicitly mapped to specific projects
- Access controls enforce organization boundaries
- Project-level permissions determine resource accessibility
### Resource Access Control
1. **Project Verification**:
- Gateway verifies project membership
- Validates organization ownership
- Enforces project-level permissions
2. **Resource Restrictions**:
- Gateways only accept connections to approved resources
- Each connection requires explicit project authorization
- Resources remain private to their assigned organization
## Security Measures
### Certificate Lifecycle
- Certificates have limited validity periods
- Automatic certificate rotation (see the sketch after this list)
- Immediate certificate revocation capabilities
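As a small illustration of the rotation idea, an automatic rotation loop only needs to compare a certificate's expiry against a renewal window. The threshold and helper below are assumptions for illustration, not Infisical's actual rotation policy.
```go
package main

import (
	"crypto/x509"
	"fmt"
	"time"
)

// needsRotation reports whether a certificate is within the renewal window
// before its expiry and should therefore be reissued.
func needsRotation(cert *x509.Certificate, renewBefore time.Duration, now time.Time) bool {
	return now.After(cert.NotAfter.Add(-renewBefore))
}

func main() {
	cert := &x509.Certificate{NotAfter: time.Now().Add(12 * time.Hour)}
	// With a 24h renewal window, a certificate expiring in 12h should rotate.
	fmt.Println(needsRotation(cert, 24*time.Hour, time.Now())) // true
}
```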
### Monitoring and Verification
1. **Continuous Verification**:
- Regular heartbeat checks
- Certificate chain validation
- Connection state monitoring
2. **Security Controls**:
- Automatic connection termination on verification failure
- Audit logging of all access attempts
- Machine identity based authentication

View File

@ -36,3 +36,18 @@ If the signature in the header matches the signature that you generated, then yo
"timestamp": ""
}
```
```json
{
"event": "secrets.reminder-expired",
"project": {
"workspaceId": "the workspace id",
"environment": "project environment",
"secretPath": "project folder path",
"secretName": "name of the secret",
"secretId": "id of the secret",
"reminderNote": "reminder note of the secret"
},
"timestamp": ""
}
```
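Verifying payloads like the ones above amounts to recomputing the signature over the raw request body with the webhook secret and comparing it to the received header value in constant time. The sketch below assumes an HMAC-SHA256 scheme with a `sha256=`-prefixed hex digest in an `x-infisical-signature` header; the header name and exact format are assumptions for illustration and may differ from the actual webhook implementation.
```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strings"
)

// verifySignature recomputes the HMAC of the raw request body and compares it
// to the received header value in constant time.
func verifySignature(secret, body []byte, signatureHeader string) bool {
	received := strings.TrimPrefix(signatureHeader, "sha256=")
	mac := hmac.New(sha256.New, secret)
	mac.Write(body)
	expected := hex.EncodeToString(mac.Sum(nil))
	return hmac.Equal([]byte(expected), []byte(received))
}

func main() {
	body := []byte(`{"event":"secrets.reminder-expired"}`)
	secret := []byte("example-webhook-secret")

	// Simulate the header a sender would attach (hypothetical format).
	mac := hmac.New(sha256.New, secret)
	mac.Write(body)
	header := "sha256=" + hex.EncodeToString(mac.Sum(nil))

	fmt.Println(verifySignature(secret, body, header)) // true
}
```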

View File

@ -203,7 +203,7 @@
},
{
"group": "Gateway",
"pages": ["documentation/platform/gateways/overview"]
"pages": ["documentation/platform/gateways/overview", "documentation/platform/gateways/gateway-security"]
},
"documentation/platform/project-templates",
{

View File

@ -1,93 +0,0 @@
import { useTranslation } from "react-i18next";
import { faWarning } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { useToggle } from "@app/hooks";
import { generateUserBackupKey } from "@app/lib/crypto";
import { createNotification } from "../notifications";
import { generateBackupPDFAsync } from "../utilities/generateBackupPDF";
import { Button } from "../v2";
interface DownloadBackupPDFStepProps {
incrementStep: () => void;
email: string;
password: string;
name: string;
}
/**
* This is the step of the signup flow where the user downloads the backup pdf
* @param {object} obj
* @param {function} obj.incrementStep - function that moves the user on to the next stage of signup
* @param {string} obj.email - user's email
* @param {string} obj.password - user's password
* @param {string} obj.name - user's name
* @returns
*/
export default function DonwloadBackupPDFStep({
incrementStep,
email,
password,
name
}: DownloadBackupPDFStepProps): JSX.Element {
const { t } = useTranslation();
const [isLoading, setIsLoading] = useToggle();
const handleBackupKeyGenerate = async () => {
try {
setIsLoading.on();
const generatedKey = await generateUserBackupKey(email, password);
await generateBackupPDFAsync({
generatedKey,
personalEmail: email,
personalName: name
});
incrementStep();
} catch (err) {
console.log(err);
createNotification({
type: "error",
text: "Failed to generate backup key"
});
} finally {
setIsLoading.off();
}
};
return (
<div className="mx-auto mb-36 flex h-full w-full flex-col items-center md:mb-16 md:px-6">
<p className="flex flex-col items-center justify-center bg-gradient-to-b from-white to-bunker-200 bg-clip-text text-center text-xl font-medium text-transparent">
<FontAwesomeIcon
icon={faWarning}
className="mb-6 ml-2 mr-3 pt-1 text-6xl text-bunker-200"
/>
{t("signup.step4-message")}
</p>
<div className="text-md mt-8 flex w-full max-w-md flex-col items-center justify-center rounded-md border border-mineshaft-600 bg-mineshaft-800 pb-2 text-center text-bunker-300 md:min-w-[24rem] lg:w-1/6">
<div className="m-2 mx-auto mt-4 flex w-full flex-row items-center rounded-md px-3 text-center text-bunker-300 md:mt-8 md:min-w-[23rem] lg:w-1/6">
<span className="mb-2">
{t("signup.step4-description1")} {t("signup.step4-description3")}
</span>
</div>
<div className="mx-auto mb-2 mt-2 flex w-full flex-col items-center justify-center px-3 text-center text-sm md:mb-4 md:mt-4 md:min-w-[20rem] md:max-w-md md:text-left lg:w-1/6">
<div className="text-l w-full py-1 text-lg">
<Button
onClick={handleBackupKeyGenerate}
size="sm"
isFullWidth
isLoading={isLoading}
isDisabled={isLoading}
className="h-12"
colorSchema="primary"
variant="outline_bg"
>
Download PDF
</Button>
</div>
</div>
</div>
</div>
);
}

View File

@ -9,9 +9,7 @@ import { Button, FormControl, Input, TextArea } from "@app/components/v2";
import { ProjectPermissionActions, ProjectPermissionSub, useWorkspace } from "@app/context";
import { useUpdateProject } from "@app/hooks/api";
import { CopyButton } from "./CopyButton";
const formSchema = z.object({
const baseFormSchema = z.object({
name: z.string().min(1, "Required").max(64, "Too long, maximum length is 64 characters"),
description: z
.string()
@ -20,31 +18,55 @@ const formSchema = z.object({
.optional()
});
type FormData = z.infer<typeof formSchema>;
const formSchemaWithSlug = baseFormSchema.extend({
slug: z
.string()
.min(1, "Required")
.max(64, "Too long, maximum length is 64 characters")
.regex(
/^[a-z0-9]+(?:[_-][a-z0-9]+)*$/,
"Project slug can only contain lowercase letters and numbers, with optional single hyphens (-) or underscores (_) between words. Cannot start or end with a hyphen or underscore."
)
});
export const ProjectOverviewChangeSection = () => {
type BaseFormData = z.infer<typeof baseFormSchema>;
type FormDataWithSlug = z.infer<typeof formSchemaWithSlug>;
type Props = {
showSlugField?: boolean;
};
export const ProjectOverviewChangeSection = ({ showSlugField = false }: Props) => {
const { currentWorkspace } = useWorkspace();
const { mutateAsync, isPending } = useUpdateProject();
const { handleSubmit, control, reset, watch } = useForm<BaseFormData | FormDataWithSlug>({
resolver: zodResolver(showSlugField ? formSchemaWithSlug : baseFormSchema)
});
const { handleSubmit, control, reset } = useForm<FormData>({ resolver: zodResolver(formSchema) });
const currentSlug = showSlugField ? watch("slug") : currentWorkspace?.slug;
useEffect(() => {
if (currentWorkspace) {
reset({
name: currentWorkspace.name,
description: currentWorkspace.description ?? ""
description: currentWorkspace.description ?? "",
...(showSlugField && { slug: currentWorkspace.slug })
});
}
}, [currentWorkspace]);
}, [currentWorkspace, showSlugField]);
const onFormSubmit = async ({ name, description }: FormData) => {
const onFormSubmit = async (data: BaseFormData | FormDataWithSlug) => {
try {
if (!currentWorkspace?.id) return;
await mutateAsync({
projectID: currentWorkspace.id,
newProjectName: name,
newProjectDescription: description
newProjectName: data.name,
newProjectDescription: data.description,
...(showSlugField &&
"slug" in data && {
newSlug: data.slug !== currentWorkspace.slug ? data.slug : undefined
})
});
createNotification({
@ -65,20 +87,34 @@ export const ProjectOverviewChangeSection = () => {
<div className="justify-betweens flex">
<h2 className="mb-8 flex-1 text-xl font-semibold text-mineshaft-100">Project Overview</h2>
<div className="space-x-2">
<CopyButton
value={currentWorkspace?.slug || ""}
hoverText="Click to project slug"
notificationText="Copied project slug to clipboard"
<Button
variant="outline_bg"
size="sm"
onClick={() => {
navigator.clipboard.writeText(currentSlug || "");
createNotification({
text: "Copied project slug to clipboard",
type: "success"
});
}}
title="Click to copy project slug"
>
Copy Project Slug
</CopyButton>
<CopyButton
value={currentWorkspace?.id || ""}
hoverText="Click to project ID"
notificationText="Copied project ID to clipboard"
</Button>
<Button
variant="outline_bg"
size="sm"
onClick={() => {
navigator.clipboard.writeText(currentWorkspace?.id || "");
createNotification({
text: "Copied project ID to clipboard",
type: "success"
});
}}
title="Click to copy project ID"
>
Copy Project ID
</CopyButton>
</Button>
</div>
</div>
<div>
@ -113,6 +149,38 @@ export const ProjectOverviewChangeSection = () => {
</ProjectPermissionCan>
</div>
</div>
{showSlugField && (
<div className="flex w-full flex-row items-end gap-4">
<div className="w-full max-w-md">
<ProjectPermissionCan
I={ProjectPermissionActions.Edit}
a={ProjectPermissionSub.Project}
>
{(isAllowed) => (
<Controller
defaultValue=""
render={({ field, fieldState: { error } }) => (
<FormControl
isError={Boolean(error)}
errorText={error?.message}
label="Project slug"
>
<Input
placeholder="Project slug"
{...field}
className="bg-mineshaft-800"
isDisabled={!isAllowed}
/>
</FormControl>
)}
control={control}
name="slug"
/>
)}
</ProjectPermissionCan>
</div>
</div>
)}
<div className="flex w-full flex-row items-end gap-4">
<div className="w-full max-w-md">
<ProjectPermissionCan

View File

@ -14,7 +14,6 @@ import {
AccordionItem,
AccordionTrigger,
Button,
Checkbox,
FormControl,
Input,
Modal,
@ -33,13 +32,7 @@ import {
useUser
} from "@app/context";
import { getProjectHomePage } from "@app/helpers/project";
import {
fetchOrgUsers,
useAddUserToWsNonE2EE,
useCreateWorkspace,
useGetExternalKmsList,
useGetUserWorkspaces
} from "@app/hooks/api";
import { useCreateWorkspace, useGetExternalKmsList, useGetUserWorkspaces } from "@app/hooks/api";
import { INTERNAL_KMS_KEY_ID } from "@app/hooks/api/kms/types";
import { InfisicalProjectTemplate, useListProjectTemplates } from "@app/hooks/api/projectTemplates";
import { ProjectType } from "@app/hooks/api/workspace/types";
@ -51,7 +44,6 @@ const formSchema = z.object({
.trim()
.max(256, "Description too long, max length is 256 characters")
.optional(),
addMembers: z.boolean(),
kmsKeyId: z.string(),
template: z.string()
});
@ -73,7 +65,6 @@ const NewProjectForm = ({ onOpenChange, projectType }: NewProjectFormProps) => {
const { user } = useUser();
const createWs = useCreateWorkspace();
const { refetch: refetchWorkspaces } = useGetUserWorkspaces();
const addUsersToProject = useAddUserToWsNonE2EE();
const { subscription } = useSubscription();
const canReadProjectTemplates = permission.can(
@ -111,7 +102,6 @@ const NewProjectForm = ({ onOpenChange, projectType }: NewProjectFormProps) => {
const onCreateProject = async ({
name,
description,
addMembers,
kmsKeyId,
template
}: TAddProjectFormData) => {
@ -128,21 +118,6 @@ const NewProjectForm = ({ onOpenChange, projectType }: NewProjectFormProps) => {
template,
type: projectType
});
const { id: newProjectId } = project;
if (addMembers) {
const orgUsers = await fetchOrgUsers(currentOrg.id);
await addUsersToProject.mutateAsync({
usernames: orgUsers
.filter(
(member) => member.user.username !== user.username && member.status === "accepted"
)
.map((member) => member.user.username),
projectId: newProjectId,
orgId: currentOrg.id
});
}
await refetchWorkspaces();
createNotification({ text: "Project created", type: "success" });
@ -246,31 +221,7 @@ const NewProjectForm = ({ onOpenChange, projectType }: NewProjectFormProps) => {
)}
/>
</div>
<div className="mt-4 pl-1">
<Controller
control={control}
name="addMembers"
defaultValue={false}
render={({ field: { onBlur, value, onChange } }) => (
<OrgPermissionCan I={OrgPermissionActions.Read} a={OrgPermissionSubjects.Member}>
{(isAllowed) => (
<div>
<Checkbox
id="add-project-layout"
isChecked={value}
onCheckedChange={onChange}
isDisabled={!isAllowed}
onBlur={onBlur}
>
Add all members of my organization to this project
</Checkbox>
</div>
)}
</OrgPermissionCan>
)}
/>
</div>
<div className="mt-14 flex">
<div className="mt-4 flex">
<Accordion type="single" collapsible className="w-full">
<AccordionItem value="advance-settings" className="data-[state=open]:border-none">
<AccordionTrigger className="h-fit flex-none pl-1 text-sm">

View File

@ -34,12 +34,12 @@ const passwordCheck = async ({
const tests = [
{
name: "tooShort",
validator: (pwd: string) => pwd.length >= 14,
validator: (pwd: string) => pwd?.length >= 14,
setError: setPasswordErrorTooShort
},
{
name: "tooLong",
validator: (pwd: string) => pwd.length < 101,
validator: (pwd: string) => pwd?.length < 101,
setError: setPasswordErrorTooLong
},
{

View File

@ -50,6 +50,7 @@ export type TUpdateAdminSlackConfigDTO = {
export type AdminGetUsersFilters = {
limit: number;
searchTerm: string;
adminsOnly: boolean;
};
export type AdminSlackConfig = {

View File

@ -122,6 +122,7 @@ export const eventToNameMap: { [K in EventType]: string } = {
"OIDC group membership mapping assigned user to groups",
[EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER]:
"OIDC group membership mapping removed user from groups",
[EventType.SECRET_APPROVAL_REQUEST_REVIEW]: "Review Secret Approval Request",
[EventType.CREATE_KMIP_CLIENT]: "Create KMIP client",
[EventType.UPDATE_KMIP_CLIENT]: "Update KMIP client",
[EventType.DELETE_KMIP_CLIENT]: "Delete KMIP client",

View File

@ -150,5 +150,6 @@ export enum EventType {
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
KMIP_OPERATION_REGISTER = "kmip-operation-register"
KMIP_OPERATION_REGISTER = "kmip-operation-register",
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review"
}

View File

@ -2,6 +2,8 @@ export {
useGetAuthToken,
useOauthTokenExchange,
useResetPassword,
useResetPasswordV2,
useResetUserPasswordV2,
useSelectOrganization,
useSendMfaToken,
useSendPasswordResetEmail,

View File

@ -22,12 +22,15 @@ import {
LoginLDAPRes,
MfaMethod,
ResetPasswordDTO,
ResetPasswordV2DTO,
ResetUserPasswordV2DTO,
SendMfaTokenDTO,
SetupPasswordDTO,
SRP1DTO,
SRPR1Res,
TOauthTokenExchangeDTO,
UserAgentType,
UserEncryptionVersion,
VerifyMfaTokenDTO,
VerifyMfaTokenRes,
VerifySignupInviteDTO
@ -247,7 +250,10 @@ export const useSendPasswordResetEmail = () => {
export const useVerifyPasswordResetCode = () => {
return useMutation({
mutationFn: async ({ email, code }: { email: string; code: string }) => {
const { data } = await apiRequest.post("/api/v1/password/email/password-reset-verify", {
const { data } = await apiRequest.post<{
token: string;
userEncryptionVersion: UserEncryptionVersion;
}>("/api/v1/password/email/password-reset-verify", {
email,
code
});
@ -302,6 +308,26 @@ export const useResetPassword = () => {
});
};
export const useResetPasswordV2 = () => {
return useMutation({
mutationFn: async (details: ResetPasswordV2DTO) => {
await apiRequest.post("/api/v2/password/password-reset", details, {
headers: {
Authorization: `Bearer ${details.verificationToken}`
}
});
}
});
};
export const useResetUserPasswordV2 = () => {
return useMutation({
mutationFn: async (details: ResetUserPasswordV2DTO) => {
await apiRequest.post("/api/v2/password/user/password-reset", details);
}
});
};
export const changePassword = async (details: ChangePasswordDTO) => {
const { data } = await apiRequest.post("/api/v1/password/change-password", details);
return data;

View File

@ -3,6 +3,11 @@ export type GetAuthTokenAPI = {
organizationId?: string;
};
export enum UserEncryptionVersion {
V1 = 1,
V2 = 2
}
export type SendMfaTokenDTO = {
email: string;
};
@ -136,6 +141,16 @@ export type ResetPasswordDTO = {
password: string;
};
export type ResetPasswordV2DTO = {
newPassword: string;
verificationToken: string;
};
export type ResetUserPasswordV2DTO = {
oldPassword: string;
newPassword: string;
};
export type SetupPasswordDTO = {
protectedKey: string;
protectedKeyIV: string;

View File

@ -74,6 +74,107 @@ export const queryClient = new QueryClient({
);
return;
}
if (serverResponse?.error === ApiErrorTypes.PermissionBoundaryError) {
createNotification(
{
title: "Forbidden Access",
type: "error",
text: `${serverResponse.message}.`,
callToAction: serverResponse?.details?.missingPermissions?.length ? (
<Modal>
<ModalTrigger asChild>
<Button variant="outline_bg" size="xs">
Show more
</Button>
</ModalTrigger>
<ModalContent title="Missing Permission">
<div className="flex flex-col gap-2">
{serverResponse.details?.missingPermissions?.map((el, index) => {
const hasConditions = Boolean(Object.keys(el.conditions || {}).length);
return (
<div
key={`Forbidden-error-details-${index + 1}`}
className="rounded-md border border-gray-600 p-4"
>
<div>
You are not authorized to perform the <b>{el.action}</b> action on the{" "}
<b>{el.subject}</b> resource.{" "}
{hasConditions &&
"Your permission does not allow access to the following conditions:"}
</div>
{hasConditions && (
<ul className="flex list-disc flex-col gap-1 pl-5 pt-2 text-sm">
{Object.keys(el.conditions || {}).flatMap((field, fieldIndex) => {
const operators = (
el.conditions as Record<
string,
| string
| { [K in PermissionConditionOperators]: string | string[] }
>
)[field];
const formattedFieldName = camelCaseToSpaces(field).toLowerCase();
if (typeof operators === "string") {
return (
<li
key={`Forbidden-error-details-${index + 1}-${
fieldIndex + 1
}`}
>
<span className="font-bold capitalize">
{formattedFieldName}
</span>{" "}
<span className="text-mineshaft-200">equal to</span>{" "}
<span className="text-yellow-600">{operators}</span>
</li>
);
}
return Object.keys(operators).map((operator, operatorIndex) => (
<li
key={`Forbidden-error-details-${index + 1}-${
fieldIndex + 1
}-${operatorIndex + 1}`}
>
<span className="font-bold capitalize">
{formattedFieldName}
</span>{" "}
<span className="text-mineshaft-200">
{
formatedConditionsOperatorNames[
operator as PermissionConditionOperators
]
}
</span>{" "}
<span className="text-yellow-600">
{operators[
operator as PermissionConditionOperators
].toString()}
</span>
</li>
));
})}
</ul>
)}
</div>
);
})}
</div>
</ModalContent>
</Modal>
) : undefined,
copyActions: [
{
value: serverResponse.reqId,
name: "Request ID",
label: `Request ID: ${serverResponse.reqId}`
}
]
},
{ closeOnClick: false }
);
return;
}
if (serverResponse?.error === ApiErrorTypes.ForbiddenError) {
createNotification(
{

View File

@ -13,9 +13,10 @@ export const useUpdateSecretApprovalReviewStatus = () => {
const queryClient = useQueryClient();
return useMutation<object, object, TUpdateSecretApprovalReviewStatusDTO>({
mutationFn: async ({ id, status }) => {
mutationFn: async ({ id, status, comment }) => {
const { data } = await apiRequest.post(`/api/v1/secret-approval-requests/${id}/review`, {
status
status,
comment
});
return data;
},

Some files were not shown because too many files have changed in this diff.