Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-25 14:07:47 +00:00)
Compare commits: doc/add-do ... fix/resolv (260 commits)
@@ -22,14 +22,14 @@ jobs:
# uncomment this when testing locally using nektos/act
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker-compose` for local simulations
name: Install `docker compose` for local simulations
with:
version: "2.14.2"
- name: 📦Build the latest image
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
@@ -72,6 +72,6 @@ jobs:
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
run: |
docker-compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
6 .github/workflows/run-backend-tests.yml (vendored)
@@ -20,7 +20,7 @@ jobs:
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker-compose` for local simulations
name: Install `docker compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
@@ -33,7 +33,7 @@ jobs:
run: npm install
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
run: npm run test:e2e
working-directory: backend
@@ -44,4 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker-compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down
2 .github/workflows/run-cli-tests.yml (vendored)
@@ -50,6 +50,6 @@ jobs:
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

run: go test -v -count=1 ./test
13 Makefile
@@ -15,3 +15,16 @@ up-prod:

down:
docker compose -f docker-compose.dev.yml down

reviewable-ui:
cd frontend && \
npm run lint:fix && \
npm run type:check

reviewable-api:
cd backend && \
npm run lint:fix && \
npm run type:check

reviewable: reviewable-ui reviewable-api
4609 backend/package-lock.json (generated). File diff suppressed because it is too large.
@@ -34,9 +34,9 @@
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup",
"build": "tsup --sourcemap",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node dist/main.mjs",
"start": "node --enable-source-maps dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
@@ -78,6 +78,7 @@
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@@ -121,6 +122,7 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
@@ -171,6 +173,7 @@
"pino": "^8.16.2",
"posthog-node": "^3.6.2",
"probot": "^13.0.0",
"safe-regex": "^2.1.1",
"smee-client": "^2.0.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
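The two script changes above pair at build and run time: tsup emits a source map next to dist/main.mjs, and --enable-source-maps makes Node report stack traces against the original TypeScript files. A minimal sketch of the equivalent configuration, assuming a hypothetical tsup.config.ts (the repository drives tsup from the npm script instead):

// tsup.config.ts (hypothetical): equivalent of passing --sourcemap on the CLI
import { defineConfig } from "tsup";

export default defineConfig({
  entry: ["./src/main.ts"], // assumed entry point, matching the dev script above
  format: ["esm"],          // dist/main.mjs in the start script implies ESM output
  sourcemap: true           // emit dist/main.mjs.map so --enable-source-maps can remap stack traces
});

// At runtime, source-map support can also be switched on programmatically
// instead of via the --enable-source-maps flag (Node 16.6+):
// process.setSourceMapsEnabled(true);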
@@ -7,14 +7,33 @@ const prompt = promptSync({
sigint: true
});

type ComponentType = 1 | 2 | 3;

console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);
const componentType = parseInt(prompt("Select a component: "), 10);

function getComponentType(): ComponentType {
while (true) {
const input = prompt("Select a component (0-3): ");
const componentType = parseInt(input, 10);

if (componentType === 0) {
console.log("Exiting the program. Goodbye!");
process.exit(0);
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
return componentType;
} else {
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
}
}
}
const componentType = getComponentType();

if (componentType === 1) {
const componentName = prompt("Enter service name: ");
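The updated generator script narrows user input to the ComponentType union before any scaffolding runs. A small aside, not part of the script, showing how that union lets the compiler enforce that every component kind is handled:

type ComponentType = 1 | 2 | 3;

// Exhaustive dispatch over the union: if a fourth component kind is ever added
// to ComponentType, the `never` assignment below becomes a compile-time error.
function describeComponent(componentType: ComponentType): string {
  switch (componentType) {
    case 1:
      return "Service component";
    case 2:
      return "DAL component";
    case 3:
      return "Router component";
    default: {
      const unreachable: never = componentType;
      return unreachable;
    }
  }
}

console.log(describeComponent(2)); // "DAL component"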
10 backend/src/@types/fastify.d.ts (vendored)
@@ -18,6 +18,7 @@ import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-ser
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
|
||||
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
|
||||
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
|
||||
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
@@ -35,6 +36,7 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
|
||||
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
|
||||
@@ -50,6 +52,9 @@ import { TIntegrationServiceFactory } from "@app/services/integration/integratio
|
||||
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
|
||||
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
|
||||
import { TOrgServiceFactory } from "@app/services/org/org-service";
|
||||
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
|
||||
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
|
||||
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
|
||||
import { TProjectServiceFactory } from "@app/services/project/project-service";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
|
||||
@@ -88,6 +93,7 @@ declare module "fastify" {
|
||||
id: string;
|
||||
orgId: string;
|
||||
};
|
||||
rateLimits: RateLimitConfiguration;
|
||||
// passport data
|
||||
passportUser: {
|
||||
isUserCompleted: string;
|
||||
@@ -113,6 +119,7 @@ declare module "fastify" {
|
||||
group: TGroupServiceFactory;
|
||||
groupProject: TGroupProjectServiceFactory;
|
||||
apiKey: TApiKeyServiceFactory;
|
||||
pkiAlert: TPkiAlertServiceFactory;
|
||||
project: TProjectServiceFactory;
|
||||
projectMembership: TProjectMembershipServiceFactory;
|
||||
projectEnv: TProjectEnvServiceFactory;
|
||||
@@ -150,8 +157,10 @@ declare module "fastify" {
|
||||
auditLog: TAuditLogServiceFactory;
|
||||
auditLogStream: TAuditLogStreamServiceFactory;
|
||||
certificate: TCertificateServiceFactory;
|
||||
certificateTemplate: TCertificateTemplateServiceFactory;
|
||||
certificateAuthority: TCertificateAuthorityServiceFactory;
|
||||
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
|
||||
pkiCollection: TPkiCollectionServiceFactory;
|
||||
secretScanning: TSecretScanningServiceFactory;
|
||||
license: TLicenseServiceFactory;
|
||||
trustedIp: TTrustedIpServiceFactory;
|
||||
@@ -165,6 +174,7 @@ declare module "fastify" {
|
||||
rateLimit: TRateLimitServiceFactory;
|
||||
userEngagement: TUserEngagementServiceFactory;
|
||||
externalKms: TExternalKmsServiceFactory;
|
||||
orgAdmin: TOrgAdminServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
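The fastify.d.ts changes extend Fastify's interfaces through TypeScript module augmentation, so the newly registered factories (certificateTemplate, pkiAlert, pkiCollection, orgAdmin) are typed wherever handlers reach for them. A stripped-down sketch of the same mechanism, using a placeholder service shape and a hypothetical `services` decoration rather than the real factory types:

import fastify from "fastify";

// Placeholder service shape; the real code uses the factory return types imported above.
type TExampleCertificateTemplateService = {
  getById: (id: string) => Promise<{ id: string; name: string }>;
};

declare module "fastify" {
  interface FastifyInstance {
    services: {
      certificateTemplate: TExampleCertificateTemplateService;
    };
  }
}

const server = fastify();

// The decoration supplies the value; the augmentation above supplies the type.
server.decorate("services", {
  certificateTemplate: {
    getById: async (id: string) => ({ id, name: "example" })
  }
});

// Any handler now gets full typing on the decorated property:
server.get("/cert-templates/:id", async (req) => {
  const { id } = req.params as { id: string };
  return server.services.certificateTemplate.getById(id);
});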
28 backend/src/@types/knex.d.ts (vendored)
@@ -53,6 +53,9 @@ import {
|
||||
TCertificateSecretsUpdate,
|
||||
TCertificatesInsert,
|
||||
TCertificatesUpdate,
|
||||
TCertificateTemplates,
|
||||
TCertificateTemplatesInsert,
|
||||
TCertificateTemplatesUpdate,
|
||||
TDynamicSecretLeases,
|
||||
TDynamicSecretLeasesInsert,
|
||||
TDynamicSecretLeasesUpdate,
|
||||
@@ -161,6 +164,15 @@ import {
|
||||
TOrgRoles,
|
||||
TOrgRolesInsert,
|
||||
TOrgRolesUpdate,
|
||||
TPkiAlerts,
|
||||
TPkiAlertsInsert,
|
||||
TPkiAlertsUpdate,
|
||||
TPkiCollectionItems,
|
||||
TPkiCollectionItemsInsert,
|
||||
TPkiCollectionItemsUpdate,
|
||||
TPkiCollections,
|
||||
TPkiCollectionsInsert,
|
||||
TPkiCollectionsUpdate,
|
||||
TProjectBots,
|
||||
TProjectBotsInsert,
|
||||
TProjectBotsUpdate,
|
||||
@@ -355,6 +367,11 @@ declare module "knex/types/tables" {
|
||||
TCertificateAuthorityCrlUpdate
|
||||
>;
|
||||
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
|
||||
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
|
||||
TCertificateTemplates,
|
||||
TCertificateTemplatesInsert,
|
||||
TCertificateTemplatesUpdate
|
||||
>;
|
||||
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
|
||||
TCertificateBodies,
|
||||
TCertificateBodiesInsert,
|
||||
@@ -365,6 +382,17 @@ declare module "knex/types/tables" {
|
||||
TCertificateSecretsInsert,
|
||||
TCertificateSecretsUpdate
|
||||
>;
|
||||
[TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
|
||||
[TableName.PkiCollection]: KnexOriginal.CompositeTableType<
|
||||
TPkiCollections,
|
||||
TPkiCollectionsInsert,
|
||||
TPkiCollectionsUpdate
|
||||
>;
|
||||
[TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
|
||||
TPkiCollectionItems,
|
||||
TPkiCollectionItemsInsert,
|
||||
TPkiCollectionItemsUpdate
|
||||
>;
|
||||
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
|
||||
TUserGroupMembership,
|
||||
TUserGroupMembershipInsert,
|
||||
|
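knex.d.ts registers each new table (certificate templates, PKI alerts, PKI collections and collection items) in Knex's declaration-merged table map, so knex(tableName) calls come back with row, insert and update types. A reduced sketch of the mechanism for a single hypothetical table, independent of the generated schema types:

import { Knex } from "knex";

// Hypothetical row/insert/update shapes standing in for the generated schema types.
type TPkiAlertExample = { id: string; name: string; alertBeforeDays: number };
type TPkiAlertExampleInsert = Omit<TPkiAlertExample, "id">;
type TPkiAlertExampleUpdate = Partial<TPkiAlertExampleInsert>;

declare module "knex/types/tables" {
  interface Tables {
    // CompositeTableType<Select, Insert, Update> is what the real file registers per table.
    pki_alerts_example: Knex.CompositeTableType<TPkiAlertExample, TPkiAlertExampleInsert, TPkiAlertExampleUpdate>;
  }
}

// With the registration in place, these calls are fully typed without casts:
async function demo(db: Knex) {
  const rows = await db("pki_alerts_example").select("*"); // rows: TPkiAlertExample[]
  await db("pki_alerts_example").insert({ name: "expiry", alertBeforeDays: 30 });
  await db("pki_alerts_example").where("id", rows[0]?.id ?? "").update({ alertBeforeDays: 14 });
}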
@@ -0,0 +1,294 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
|
||||
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
|
||||
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
|
||||
|
||||
if (!hasApproverUserId) {
|
||||
// add the new fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
// if (hasApproverId) tb.setNullable("approverId");
|
||||
tb.uuid("approverUserId");
|
||||
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
// convert project membership id => user id
|
||||
await knex(TableName.AccessApprovalPolicyApprover).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverId`]))
|
||||
});
|
||||
// drop the old field
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
if (hasApproverId) tb.dropColumn("approverId");
|
||||
tb.uuid("approverUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST ------------
|
||||
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
|
||||
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
|
||||
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
|
||||
|
||||
if (hasAccessApprovalRequestTable) {
|
||||
// new fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (!hasRequestedByUserId) {
|
||||
tb.uuid("requestedByUserId");
|
||||
tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
}
|
||||
});
|
||||
|
||||
// copy the assigned project membership => user id to new fields
|
||||
await knex(TableName.AccessApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
requestedByUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedBy`]))
|
||||
});
|
||||
// drop old fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (hasRequestedBy) {
|
||||
// DROP AT A LATER TIME
|
||||
// tb.dropColumn("requestedBy");
|
||||
|
||||
// ADD ALLOW NULLABLE FOR NOW
|
||||
tb.uuid("requestedBy").nullable().alter();
|
||||
}
|
||||
tb.uuid("requestedByUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
|
||||
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
|
||||
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
|
||||
if (!hasReviewerUserId) {
|
||||
// new fields
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
// if (hasMemberId) tb.setNullable("member");
|
||||
tb.uuid("reviewerUserId");
|
||||
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
|
||||
});
|
||||
// copy project membership => user id to new fields
|
||||
await knex(TableName.AccessApprovalRequestReviewer).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
reviewerUserId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.member`]))
|
||||
});
|
||||
// drop table
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
if (hasMemberId) {
|
||||
// DROP AT A LATER TIME
|
||||
// tb.dropColumn("member");
|
||||
|
||||
// ADD ALLOW NULLABLE FOR NOW
|
||||
tb.uuid("member").nullable().alter();
|
||||
}
|
||||
tb.uuid("reviewerUserId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
|
||||
const projectUserAdditionalPrivilegeHasProjectMembershipId = await knex.schema.hasColumn(
|
||||
TableName.ProjectUserAdditionalPrivilege,
|
||||
"projectMembershipId"
|
||||
);
|
||||
|
||||
const projectUserAdditionalPrivilegeHasUserId = await knex.schema.hasColumn(
|
||||
TableName.ProjectUserAdditionalPrivilege,
|
||||
"userId"
|
||||
);
|
||||
|
||||
if (!projectUserAdditionalPrivilegeHasUserId) {
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
tb.uuid("userId");
|
||||
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
|
||||
tb.string("projectId");
|
||||
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
await knex(TableName.ProjectUserAdditionalPrivilege)
|
||||
.update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
userId: knex(TableName.ProjectMembership)
|
||||
.select("userId")
|
||||
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`])),
|
||||
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
projectId: knex(TableName.ProjectMembership)
|
||||
.select("projectId")
|
||||
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`]))
|
||||
})
|
||||
.whereNotNull("projectMembershipId");
|
||||
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
tb.uuid("userId").notNullable().alter();
|
||||
tb.string("projectId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
if (projectUserAdditionalPrivilegeHasProjectMembershipId) {
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
// DROP AT A LATER TIME
|
||||
// tb.dropColumn("projectMembershipId");
|
||||
|
||||
// ADD ALLOW NULLABLE FOR NOW
|
||||
tb.uuid("projectMembershipId").nullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
// We remove project user additional privileges first, because it may delete records in the database where the project membership is not found.
// The project membership won't be found on records created by group members. In those cases we just delete the record and continue.
// When the additional privilege record is deleted, it will cascade delete the access request created by the group member.
|
||||
|
||||
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
|
||||
const hasUserId = await knex.schema.hasColumn(TableName.ProjectUserAdditionalPrivilege, "userId");
|
||||
const hasProjectMembershipId = await knex.schema.hasColumn(
|
||||
TableName.ProjectUserAdditionalPrivilege,
|
||||
"projectMembershipId"
|
||||
);
|
||||
|
||||
// If it doesn't have the userId field, then the up migration has not run
|
||||
if (!hasUserId) {
|
||||
return;
|
||||
}
|
||||
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
if (!hasProjectMembershipId) {
|
||||
tb.uuid("projectMembershipId");
|
||||
tb.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasProjectMembershipId) {
|
||||
// First, update records where a matching project membership exists
|
||||
await knex(TableName.ProjectUserAdditionalPrivilege).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
projectMembershipId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.userId`]))
|
||||
});
|
||||
|
||||
await knex(TableName.AccessApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
projectMembershipId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.userId`]))
|
||||
});
|
||||
|
||||
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
|
||||
tb.dropColumn("userId");
|
||||
tb.dropColumn("projectId");
|
||||
|
||||
tb.uuid("projectMembershipId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// Then, delete records where no matching project membership was found
|
||||
await knex(TableName.ProjectUserAdditionalPrivilege).whereNull("projectMembershipId").delete();
|
||||
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
|
||||
|
||||
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
|
||||
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
|
||||
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
|
||||
|
||||
if (hasApproverUserId) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
if (!hasApproverId) {
|
||||
tb.uuid("approverId");
|
||||
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasApproverId) {
|
||||
await knex(TableName.AccessApprovalPolicyApprover).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
approverId: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
|
||||
tb.dropColumn("approverUserId");
|
||||
|
||||
tb.uuid("approverId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST ------------
|
||||
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
|
||||
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
|
||||
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
|
||||
|
||||
if (hasAccessApprovalRequestTable) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (!hasRequestedBy) {
|
||||
tb.uuid("requestedBy");
|
||||
tb.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
}
|
||||
});
|
||||
|
||||
// Try to find a project membership based on the AccessApprovalRequest.requestedByUserId and AccessApprovalRequest.policyId(reference to AccessApprovalRequestPolicy).envId(reference to Environment).projectId(reference to Project)
|
||||
// If a project membership is found, set the AccessApprovalRequest.requestedBy to the project membership id
|
||||
// If a project membership is not found, remove the AccessApprovalRequest record
|
||||
|
||||
await knex(TableName.AccessApprovalRequest).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
requestedBy: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedByUserId`]))
|
||||
});
|
||||
|
||||
// Then, delete records where no matching project membership was found
|
||||
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
|
||||
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
|
||||
if (hasRequestedByUserId) {
|
||||
tb.dropColumn("requestedByUserId");
|
||||
}
|
||||
if (hasRequestedBy) tb.uuid("requestedBy").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
|
||||
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
|
||||
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
|
||||
|
||||
if (hasReviewerUserId) {
|
||||
if (!hasMemberId) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
tb.uuid("member");
|
||||
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
await knex(TableName.AccessApprovalRequestReviewer).update({
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore because generate schema happens after this
|
||||
member: knex(TableName.ProjectMembership)
|
||||
.select("id")
|
||||
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`]))
|
||||
});
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
|
||||
tb.dropColumn("reviewerUserId");
|
||||
|
||||
tb.uuid("member").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
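Every block in this migration repeats one pattern: add the user-keyed column as nullable, backfill it from the project-membership table with a correlated subquery, then drop or relax the old membership-keyed column and tighten the new one to NOT NULL. A condensed sketch of that pattern with generic table names in place of the TableName constants:

import { Knex } from "knex";

// Sketch only: "approvers" and "memberships" stand in for the real tables in this migration.
export async function up(knex: Knex): Promise<void> {
  // 1. Add the new column as nullable so existing rows remain valid during the backfill.
  await knex.schema.alterTable("approvers", (tb) => {
    tb.uuid("approverUserId");
    tb.foreign("approverUserId").references("id").inTable("users").onDelete("CASCADE");
  });

  // 2. Correlated subquery: for each approver row, look up the userId of the membership
  //    referenced by the old approverId column. knex.raw("??", [...]) binds a column identifier.
  await knex("approvers").update({
    approverUserId: knex("memberships")
      .select("userId")
      .where("id", knex.raw("??", ["approvers.approverId"]))
  });

  // 3. Only after every row has a value, drop the old column and enforce NOT NULL.
  await knex.schema.alterTable("approvers", (tb) => {
    tb.dropColumn("approverId");
    tb.uuid("approverUserId").notNullable().alter();
  });
}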
@@ -1,178 +1,8 @@
|
||||
/* eslint-disable @typescript-eslint/ban-ts-comment */
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { selectAllTableCols } from "@app/lib/knex/select";
|
||||
|
||||
import { SecretKeyEncoding, SecretType, TableName } from "../schemas";
|
||||
import { SecretType, TableName } from "../schemas";
|
||||
import { createJunctionTable, createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
import { getSecretManagerDataKey } from "./utils/kms";
|
||||
|
||||
const backfillWebhooks = async (knex: Knex) => {
|
||||
const hasEncryptedSecretKeyWithKms = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKeyWithKms");
|
||||
const hasEncryptedWebhookUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
|
||||
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
|
||||
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
|
||||
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
|
||||
const hasEncryptedSecretKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKey");
|
||||
const hasIV = await knex.schema.hasColumn(TableName.Webhook, "iv");
|
||||
const hasTag = await knex.schema.hasColumn(TableName.Webhook, "tag");
|
||||
const hasKeyEncoding = await knex.schema.hasColumn(TableName.Webhook, "keyEncoding");
|
||||
const hasAlgorithm = await knex.schema.hasColumn(TableName.Webhook, "algorithm");
|
||||
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
|
||||
|
||||
await knex.schema.alterTable(TableName.Webhook, (t) => {
|
||||
if (!hasEncryptedSecretKeyWithKms) t.binary("encryptedSecretKeyWithKms");
|
||||
if (!hasEncryptedWebhookUrl) t.binary("encryptedUrl");
|
||||
if (hasUrl) t.string("url").nullable().alter();
|
||||
});
|
||||
|
||||
const kmsEncryptorGroupByProjectId: Record<string, Awaited<ReturnType<typeof getSecretManagerDataKey>>["encryptor"]> =
|
||||
{};
|
||||
if (hasUrlCipherText && hasUrlIV && hasUrlTag && hasEncryptedSecretKey && hasIV && hasTag) {
|
||||
// eslint-disable-next-line
|
||||
const webhooksToFill = await knex(TableName.Webhook)
|
||||
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
|
||||
.whereNull("encryptedUrl")
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore knex migration fails
|
||||
.select(selectAllTableCols(TableName.Webhook))
|
||||
.select("projectId");
|
||||
|
||||
const updatedWebhooks = [];
|
||||
for (const webhook of webhooksToFill) {
|
||||
if (!kmsEncryptorGroupByProjectId[webhook.projectId]) {
|
||||
// eslint-disable-next-line
|
||||
const { encryptor } = await getSecretManagerDataKey(knex, webhook.projectId);
|
||||
kmsEncryptorGroupByProjectId[webhook.projectId] = encryptor;
|
||||
}
|
||||
|
||||
const kmsEncryptor = kmsEncryptorGroupByProjectId[webhook.projectId];
|
||||
|
||||
// @ts-ignore post migration fails
|
||||
let webhookUrl = webhook.url;
|
||||
let webhookSecretKey;
|
||||
|
||||
// @ts-ignore post migration fails
|
||||
if (webhook.urlTag && webhook.urlCipherText && webhook.urlIV) {
|
||||
webhookUrl = infisicalSymmetricDecrypt({
|
||||
// @ts-ignore post migration fails
|
||||
keyEncoding: webhook.keyEncoding as SecretKeyEncoding,
|
||||
// @ts-ignore post migration fails
|
||||
ciphertext: webhook.urlCipherText,
|
||||
// @ts-ignore post migration fails
|
||||
iv: webhook.urlIV,
|
||||
// @ts-ignore post migration fails
|
||||
tag: webhook.urlTag
|
||||
});
|
||||
}
|
||||
// @ts-ignore post migration fails
|
||||
if (webhook.encryptedSecretKey && webhook.iv && webhook.tag) {
|
||||
webhookSecretKey = infisicalSymmetricDecrypt({
|
||||
// @ts-ignore post migration fails
|
||||
keyEncoding: webhook.keyEncoding as SecretKeyEncoding,
|
||||
// @ts-ignore post migration fails
|
||||
ciphertext: webhook.encryptedSecretKey,
|
||||
// @ts-ignore post migration fails
|
||||
iv: webhook.iv,
|
||||
// @ts-ignore post migration fails
|
||||
tag: webhook.tag
|
||||
});
|
||||
}
|
||||
const { projectId, ...el } = webhook;
|
||||
updatedWebhooks.push({
|
||||
...el,
|
||||
encryptedSecretKeyWithKms: webhookSecretKey
|
||||
? kmsEncryptor({ plainText: Buffer.from(webhookSecretKey) }).cipherTextBlob
|
||||
: null,
|
||||
encryptedUrl: kmsEncryptor({ plainText: Buffer.from(webhookUrl) }).cipherTextBlob
|
||||
});
|
||||
}
|
||||
if (updatedWebhooks.length) {
|
||||
// eslint-disable-next-line
|
||||
await knex(TableName.Webhook).insert(updatedWebhooks).onConflict("id").merge();
|
||||
}
|
||||
}
|
||||
await knex.schema.alterTable(TableName.Webhook, (t) => {
|
||||
t.binary("encryptedUrl").notNullable().alter();
|
||||
|
||||
if (hasUrlIV) t.dropColumn("urlIV");
|
||||
if (hasUrlCipherText) t.dropColumn("urlCipherText");
|
||||
if (hasUrlTag) t.dropColumn("urlTag");
|
||||
if (hasIV) t.dropColumn("iv");
|
||||
if (hasTag) t.dropColumn("tag");
|
||||
if (hasEncryptedSecretKey) t.dropColumn("encryptedSecretKey");
|
||||
if (hasKeyEncoding) t.dropColumn("keyEncoding");
|
||||
if (hasAlgorithm) t.dropColumn("algorithm");
|
||||
if (hasUrl) t.dropColumn("url");
|
||||
});
|
||||
};
|
||||
|
||||
const backfillDynamicSecretConfigs = async (knex: Knex) => {
|
||||
const hasEncryptedConfig = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedConfig");
|
||||
|
||||
const hasInputCipherText = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
|
||||
const hasInputIV = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
|
||||
const hasInputTag = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
|
||||
const hasKeyEncoding = await knex.schema.hasColumn(TableName.DynamicSecret, "keyEncoding");
|
||||
const hasAlgorithm = await knex.schema.hasColumn(TableName.DynamicSecret, "algorithm");
|
||||
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
if (!hasEncryptedConfig) t.binary("encryptedConfig");
|
||||
});
|
||||
const kmsEncryptorGroupByProjectId: Record<string, Awaited<ReturnType<typeof getSecretManagerDataKey>>["encryptor"]> =
|
||||
{};
|
||||
if (hasInputCipherText && hasInputIV && hasInputTag) {
|
||||
// eslint-disable-next-line
|
||||
const dynamicSecretConfigs = await knex(TableName.DynamicSecret)
|
||||
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
|
||||
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
|
||||
.whereNull("encryptedConfig")
|
||||
// @ts-ignore post migration fails
|
||||
.select(selectAllTableCols(TableName.DynamicSecret))
|
||||
.select("projectId");
|
||||
|
||||
const updatedConfigs = [];
|
||||
for (const dynamicSecretConfig of dynamicSecretConfigs) {
|
||||
if (!kmsEncryptorGroupByProjectId[dynamicSecretConfig.projectId]) {
|
||||
// eslint-disable-next-line
|
||||
const { encryptor } = await getSecretManagerDataKey(knex, dynamicSecretConfig.projectId);
|
||||
kmsEncryptorGroupByProjectId[dynamicSecretConfig.projectId] = encryptor;
|
||||
}
|
||||
|
||||
const kmsEncryptor = kmsEncryptorGroupByProjectId[dynamicSecretConfig.projectId];
|
||||
const inputConfig = infisicalSymmetricDecrypt({
|
||||
// @ts-ignore post migration fails
|
||||
keyEncoding: dynamicSecretConfig.keyEncoding as SecretKeyEncoding,
|
||||
// @ts-ignore post migration fails
|
||||
ciphertext: dynamicSecretConfig.inputCiphertext as string,
|
||||
// @ts-ignore post migration fails
|
||||
iv: dynamicSecretConfig.inputIV as string,
|
||||
// @ts-ignore post migration fails
|
||||
tag: dynamicSecretConfig.inputTag as string
|
||||
});
|
||||
|
||||
const { projectId, ...el } = dynamicSecretConfig;
|
||||
updatedConfigs.push({
|
||||
...el,
|
||||
encryptedConfig: kmsEncryptor({ plainText: Buffer.from(inputConfig) }).cipherTextBlob
|
||||
});
|
||||
}
|
||||
if (updatedConfigs.length) {
|
||||
// eslint-disable-next-line
|
||||
await knex(TableName.DynamicSecret).insert(updatedConfigs).onConflict("id").merge();
|
||||
}
|
||||
}
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
t.binary("encryptedConfig").notNullable().alter();
|
||||
|
||||
if (hasInputTag) t.dropColumn("inputTag");
|
||||
if (hasInputIV) t.dropColumn("inputIV");
|
||||
if (hasInputCipherText) t.dropColumn("inputCiphertext");
|
||||
if (hasKeyEncoding) t.dropColumn("keyEncoding");
|
||||
if (hasAlgorithm) t.dropColumn("algorithm");
|
||||
});
|
||||
};
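Both backfill helpers above follow one re-encryption recipe: select the rows that still lack the new KMS column, decrypt each value with the legacy symmetric helper, re-encrypt it with the project-scoped KMS data key (caching one encryptor per project), and write everything back in a single batched upsert. A condensed sketch of that flow for the webhook URL case; the table names are assumed placeholders and the two helpers are only declared here, since their real implementations are imported by the migration:

import { Knex } from "knex";

// Simplified stand-ins for getSecretManagerDataKey and infisicalSymmetricDecrypt.
type Encryptor = (args: { plainText: Buffer }) => { cipherTextBlob: Buffer };
declare function getSecretManagerDataKey(knex: Knex, projectId: string): Promise<{ encryptor: Encryptor }>;
declare function legacyDecrypt(args: { keyEncoding: string; ciphertext: string; iv: string; tag: string }): string;

async function backfillWebhookUrlsSketch(knex: Knex): Promise<void> {
  // Cache one encryptor per project so the KMS data key is derived only once per project.
  const encryptorByProject: Record<string, Encryptor> = {};

  const rows = await knex("webhooks")
    .join("environments", "environments.id", "webhooks.envId")
    .whereNull("encryptedUrl")
    .select("webhooks.*")
    .select("environments.projectId");

  const updated = [];
  for (const row of rows) {
    if (!encryptorByProject[row.projectId]) {
      // eslint-disable-next-line no-await-in-loop
      const { encryptor } = await getSecretManagerDataKey(knex, row.projectId);
      encryptorByProject[row.projectId] = encryptor;
    }
    const plainUrl = legacyDecrypt({ keyEncoding: row.keyEncoding, ciphertext: row.urlCipherText, iv: row.urlIV, tag: row.urlTag });
    const { projectId, ...rest } = row;
    updated.push({ ...rest, encryptedUrl: encryptorByProject[projectId]({ plainText: Buffer.from(plainUrl) }).cipherTextBlob });
  }

  // insert(...).onConflict("id").merge() performs the batched upsert used by the real migration.
  if (updated.length) {
    await knex("webhooks").insert(updated).onConflict("id").merge();
  }
}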
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesSecretV2TableExist = await knex.schema.hasTable(TableName.SecretV2);
|
||||
@@ -314,14 +144,6 @@ export async function up(knex: Knex): Promise<void> {
|
||||
t.foreign("rotationId").references("id").inTable(TableName.SecretRotation).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Webhook)) {
|
||||
await backfillWebhooks(knex);
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
await backfillDynamicSecretConfigs(knex);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
@@ -356,49 +178,4 @@ export async function down(knex: Knex): Promise<void> {
|
||||
if (hasEncryptedAwsIamAssumRole) t.dropColumn("encryptedAwsAssumeIamRoleArn");
|
||||
});
|
||||
}
|
||||
if (await knex.schema.hasTable(TableName.Webhook)) {
|
||||
const hasEncryptedWebhookSecretKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKeyWithKms");
|
||||
const hasEncryptedWebhookUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
|
||||
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
|
||||
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
|
||||
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
|
||||
const hasEncryptedSecretKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKey");
|
||||
const hasIV = await knex.schema.hasColumn(TableName.Webhook, "iv");
|
||||
const hasTag = await knex.schema.hasColumn(TableName.Webhook, "tag");
|
||||
const hasKeyEncoding = await knex.schema.hasColumn(TableName.Webhook, "keyEncoding");
|
||||
const hasAlgorithm = await knex.schema.hasColumn(TableName.Webhook, "algorithm");
|
||||
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
|
||||
|
||||
await knex.schema.alterTable(TableName.Webhook, (t) => {
|
||||
if (hasEncryptedWebhookSecretKey) t.dropColumn("encryptedSecretKeyWithKms");
|
||||
if (hasEncryptedWebhookUrl) t.dropColumn("encryptedUrl");
|
||||
if (!hasUrl) t.string("url");
|
||||
if (!hasEncryptedSecretKey) t.string("encryptedSecretKey");
|
||||
if (!hasIV) t.string("iv");
|
||||
if (!hasTag) t.string("tag");
|
||||
if (!hasAlgorithm) t.string("algorithm");
|
||||
if (!hasKeyEncoding) t.string("keyEncoding");
|
||||
if (!hasUrlCipherText) t.string("urlCipherText");
|
||||
if (!hasUrlIV) t.string("urlIV");
|
||||
if (!hasUrlTag) t.string("urlTag");
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const hasEncryptedConfig = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedConfig");
|
||||
|
||||
const hasInputIV = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
|
||||
const hasInputCipherText = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
|
||||
const hasInputTag = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
|
||||
const hasAlgorithm = await knex.schema.hasColumn(TableName.DynamicSecret, "algorithm");
|
||||
const hasKeyEncoding = await knex.schema.hasColumn(TableName.DynamicSecret, "keyEncoding");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
if (hasEncryptedConfig) t.dropColumn("encryptedConfig");
|
||||
if (!hasInputIV) t.string("inputIV");
|
||||
if (!hasInputCipherText) t.text("inputCiphertext");
|
||||
if (!hasInputTag) t.string("inputTag");
|
||||
if (!hasAlgorithm) t.string("algorithm");
|
||||
if (!hasKeyEncoding) t.string("keyEncoding");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
117
backend/src/db/migrations/20240802181855_ca-cert-version.ts
Normal file
117
backend/src/db/migrations/20240802181855_ca-cert-version.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
const hasActiveCaCertIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId");
|
||||
if (!hasActiveCaCertIdColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.uuid("activeCaCertId").nullable();
|
||||
t.foreign("activeCaCertId").references("id").inTable(TableName.CertificateAuthorityCert);
|
||||
});
|
||||
|
||||
await knex.raw(`
|
||||
UPDATE "${TableName.CertificateAuthority}" ca
|
||||
SET "activeCaCertId" = cac.id
|
||||
FROM "${TableName.CertificateAuthorityCert}" cac
|
||||
WHERE ca.id = cac."caId"
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
|
||||
const hasVersionColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version");
|
||||
if (!hasVersionColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.integer("version").nullable();
|
||||
t.dropUnique(["caId"]);
|
||||
});
|
||||
|
||||
await knex(TableName.CertificateAuthorityCert).update({ version: 1 }).whereNull("version");
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.integer("version").notNullable().alter();
|
||||
});
|
||||
}
|
||||
|
||||
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId");
|
||||
if (!hasCaSecretIdColumn) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.uuid("caSecretId").nullable();
|
||||
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
|
||||
});
|
||||
|
||||
await knex.raw(`
|
||||
UPDATE "${TableName.CertificateAuthorityCert}" cert
|
||||
SET "caSecretId" = (
|
||||
SELECT sec.id
|
||||
FROM "${TableName.CertificateAuthoritySecret}" sec
|
||||
WHERE sec."caId" = cert."caId"
|
||||
)
|
||||
`);
|
||||
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.uuid("caSecretId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthoritySecret)) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthoritySecret, (t) => {
|
||||
t.dropUnique(["caId"]);
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Certificate)) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.uuid("caCertId").nullable();
|
||||
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
|
||||
});
|
||||
|
||||
await knex.raw(`
|
||||
UPDATE "${TableName.Certificate}" cert
|
||||
SET "caCertId" = (
|
||||
SELECT caCert.id
|
||||
FROM "${TableName.CertificateAuthorityCert}" caCert
|
||||
WHERE caCert."caId" = cert."caId"
|
||||
)
|
||||
`);
|
||||
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.uuid("caCertId").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
|
||||
if (await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId")) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
|
||||
t.dropColumn("activeCaCertId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
|
||||
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version")) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.dropColumn("version");
|
||||
});
|
||||
}
|
||||
|
||||
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId")) {
|
||||
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
|
||||
t.dropColumn("caSecretId");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.Certificate)) {
|
||||
if (await knex.schema.hasColumn(TableName.Certificate, "caCertId")) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.dropColumn("caCertId");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
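When a backfill copies a value across tables in one shot, this migration drops to raw SQL and uses Postgres's UPDATE ... FROM form rather than a per-row subquery. A minimal sketch of that step in isolation, with placeholder table names standing in for the TableName constants:

import { Knex } from "knex";

// Sketch: copy the current CA certificate id onto each certificate-authority row,
// assuming placeholder tables "certificate_authorities" and "certificate_authority_certs".
export async function backfillActiveCaCertSketch(knex: Knex): Promise<void> {
  await knex.schema.alterTable("certificate_authorities", (t) => {
    t.uuid("activeCaCertId").nullable();
    t.foreign("activeCaCertId").references("id").inTable("certificate_authority_certs");
  });

  // Postgres UPDATE ... FROM: one set-based statement instead of a correlated subquery per row.
  await knex.raw(`
    UPDATE "certificate_authorities" ca
    SET "activeCaCertId" = cac.id
    FROM "certificate_authority_certs" cac
    WHERE ca.id = cac."caId"
  `);
}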
@@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
|
||||
await knex.schema.alterTable(TableName.RateLimit, (t) => {
|
||||
if (hasCreationLimitCol) {
|
||||
t.dropColumn("creationLimit");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
|
||||
await knex.schema.alterTable(TableName.RateLimit, (t) => {
|
||||
if (!hasCreationLimitCol) {
|
||||
t.integer("creationLimit").defaultTo(30).notNullable();
|
||||
}
|
||||
});
|
||||
}
|
21 backend/src/db/migrations/20240806185442_drop-tag-name.ts (Normal file)
@@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
|
||||
if (hasNameField) {
|
||||
await knex.schema.alterTable(TableName.SecretTag, (t) => {
|
||||
t.dropColumn("name");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
|
||||
if (!hasNameField) {
|
||||
await knex.schema.alterTable(TableName.SecretTag, (t) => {
|
||||
t.string("name");
|
||||
});
|
||||
}
|
||||
}
|
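The two small migrations above (dropping creationLimit from the rate-limit table and name from secret tags) repeat the same guard: check hasColumn before dropping or re-adding, so the migration is safe to run against a database in either state. A tiny helper sketch capturing that idiom; the helper name is illustrative and not part of the codebase:

import { Knex } from "knex";

// Illustrative helper: drop a column only when it exists, making the migration re-runnable.
export async function dropColumnIfExists(knex: Knex, table: string, column: string): Promise<void> {
  if (await knex.schema.hasColumn(table, column)) {
    await knex.schema.alterTable(table, (t) => {
      t.dropColumn(column);
    });
  }
}

// Usage sketch mirroring the migrations above:
// export async function up(knex: Knex): Promise<void> {
//   await dropColumnIfExists(knex, "secret_tags", "name");
// }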
62 backend/src/db/migrations/20240818024923_cert-alerting.ts (Normal file)
@@ -0,0 +1,62 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.PkiCollection))) {
|
||||
await knex.schema.createTable(TableName.PkiCollection, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
t.string("name").notNullable();
|
||||
t.string("description").notNullable();
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.PkiCollection);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.PkiCollectionItem))) {
|
||||
await knex.schema.createTable(TableName.PkiCollectionItem, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.uuid("pkiCollectionId").notNullable();
|
||||
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
|
||||
t.uuid("caId").nullable();
|
||||
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
t.uuid("certId").nullable();
|
||||
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.PkiCollectionItem);
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.PkiAlert))) {
|
||||
await knex.schema.createTable(TableName.PkiAlert, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.timestamps(true, true, true);
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
t.uuid("pkiCollectionId").notNullable();
|
||||
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
|
||||
t.string("name").notNullable();
|
||||
t.integer("alertBeforeDays").notNullable();
|
||||
t.string("recipientEmails").notNullable();
|
||||
t.unique(["name", "projectId"]);
|
||||
});
|
||||
}
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.PkiAlert);
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.PkiAlert);
|
||||
await dropOnUpdateTrigger(knex, TableName.PkiAlert);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.PkiCollectionItem);
|
||||
await dropOnUpdateTrigger(knex, TableName.PkiCollectionItem);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.PkiCollection);
|
||||
await dropOnUpdateTrigger(knex, TableName.PkiCollection);
|
||||
}
|
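The cert-alerting tables tie alerts to collections of certificates: PkiAlert points at a PkiCollection, PkiCollectionItem links the collection to individual certificates or CAs, and alertBeforeDays combined with a certificate's notAfter date decides when an alert should fire. A hypothetical query sketch of that join, with assumed snake_case table names standing in for the TableName constants (the real alerting service is not part of this diff):

import { Knex } from "knex";

// Hypothetical lookup: certificates covered by an alert's collection that expire
// within the alert's alertBeforeDays window. Column names follow the migration above.
async function findExpiringCertificates(db: Knex, alertId: string) {
  const alert = await db("pki_alerts").where("id", alertId).first();
  if (!alert) return [];

  const cutoff = new Date(Date.now() + alert.alertBeforeDays * 24 * 60 * 60 * 1000);

  return db("pki_collection_items")
    .join("certificates", "certificates.id", "pki_collection_items.certId")
    .where("pki_collection_items.pkiCollectionId", alert.pkiCollectionId)
    .whereNotNull("pki_collection_items.certId")
    .andWhere("certificates.notAfter", "<=", cutoff)
    .select("certificates.*");
}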
@@ -0,0 +1,55 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
|
||||
if (!hasCertificateTemplateTable) {
|
||||
await knex.schema.createTable(TableName.CertificateTemplate, (tb) => {
|
||||
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
tb.uuid("caId").notNullable();
|
||||
tb.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
|
||||
tb.uuid("pkiCollectionId");
|
||||
tb.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("SET NULL");
|
||||
tb.string("name").notNullable();
|
||||
tb.string("commonName").notNullable();
|
||||
tb.string("subjectAlternativeName").notNullable();
|
||||
tb.string("ttl").notNullable();
|
||||
tb.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.CertificateTemplate);
|
||||
}
|
||||
|
||||
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
|
||||
TableName.Certificate,
|
||||
"certificateTemplateId"
|
||||
);
|
||||
|
||||
if (!doesCertificateTableHaveTemplateId) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (tb) => {
|
||||
tb.uuid("certificateTemplateId");
|
||||
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("SET NULL");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
|
||||
TableName.Certificate,
|
||||
"certificateTemplateId"
|
||||
);
|
||||
|
||||
if (doesCertificateTableHaveTemplateId) {
|
||||
await knex.schema.alterTable(TableName.Certificate, (t) => {
|
||||
t.dropColumn("certificateTemplateId");
|
||||
});
|
||||
}
|
||||
|
||||
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
|
||||
if (hasCertificateTemplateTable) {
|
||||
await knex.schema.dropTable(TableName.CertificateTemplate);
|
||||
await dropOnUpdateTrigger(knex, TableName.CertificateTemplate);
|
||||
}
|
||||
}
|
@@ -9,10 +9,10 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const AccessApprovalPoliciesApproversSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
approverId: z.string().uuid(),
|
||||
policyId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
approverUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
|
||||
|
@@ -9,11 +9,11 @@ import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const AccessApprovalRequestsReviewersSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
member: z.string().uuid(),
|
||||
status: z.string(),
|
||||
requestId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
reviewerUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TAccessApprovalRequestsReviewers = z.infer<typeof AccessApprovalRequestsReviewersSchema>;
|
||||
|
@@ -11,12 +11,12 @@ export const AccessApprovalRequestsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
policyId: z.string().uuid(),
|
||||
privilegeId: z.string().uuid().nullable().optional(),
|
||||
requestedBy: z.string().uuid(),
|
||||
isTemporary: z.boolean(),
|
||||
temporaryRange: z.string().nullable().optional(),
|
||||
permissions: z.unknown(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
requestedByUserId: z.string().uuid()
|
||||
});
|
||||
|
||||
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;
|
||||
|
@@ -27,7 +27,8 @@ export const CertificateAuthoritiesSchema = z.object({
|
||||
maxPathLength: z.number().nullable().optional(),
|
||||
keyAlgorithm: z.string(),
|
||||
notBefore: z.date().nullable().optional(),
|
||||
notAfter: z.date().nullable().optional()
|
||||
notAfter: z.date().nullable().optional(),
|
||||
activeCaCertId: z.string().uuid().nullable().optional()
|
||||
});
|
||||
|
||||
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
|
||||
|
@@ -15,7 +15,9 @@ export const CertificateAuthorityCertsSchema = z.object({
  updatedAt: z.date(),
  caId: z.string().uuid(),
  encryptedCertificate: zodBuffer,
  encryptedCertificateChain: zodBuffer
  encryptedCertificateChain: zodBuffer,
  version: z.number(),
  caSecretId: z.string().uuid()
});

export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;
backend/src/db/schemas/certificate-templates.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const CertificateTemplatesSchema = z.object({
  id: z.string().uuid(),
  caId: z.string().uuid(),
  pkiCollectionId: z.string().uuid().nullable().optional(),
  name: z.string(),
  commonName: z.string(),
  subjectAlternativeName: z.string(),
  ttl: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TCertificateTemplates = z.infer<typeof CertificateTemplatesSchema>;
export type TCertificateTemplatesInsert = Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>;
export type TCertificateTemplatesUpdate = Partial<Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>>;
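As an aside for readers navigating these generated files, here is a minimal usage sketch of the new schema and its companion types. The db Knex handle, the id and caId variables, and the assumption that TImmutableDBKeys covers id plus the timestamp columns are illustrative only and not part of this diff:

import { CertificateTemplatesSchema, TCertificateTemplatesInsert } from "@app/db/schemas";

// Validate a raw row fetched through Knex against the generated Zod schema (throws on drift)
const template = CertificateTemplatesSchema.parse(
  await db(TableName.CertificateTemplate).where({ id }).first()
);

// The Insert type strips the immutable keys, so callers cannot set them by accident
const newTemplate: TCertificateTemplatesInsert = {
  caId,
  name: "web-server",
  commonName: "example.com",
  subjectAlternativeName: "example.com",
  ttl: "90d"
};
await db(TableName.CertificateTemplate).insert(newTemplate);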
@@ -20,7 +20,9 @@ export const CertificatesSchema = z.object({
  notAfter: z.date(),
  revokedAt: z.date().nullable().optional(),
  revocationReason: z.number().nullable().optional(),
  altNames: z.string().default("").nullable().optional()
  altNames: z.string().default("").nullable().optional(),
  caCertId: z.string().uuid(),
  certificateTemplateId: z.string().uuid().nullable().optional()
});

export type TCertificates = z.infer<typeof CertificatesSchema>;
@@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const DynamicSecretsSchema = z.object({
@@ -16,12 +14,16 @@ export const DynamicSecretsSchema = z.object({
  type: z.string(),
  defaultTTL: z.string(),
  maxTTL: z.string().nullable().optional(),
  inputIV: z.string(),
  inputCiphertext: z.string(),
  inputTag: z.string(),
  algorithm: z.string().default("aes-256-gcm"),
  keyEncoding: z.string().default("utf8"),
  folderId: z.string().uuid(),
  status: z.string().nullable().optional(),
  statusDetails: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  encryptedConfig: zodBuffer
  updatedAt: z.date()
});

export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
@@ -14,6 +14,7 @@ export * from "./certificate-authority-crl";
export * from "./certificate-authority-secret";
export * from "./certificate-bodies";
export * from "./certificate-secrets";
export * from "./certificate-templates";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
@@ -52,6 +53,9 @@ export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
export * from "./pki-alerts";
export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-keys";
@@ -9,6 +9,10 @@ export enum TableName {
  Certificate = "certificates",
  CertificateBody = "certificate_bodies",
  CertificateSecret = "certificate_secrets",
  CertificateTemplate = "certificate_templates",
  PkiAlert = "pki_alerts",
  PkiCollection = "pki_collections",
  PkiCollectionItem = "pki_collection_items",
  Groups = "groups",
  GroupProjectMembership = "group_project_memberships",
  GroupProjectMembershipRole = "group_project_membership_roles",
backend/src/db/schemas/pki-alerts.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const PkiAlertsSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  pkiCollectionId: z.string().uuid(),
  name: z.string(),
  alertBeforeDays: z.number(),
  recipientEmails: z.string()
});

export type TPkiAlerts = z.infer<typeof PkiAlertsSchema>;
export type TPkiAlertsInsert = Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>;
export type TPkiAlertsUpdate = Partial<Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>>;
backend/src/db/schemas/pki-collection-items.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const PkiCollectionItemsSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  pkiCollectionId: z.string().uuid(),
  caId: z.string().uuid().nullable().optional(),
  certId: z.string().uuid().nullable().optional()
});

export type TPkiCollectionItems = z.infer<typeof PkiCollectionItemsSchema>;
export type TPkiCollectionItemsInsert = Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>;
export type TPkiCollectionItemsUpdate = Partial<Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>>;
backend/src/db/schemas/pki-collections.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const PkiCollectionsSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  name: z.string(),
  description: z.string()
});

export type TPkiCollections = z.infer<typeof PkiCollectionsSchema>;
export type TPkiCollectionsInsert = Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>;
export type TPkiCollectionsUpdate = Partial<Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>>;
@@ -10,7 +10,6 @@ import { TImmutableDBKeys } from "./models";
export const ProjectUserAdditionalPrivilegeSchema = z.object({
  id: z.string().uuid(),
  slug: z.string(),
  projectMembershipId: z.string().uuid(),
  isTemporary: z.boolean().default(false),
  temporaryMode: z.string().nullable().optional(),
  temporaryRange: z.string().nullable().optional(),
@@ -18,7 +17,9 @@ export const ProjectUserAdditionalPrivilegeSchema = z.object({
  temporaryAccessEndTime: z.date().nullable().optional(),
  permissions: z.unknown(),
  createdAt: z.date(),
  updatedAt: z.date()
  updatedAt: z.date(),
  userId: z.string().uuid(),
  projectId: z.string()
});

export type TProjectUserAdditionalPrivilege = z.infer<typeof ProjectUserAdditionalPrivilegeSchema>;
@@ -15,7 +15,6 @@ export const RateLimitSchema = z.object({
  authRateLimit: z.number().default(60),
  inviteUserRateLimit: z.number().default(30),
  mfaRateLimit: z.number().default(20),
  creationLimit: z.number().default(30),
  publicEndpointLimit: z.number().default(30),
  createdAt: z.date(),
  updatedAt: z.date()
@@ -9,7 +9,6 @@ import { TImmutableDBKeys } from "./models";

export const SecretTagsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  slug: z.string(),
  color: z.string().nullable().optional(),
  createdAt: z.date(),
@@ -5,22 +5,27 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const WebhooksSchema = z.object({
  id: z.string().uuid(),
  secretPath: z.string().default("/"),
  url: z.string(),
  lastStatus: z.string().nullable().optional(),
  lastRunErrorMessage: z.string().nullable().optional(),
  isDisabled: z.boolean().default(false),
  encryptedSecretKey: z.string().nullable().optional(),
  iv: z.string().nullable().optional(),
  tag: z.string().nullable().optional(),
  algorithm: z.string().nullable().optional(),
  keyEncoding: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
  type: z.string().default("general").nullable().optional(),
  encryptedSecretKeyWithKms: zodBuffer.nullable().optional(),
  encryptedUrl: zodBuffer
  urlCipherText: z.string().nullable().optional(),
  urlIV: z.string().nullable().optional(),
  urlTag: z.string().nullable().optional(),
  type: z.string().default("general").nullable().optional()
});

export type TWebhooks = z.infer<typeof WebhooksSchema>;
@@ -17,11 +17,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
name: z.string().optional(),
secretPath: z.string().trim().default("/"),
environment: z.string(),
approvers: z.string().array().min(1),
approverUserIds: z.string().array().min(1),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
})
.refine((data) => data.approvals <= data.approvers.length, {
.refine((data) => data.approvals <= data.approverUserIds.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
@@ -56,7 +56,16 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
}),
response: {
200: z.object({
approvals: sapPubSchema.extend({ approvers: z.string().array(), secretPath: z.string().optional() }).array()
approvals: sapPubSchema
.extend({
userApprovers: z
.object({
userId: z.string()
})
.array(),
secretPath: z.string().optional().nullable()
})
.array()
})
}
},
@@ -69,6 +78,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
projectSlug: req.query.projectSlug
});

return { approvals };
}
});
@@ -117,11 +127,11 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.trim()
.optional()
.transform((val) => (val === "" ? "/" : val)),
approvers: z.string().array().min(1),
approverUserIds: z.string().array().min(1),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
})
.refine((data) => data.approvals <= data.approvers.length, {
.refine((data) => data.approvals <= data.approverUserIds.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
@@ -1,10 +1,19 @@
import { z } from "zod";

import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema } from "@app/db/schemas";
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const approvalRequestUser = z.object({ userId: z.string() }).merge(
  UsersSchema.pick({
    email: true,
    firstName: true,
    lastName: true,
    username: true
  })
);

export const registerAccessApprovalRequestRouter = async (server: FastifyZodProvider) => {
  server.route({
    url: "/",
@@ -104,10 +113,11 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
}),
reviewers: z
.object({
member: z.string(),
userId: z.string(),
status: z.string()
})
.array()
.array(),
requestedByUser: approvalRequestUser
}).array()
})
}
@@ -131,7 +131,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
.default("/")
.transform(removeTrailingSlash)
.describe(DYNAMIC_SECRET_LEASES.RENEW.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.ttl)
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.environmentSlug)
}),
response: {
200: z.object({
@@ -58,7 +58,6 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
}),
response: {
@@ -1,9 +1,9 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TAccessApprovalPolicies } from "@app/db/schemas";
import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, mergeOneToManyRelation, ormify, selectAllTableCols, TFindFilter } from "@app/lib/knex";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";

export type TAccessApprovalPolicyDALFactory = ReturnType<typeof accessApprovalPolicyDALFactory>;
@@ -15,12 +15,12 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
// eslint-disable-next-line
.where(buildFindFilter(filter))
.join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.select(tx.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
@@ -35,18 +35,30 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.AccessApprovalPolicy}.id` as "id"]: id
});
const formatedDoc = mergeOneToManyRelation(
doc,
"id",
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
...el,
envId,
environment: { id: envId, name, slug }
const formattedDoc = sqlNestRelationships({
data: doc,
key: "id",
parentMapper: (data) => ({
environment: {
id: data.envId,
name: data.envName,
slug: data.envSlug
},
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
}),
({ approverId }) => approverId,
"approvers"
);
return formatedDoc?.[0];
childrenMapper: [
{
key: "approverUserId",
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
});

return formattedDoc?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "FindById" });
}
@@ -55,18 +67,32 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
try {
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
const formatedDoc = mergeOneToManyRelation(
docs,
"id",
({ approverId, envId, envName: name, envSlug: slug, ...el }) => ({
...el,
envId,
environment: { id: envId, name, slug }

const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
environment: {
id: data.envId,
name: data.envName,
slug: data.envSlug
},
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
// secretPath: data.secretPath || undefined,
}),
({ approverId }) => approverId,
"approvers"
);
return formatedDoc.map((policy) => ({ ...policy, secretPath: policy.secretPath || undefined }));
childrenMapper: [
{
key: "approverUserId",
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
});

return formattedDocs;
} catch (error) {
throw new DatabaseError({ error, name: "Find" });
}
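For context on the refactor above, a rough sketch of what sqlNestRelationships does with the flattened join rows; the row values here are invented for illustration, and the helper's exact behavior should be checked against @app/lib/knex:

// Two joined rows for the same policy id, one per approver
const rows = [
  { id: "p1", name: "prod policy", projectId: "proj1", envId: "e1", envName: "Production", envSlug: "prod", approverUserId: "u1" },
  { id: "p1", name: "prod policy", projectId: "proj1", envId: "e1", envName: "Production", envSlug: "prod", approverUserId: "u2" }
];

const nested = sqlNestRelationships({
  data: rows,
  key: "id", // rows sharing this key collapse into a single parent
  parentMapper: (row) => ({
    id: row.id,
    name: row.name,
    projectId: row.projectId,
    environment: { id: row.envId, name: row.envName, slug: row.envSlug }
  }),
  childrenMapper: [
    { key: "approverUserId", label: "userApprovers" as const, mapper: ({ approverUserId }) => ({ userId: approverUserId }) }
  ]
});
// nested => [{ id: "p1", ..., environment: { ... }, userApprovers: [{ userId: "u1" }, { userId: "u2" }] }]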
@@ -34,8 +34,7 @@ export const accessApprovalPolicyServiceFactory = ({
accessApprovalPolicyApproverDAL,
permissionService,
projectEnvDAL,
projectDAL,
projectMembershipDAL
projectDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createAccessApprovalPolicy = async ({
name,
@@ -45,7 +44,7 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath,
actorAuthMethod,
approvals,
approvers,
approverUserIds,
projectSlug,
environment,
enforcementLevel
@@ -53,7 +52,7 @@ export const accessApprovalPolicyServiceFactory = ({
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });

if (approvals > approvers.length)
if (approvals > approverUserIds.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });

const { permission } = await permissionService.getProjectPermission(
@@ -70,15 +69,6 @@ export const accessApprovalPolicyServiceFactory = ({
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new BadRequestError({ message: "Environment not found" });

const secretApprovers = await projectMembershipDAL.find({
projectId: project.id,
$in: { id: approvers }
});

if (secretApprovers.length !== approvers.length) {
throw new BadRequestError({ message: "Approver not found in project" });
}

await verifyApprovers({
projectId: project.id,
orgId: actorOrgId,
@@ -86,7 +76,7 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath,
actorAuthMethod,
permissionService,
userIds: secretApprovers.map((approver) => approver.userId)
userIds: approverUserIds
});

const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
@@ -101,8 +91,8 @@ export const accessApprovalPolicyServiceFactory = ({
tx
);
await accessApprovalPolicyApproverDAL.insertMany(
secretApprovers.map(({ id }) => ({
approverId: id,
approverUserIds.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),
tx
@@ -138,7 +128,7 @@ export const accessApprovalPolicyServiceFactory = ({

const updateAccessApprovalPolicy = async ({
policyId,
approvers,
approverUserIds,
secretPath,
name,
actorId,
@@ -171,16 +161,7 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
if (approvers) {
// Find the workspace project memberships of the users passed in the approvers array
const secretApprovers = await projectMembershipDAL.find(
{
projectId: accessApprovalPolicy.projectId,
$in: { id: approvers }
},
{ tx }
);

if (approverUserIds) {
await verifyApprovers({
projectId: accessApprovalPolicy.projectId,
orgId: actorOrgId,
@@ -188,15 +169,13 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath: doc.secretPath!,
actorAuthMethod,
permissionService,
userIds: secretApprovers.map((approver) => approver.userId)
userIds: approverUserIds
});

if (secretApprovers.length !== approvers.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
await accessApprovalPolicyApproverDAL.insertMany(
secretApprovers.map(({ id }) => ({
approverId: id,
approverUserIds.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),
tx
@@ -17,7 +17,7 @@ export type TCreateAccessApprovalPolicy = {
approvals: number;
secretPath: string;
environment: string;
approvers: string[];
approverUserIds: string[];
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
@@ -26,7 +26,7 @@ export type TCreateAccessApprovalPolicy = {
export type TUpdateAccessApprovalPolicy = {
policyId: string;
approvals?: number;
approvers?: string[];
approverUserIds?: string[];
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
@@ -1,7 +1,7 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests } from "@app/db/schemas";
import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests, TUsers } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";

@@ -40,6 +40,12 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.AccessApprovalPolicyApprover}.policyId`
)

.join<TUsers>(
db(TableName.Users).as("requestedByUser"),
`${TableName.AccessApprovalRequest}.requestedByUserId`,
`requestedByUser.id`
)

.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)

.select(selectAllTableCols(TableName.AccessApprovalRequest))
@@ -52,7 +58,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId")
)

.select(db.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))

.select(
db.ref("projectId").withSchema(TableName.Environment),
@@ -61,15 +67,20 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
)

.select(
db.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"),
db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"),
db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")
)

// TODO: ADD SUPPORT FOR GROUPS!!!!
.select(
db
.ref("projectMembershipId")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("privilegeMembershipId"),
db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
db.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
db.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
db.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),

db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeUserId"),
db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeMembershipId"),

db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeIsTemporary"),
db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryMode"),
db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryRange"),
@@ -102,9 +113,18 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
enforcementLevel: doc.policyEnforcementLevel,
envId: doc.policyEnvId
},
requestedByUser: {
userId: doc.requestedByUserId,
email: doc.requestedByUserEmail,
firstName: doc.requestedByUserFirstName,
lastName: doc.requestedByUserLastName,
username: doc.requestedByUserUsername
},
privilege: doc.privilegeId
? {
membershipId: doc.privilegeMembershipId,
userId: doc.privilegeUserId,
projectId: doc.projectId,
isTemporary: doc.privilegeIsTemporary,
temporaryMode: doc.privilegeTemporaryMode,
temporaryRange: doc.privilegeTemporaryRange,
@@ -118,11 +138,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
},
{ key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId }
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId }
]
});
@@ -146,30 +166,65 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.AccessApprovalPolicy}.id`
)

.join<TUsers>(
db(TableName.Users).as("requestedByUser"),
`${TableName.AccessApprovalRequest}.requestedByUserId`,
`requestedByUser.id`
)

.join(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)

.join<TUsers>(
db(TableName.Users).as("accessApprovalPolicyApproverUser"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
"accessApprovalPolicyApproverUser.id"
)

.leftJoin(
TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalRequestReviewer}.requestId`
)

.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalReviewerUser"),
`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`,
`accessApprovalReviewerUser.id`
)

.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
tx.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"),
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("username").withSchema("accessApprovalPolicyApproverUser").as("approverUsername"),
tx.ref("firstName").withSchema("accessApprovalPolicyApproverUser").as("approverFirstName"),
tx.ref("lastName").withSchema("accessApprovalPolicyApproverUser").as("approverLastName"),
tx.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
tx.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
tx.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),

tx.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer),

tx.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"),

tx.ref("email").withSchema("accessApprovalReviewerUser").as("reviewerEmail"),
tx.ref("username").withSchema("accessApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("accessApprovalReviewerUser").as("reviewerFirstName"),
tx.ref("lastName").withSchema("accessApprovalReviewerUser").as("reviewerLastName"),

tx.ref("id").withSchema(TableName.AccessApprovalPolicy).as("policyId"),
tx.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"),
tx.ref("projectId").withSchema(TableName.Environment),
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("approverId").withSchema(TableName.AccessApprovalPolicyApprover)
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals")
);

const findById = async (id: string, tx?: Knex) => {
@@ -189,15 +244,45 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel
},
requestedByUser: {
userId: el.requestedByUserId,
email: el.requestedByUserEmail,
firstName: el.requestedByUserFirstName,
lastName: el.requestedByUserLastName,
username: el.requestedByUserUsername
}
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
mapper: ({
reviewerUserId: userId,
reviewerStatus: status,
reviewerEmail: email,
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
},
{ key: "approverId", label: "approvers" as const, mapper: ({ approverId }) => approverId }
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({
approverUserId,
approverEmail: email,
approverUsername: username,
approverLastName: lastName,
approverFirstName: firstName
}) => ({
userId: approverUserId,
email,
firstName,
lastName,
username
})
}
]
});
if (!formatedDoc?.[0]) return;
@@ -235,7 +320,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
.where(`${TableName.Environment}.projectId`, projectId)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
.select(db.ref("member").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerMemberId"));
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));

const formattedRequests = sqlNestRelationships({
data: accessRequests,
@@ -245,9 +330,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}),
childrenMapper: [
{
key: "reviewerMemberId",
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerMemberId: member, reviewerStatus: status }) => (member ? { member, status } : undefined)
mapper: ({ reviewerUserId: reviewer, reviewerStatus: status }) =>
reviewer ? { reviewer, status } : undefined
}
]
});
@@ -52,7 +52,10 @@ type TSecretApprovalRequestServiceFactoryDep = {
>;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
smtpService: Pick<TSmtpService, "sendMail">;
userDAL: Pick<TUserDALFactory, "findUserByProjectMembershipId" | "findUsersByProjectMembershipIds">;
userDAL: Pick<
TUserDALFactory,
"findUserByProjectMembershipId" | "findUsersByProjectMembershipIds" | "find" | "findById"
>;
};

export type TAccessApprovalRequestServiceFactory = ReturnType<typeof accessApprovalRequestServiceFactory>;
@@ -94,7 +97,7 @@ export const accessApprovalRequestServiceFactory = ({
);
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });

const requestedByUser = await userDAL.findUserByProjectMembershipId(membership.id);
const requestedByUser = await userDAL.findById(actorId);
if (!requestedByUser) throw new UnauthorizedError({ message: "User not found" });

await projectDAL.checkProjectUpgradeStatus(project.id);
@@ -114,13 +117,15 @@ export const accessApprovalRequestServiceFactory = ({
policyId: policy.id
});

const approverUsers = await userDAL.findUsersByProjectMembershipIds(
approvers.map((approver) => approver.approverId)
);
const approverUsers = await userDAL.find({
$in: {
id: approvers.map((approver) => approver.approverUserId)
}
});

const duplicateRequests = await accessApprovalRequestDAL.find({
policyId: policy.id,
requestedBy: membership.id,
requestedByUserId: actorId,
permissions: JSON.stringify(requestedPermissions),
isTemporary
});
@@ -153,7 +158,7 @@ export const accessApprovalRequestServiceFactory = ({
const approvalRequest = await accessApprovalRequestDAL.create(
{
policyId: policy.id,
requestedBy: membership.id,
requestedByUserId: actorId,
temporaryRange: temporaryRange || null,
permissions: JSON.stringify(requestedPermissions),
isTemporary
@@ -212,7 +217,7 @@ export const accessApprovalRequestServiceFactory = ({
let requests = await accessApprovalRequestDAL.findRequestsWithPrivilegeByPolicyIds(policies.map((p) => p.id));

if (authorProjectMembershipId) {
requests = requests.filter((request) => request.requestedBy === authorProjectMembershipId);
requests = requests.filter((request) => request.requestedByUserId === actorId);
}

if (envSlug) {
@@ -246,8 +251,8 @@ export const accessApprovalRequestServiceFactory = ({

if (
!hasRole(ProjectMembershipRole.Admin) &&
accessApprovalRequest.requestedBy !== membership.id && // The request wasn't made by the current user
!policy.approvers.find((approverId) => approverId === membership.id) // The request isn't performed by an assigned approver
accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user
!policy.approvers.find((approver) => approver.userId === actorId) // The request isn't performed by an assigned approver
) {
throw new UnauthorizedError({ message: "You are not authorized to approve this request" });
}
@@ -273,7 +278,7 @@ export const accessApprovalRequestServiceFactory = ({
const review = await accessApprovalRequestReviewerDAL.findOne(
{
requestId: accessApprovalRequest.id,
member: membership.id
reviewerUserId: actorId
},
tx
);
@@ -282,7 +287,7 @@ export const accessApprovalRequestServiceFactory = ({
{
status,
requestId: accessApprovalRequest.id,
member: membership.id
reviewerUserId: actorId
},
tx
);
@@ -303,7 +308,8 @@ export const accessApprovalRequestServiceFactory = ({
// Permanent access
const privilege = await additionalPrivilegeDAL.create(
{
projectMembershipId: accessApprovalRequest.requestedBy,
userId: accessApprovalRequest.requestedByUserId,
projectId: accessApprovalRequest.projectId,
slug: `requested-privilege-${slugify(alphaNumericNanoId(12))}`,
permissions: JSON.stringify(accessApprovalRequest.permissions)
},
@@ -317,7 +323,8 @@ export const accessApprovalRequestServiceFactory = ({

const privilege = await additionalPrivilegeDAL.create(
{
projectMembershipId: accessApprovalRequest.requestedBy,
userId: accessApprovalRequest.requestedByUserId,
projectId: accessApprovalRequest.projectId,
slug: `requested-privilege-${slugify(alphaNumericNanoId(12))}`,
permissions: JSON.stringify(accessApprovalRequest.permissions),
isTemporary: true,
@@ -75,15 +75,16 @@ export const auditLogDALFactory = (db: TDbClient) => {
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 100); // time to breathe for db
});
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
} while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
} while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};

return { ...auditLogOrm, pruneAuditLog, find };
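Because the hunk above starts in the middle of the pruning loop, here is a simplified sketch of how the whole loop appears to be structured after this change; the table name, retention filter, and batch size are assumptions for illustration, while the retry counter and the do/while exit condition come from the diff:

let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;

do {
  try {
    // delete one batch of expired rows and keep the ids so we know when the table is drained
    deletedAuditLogIds = await db(TableName.AuditLog) // table name assumed
      .whereIn("id", db(TableName.AuditLog).select("id").where("expiresAt", "<", new Date()).limit(1000)) // filter and batch size assumed
      .del()
      .returning("id");
    numberOfRetryOnFailure = 0; // reset after a successful batch
  } catch (error) {
    numberOfRetryOnFailure += 1;
    logger.error(error, "Failed to delete audit log on pruning");
  } finally {
    // eslint-disable-next-line no-await-in-loop
    await new Promise((resolve) => {
      setTimeout(resolve, 10); // time to breathe for db
    });
  }
} while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);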
@@ -1,5 +1,6 @@
import { ForbiddenError } from "@casl/ability";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";

import { TPermissionServiceFactory } from "../permission/permission-service";
@@ -61,6 +62,10 @@ export const auditLogServiceFactory = ({
};

const createAuditLog = async (data: TCreateAuditLogDTO) => {
const appCfg = getConfig();
if (appCfg.DISABLE_AUDIT_LOG_GENERATION) {
return;
}
// add all cases in which project id or org id cannot be added
if (data.event.type !== EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH) {
if (!data.projectId && !data.orgId) throw new BadRequestError({ message: "Must either project id or org id" });
@@ -2,6 +2,7 @@ import { TProjectPermission } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";

export type TListProjectAuditLogDTO = {
auditLogActor?: string;
@@ -130,23 +131,42 @@ export enum EventType {
  GET_CA = "get-certificate-authority",
  UPDATE_CA = "update-certificate-authority",
  DELETE_CA = "delete-certificate-authority",
  RENEW_CA = "renew-certificate-authority",
  GET_CA_CSR = "get-certificate-authority-csr",
  GET_CA_CERTS = "get-certificate-authority-certs",
  GET_CA_CERT = "get-certificate-authority-cert",
  SIGN_INTERMEDIATE = "sign-intermediate",
  IMPORT_CA_CERT = "import-certificate-authority-cert",
  GET_CA_CRL = "get-certificate-authority-crl",
  ISSUE_CERT = "issue-cert",
  SIGN_CERT = "sign-cert",
  GET_CERT = "get-cert",
  DELETE_CERT = "delete-cert",
  REVOKE_CERT = "revoke-cert",
  GET_CERT_BODY = "get-cert-body",
  CREATE_PKI_ALERT = "create-pki-alert",
  GET_PKI_ALERT = "get-pki-alert",
  UPDATE_PKI_ALERT = "update-pki-alert",
  DELETE_PKI_ALERT = "delete-pki-alert",
  CREATE_PKI_COLLECTION = "create-pki-collection",
  GET_PKI_COLLECTION = "get-pki-collection",
  UPDATE_PKI_COLLECTION = "update-pki-collection",
  DELETE_PKI_COLLECTION = "delete-pki-collection",
  GET_PKI_COLLECTION_ITEMS = "get-pki-collection-items",
  ADD_PKI_COLLECTION_ITEM = "add-pki-collection-item",
  DELETE_PKI_COLLECTION_ITEM = "delete-pki-collection-item",
  CREATE_KMS = "create-kms",
  UPDATE_KMS = "update-kms",
  DELETE_KMS = "delete-kms",
  GET_KMS = "get-kms",
  UPDATE_PROJECT_KMS = "update-project-kms",
  GET_PROJECT_KMS_BACKUP = "get-project-kms-backup",
  LOAD_PROJECT_KMS_BACKUP = "load-project-kms-backup"
  LOAD_PROJECT_KMS_BACKUP = "load-project-kms-backup",
  ORG_ADMIN_ACCESS_PROJECT = "org-admin-accessed-project",
  CREATE_CERTIFICATE_TEMPLATE = "create-certificate-template",
  UPDATE_CERTIFICATE_TEMPLATE = "update-certificate-template",
  DELETE_CERTIFICATE_TEMPLATE = "delete-certificate-template",
  GET_CERTIFICATE_TEMPLATE = "get-certificate-template"
}

interface UserActorMetadata {
@@ -336,6 +356,7 @@ interface DeleteIntegrationEvent {
    targetServiceId?: string;
    path?: string;
    region?: string;
    shouldDeleteIntegrationSecrets?: boolean;
  };
}

@@ -1093,6 +1114,14 @@ interface DeleteCa {
  };
}

interface RenewCa {
  type: EventType.RENEW_CA;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface GetCaCsr {
  type: EventType.GET_CA_CSR;
  metadata: {
@@ -1101,6 +1130,14 @@ interface GetCaCsr {
  };
}

interface GetCaCerts {
  type: EventType.GET_CA_CERTS;
  metadata: {
    caId: string;
    dn: string;
  };
}

interface GetCaCert {
  type: EventType.GET_CA_CERT;
  metadata: {
@@ -1143,6 +1180,15 @@ interface IssueCert {
  };
}

interface SignCert {
  type: EventType.SIGN_CERT;
  metadata: {
    caId: string;
    dn: string;
    serialNumber: string;
  };
}

interface GetCert {
  type: EventType.GET_CERT;
  metadata: {
@@ -1179,6 +1225,95 @@ interface GetCertBody {
  };
}

interface CreatePkiAlert {
  type: EventType.CREATE_PKI_ALERT;
  metadata: {
    pkiAlertId: string;
    pkiCollectionId: string;
    name: string;
    alertBeforeDays: number;
    recipientEmails: string;
  };
}
interface GetPkiAlert {
  type: EventType.GET_PKI_ALERT;
  metadata: {
    pkiAlertId: string;
  };
}

interface UpdatePkiAlert {
  type: EventType.UPDATE_PKI_ALERT;
  metadata: {
    pkiAlertId: string;
    pkiCollectionId?: string;
    name?: string;
    alertBeforeDays?: number;
    recipientEmails?: string;
  };
}
interface DeletePkiAlert {
  type: EventType.DELETE_PKI_ALERT;
  metadata: {
    pkiAlertId: string;
  };
}

interface CreatePkiCollection {
  type: EventType.CREATE_PKI_COLLECTION;
  metadata: {
    pkiCollectionId: string;
    name: string;
  };
}

interface GetPkiCollection {
  type: EventType.GET_PKI_COLLECTION;
  metadata: {
    pkiCollectionId: string;
  };
}

interface UpdatePkiCollection {
  type: EventType.UPDATE_PKI_COLLECTION;
  metadata: {
    pkiCollectionId: string;
    name?: string;
  };
}

interface DeletePkiCollection {
  type: EventType.DELETE_PKI_COLLECTION;
  metadata: {
    pkiCollectionId: string;
  };
}

interface GetPkiCollectionItems {
  type: EventType.GET_PKI_COLLECTION_ITEMS;
  metadata: {
    pkiCollectionId: string;
  };
}

interface AddPkiCollectionItem {
  type: EventType.ADD_PKI_COLLECTION_ITEM;
  metadata: {
    pkiCollectionItemId: string;
    pkiCollectionId: string;
    type: PkiItemType;
    itemId: string;
  };
}

interface DeletePkiCollectionItem {
  type: EventType.DELETE_PKI_COLLECTION_ITEM;
  metadata: {
    pkiCollectionItemId: string;
    pkiCollectionId: string;
  };
}

interface CreateKmsEvent {
  type: EventType.CREATE_KMS;
  metadata: {
@@ -1235,6 +1370,56 @@ interface LoadProjectKmsBackupEvent {
  metadata: Record<string, string>; // no metadata yet
}

interface CreateCertificateTemplate {
  type: EventType.CREATE_CERTIFICATE_TEMPLATE;
  metadata: {
    certificateTemplateId: string;
    caId: string;
    pkiCollectionId?: string;
    name: string;
    commonName: string;
    subjectAlternativeName: string;
    ttl: string;
  };
}

interface GetCertificateTemplate {
  type: EventType.GET_CERTIFICATE_TEMPLATE;
  metadata: {
    certificateTemplateId: string;
  };
}

interface UpdateCertificateTemplate {
  type: EventType.UPDATE_CERTIFICATE_TEMPLATE;
  metadata: {
    certificateTemplateId: string;
    caId: string;
    pkiCollectionId?: string;
    name: string;
    commonName: string;
    subjectAlternativeName: string;
    ttl: string;
  };
}

interface DeleteCertificateTemplate {
  type: EventType.DELETE_CERTIFICATE_TEMPLATE;
  metadata: {
    certificateTemplateId: string;
  };
}

interface OrgAdminAccessProjectEvent {
  type: EventType.ORG_ADMIN_ACCESS_PROJECT;
  metadata: {
    userId: string;
    username: string;
    email: string;
    projectId: string;
  }; // no metadata yet
}

export type Event =
  | GetSecretsEvent
  | GetSecretEvent
@@ -1327,20 +1512,39 @@ export type Event =
  | GetCa
  | UpdateCa
  | DeleteCa
  | RenewCa
  | GetCaCsr
  | GetCaCerts
  | GetCaCert
  | SignIntermediate
  | ImportCaCert
  | GetCaCrl
  | IssueCert
  | SignCert
  | GetCert
  | DeleteCert
  | RevokeCert
  | GetCertBody
  | CreatePkiAlert
  | GetPkiAlert
  | UpdatePkiAlert
  | DeletePkiAlert
  | CreatePkiCollection
  | GetPkiCollection
  | UpdatePkiCollection
  | DeletePkiCollection
  | GetPkiCollectionItems
  | AddPkiCollectionItem
  | DeletePkiCollectionItem
  | CreateKmsEvent
  | UpdateKmsEvent
  | DeleteKmsEvent
  | GetKmsEvent
  | UpdateProjectKmsEvent
  | GetProjectKmsBackupEvent
  | LoadProjectKmsBackupEvent;
  | LoadProjectKmsBackupEvent
  | OrgAdminAccessProjectEvent
  | CreateCertificateTemplate
  | UpdateCertificateTemplate
  | GetCertificateTemplate
  | DeleteCertificateTemplate;
@@ -12,10 +12,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {

const countLeasesForDynamicSecret = async (dynamicSecretId: string, tx?: Knex) => {
try {
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
.count("*")
.where({ dynamicSecretId })
.first();
const doc = await (tx || db)(TableName.DynamicSecretLease).count("*").where({ dynamicSecretId }).first();
return parseInt(doc || "0", 10);
} catch (error) {
throw new DatabaseError({ error, name: "DynamicSecretCountLeases" });
@@ -24,7 +21,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {

const findById = async (id: string, tx?: Knex) => {
try {
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
const doc = await (tx || db)(TableName.DynamicSecretLease)
.where({ [`${TableName.DynamicSecretLease}.id` as "id"]: id })
.first()
.join(
@@ -40,10 +37,14 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
db.ref("encryptedConfig").withSchema(TableName.DynamicSecret).as("dynEncryptedConfig"),
db.ref("createdAt").withSchema(TableName.DynamicSecret).as("dynCreatedAt"),
db.ref("updatedAt").withSchema(TableName.DynamicSecret).as("dynUpdatedAt")
);
@@ -58,12 +59,16 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
type: doc.dynType,
defaultTTL: doc.dynDefaultTTL,
maxTTL: doc.dynMaxTTL,
inputIV: doc.dynInputIV,
inputTag: doc.dynInputTag,
inputCiphertext: doc.dynInputCiphertext,
algorithm: doc.dynAlgorithm,
keyEncoding: doc.dynKeyEncoding,
folderId: doc.dynFolderId,
status: doc.dynStatus,
statusDetails: doc.dynStatusDetails,
createdAt: doc.dynCreatedAt,
updatedAt: doc.dynUpdatedAt,
encryptedConfig: doc.dynEncryptedConfig
updatedAt: doc.dynUpdatedAt
}
};
} catch (error) {
@@ -1,9 +1,8 @@
|
||||
import { SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
|
||||
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
|
||||
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
@@ -15,8 +14,6 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderDAL: Pick<TSecretFolderDALFactory, "findById">;
};
export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
@@ -25,9 +22,7 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
queueService,
dynamicSecretDAL,
dynamicSecretProviders,
dynamicSecretLeaseDAL,
kmsService,
folderDAL
dynamicSecretLeaseDAL
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
await queueService.queue(
@@ -82,20 +77,15 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
if (!folder) throw new DisableRotationErrors({ message: "Folder not found" });
const { projectId } = folder;
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const dynamicSecretInputConfig = secretManagerDecryptor({
cipherTextBlob: dynamicSecretCfg.encryptedConfig
}).toString();
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(dynamicSecretInputConfig) as object;
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
@@ -110,22 +100,17 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
throw new DisableRotationErrors({ message: "Document not deleted" });
const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
if (!folder) throw new DisableRotationErrors({ message: "Folder not found" });
const { projectId } = folder;
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
if (dynamicSecretLeases.length) {
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const dynamicSecretInputConfig = secretManagerDecryptor({
cipherTextBlob: dynamicSecretCfg.encryptedConfig
}).toString();
const decryptedStoredInput = JSON.parse(dynamicSecretInputConfig) as object;
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
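Note on the hunks above: they show two ways the stored provider config is decrypted before a lease is revoked or pruned, one through the project-scoped KMS data key and one through infisicalSymmetricDecrypt over the inputCiphertext/inputIV/inputTag columns. Below is a minimal sketch of the KMS data-key path, using only names that appear in the diff and assuming kmsService, dynamicSecretProviders, KmsDataKey and DynamicSecretProviders are in scope (as they are inside the factory closure); it is an illustration, not the exact service code.

const revokeWithKmsDataKey = async (
  dynamicSecretCfg: { type: string; encryptedConfig: Buffer },
  projectId: string,
  externalEntityId: string
) => {
  // Build a project-scoped decryptor from the secret-manager data key
  const { decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  // encryptedConfig holds the provider inputs serialized as JSON before encryption
  const decryptedStoredInput = JSON.parse(
    decryptor({ cipherTextBlob: dynamicSecretCfg.encryptedConfig }).toString()
  ) as object;
  const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
  await selectedProvider.revoke(decryptedStoredInput, externalEntityId);
};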
@@ -1,14 +1,14 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@@ -34,7 +34,6 @@ type TDynamicSecretLeaseServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -47,8 +46,7 @@ export const dynamicSecretLeaseServiceFactory = ({
permissionService,
dynamicSecretQueueService,
projectDAL,
licenseService,
kmsService
licenseService
}: TDynamicSecretLeaseServiceFactoryDep) => {
const create = async ({
environmentSlug,
@@ -96,12 +94,14 @@ export const dynamicSecretLeaseServiceFactory = ({
throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const { decryptor: kmsDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInputJson = kmsDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedConfig }).toString();
const decryptedStoredInput = JSON.parse(decryptedStoredInputJson) as object;
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
@@ -164,12 +164,14 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const { decryptor: kmsDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInputJson = kmsDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedConfig }).toString();
const decryptedStoredInput = JSON.parse(decryptedStoredInputJson) as object;
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
@@ -229,12 +231,14 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const { decryptor: kmsDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInputJson = kmsDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedConfig }).toString();
const decryptedStoredInput = JSON.parse(decryptedStoredInputJson) as object;
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const revokeResponse = await selectedProvider
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
@@ -1,11 +1,11 @@
import { ForbiddenError, subject } from "@casl/ability";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@@ -34,7 +34,6 @@ type TDynamicSecretServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@@ -47,8 +46,7 @@ export const dynamicSecretServiceFactory = ({
dynamicSecretProviders,
permissionService,
dynamicSecretQueueService,
projectDAL,
kmsService
projectDAL
}: TDynamicSecretServiceFactoryDep) => {
const create = async ({
path,
@@ -98,16 +96,16 @@ export const dynamicSecretServiceFactory = ({
const isConnected = await selectedProvider.validateConnection(provider.inputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const encryptedConfig = secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob;
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
const dynamicSecretCfg = await dynamicSecretDAL.create({
type: provider.type,
version: 1,
encryptedConfig,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
maxTTL,
defaultTTL,
folderId: folder.id,
@@ -167,28 +165,27 @@ export const dynamicSecretServiceFactory = ({
}
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const dynamicSecretInputConfig = secretManagerDecryptor({
cipherTextBlob: dynamicSecretCfg.encryptedConfig
}).toString();
const decryptedStoredInput = JSON.parse(dynamicSecretInputConfig) as object;
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
const isConnected = await selectedProvider.validateConnection(newInput);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedConfig = secretManagerEncryptor({
plainText: Buffer.from(JSON.stringify(updatedInput))
}).cipherTextBlob;
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
encryptedConfig,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
maxTTL,
defaultTTL,
name: newName ?? name,
@@ -289,16 +286,14 @@ export const dynamicSecretServiceFactory = ({
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const dynamicSecretInputConfig = secretManagerDecryptor({
cipherTextBlob: dynamicSecretCfg.encryptedConfig
}).toString();
const decryptedStoredInput = JSON.parse(dynamicSecretInputConfig) as object;
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
return { ...dynamicSecretCfg, inputs: providerInputs };
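For the write side shown in the create/update hunks above, here is a short sketch of how provider inputs could be validated and turned into a KMS-encrypted blob before persisting. Names mirror the diff (selectedProvider, kmsService, BadRequestError, KmsDataKey), which are assumed to be in scope inside the factory; this is an outline, not the exact implementation.

const buildEncryptedConfigSketch = async (inputs: object, projectId: string) => {
  // Confirm the provider can actually connect with these inputs before storing them
  const isConnected = await selectedProvider.validateConnection(inputs);
  if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
  const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  // Serialize the validated inputs and keep only the KMS-encrypted blob
  return secretManagerEncryptor({
    plainText: Buffer.from(JSON.stringify(inputs))
  }).cipherTextBlob;
};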
@@ -40,7 +40,12 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
secretRotation: true,
caCrl: false,
instanceUserManagement: false,
externalKms: false
externalKms: false,
rateLimits: {
readLimit: 60,
writeLimit: 200,
secretsLimit: 40
}
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
@@ -58,6 +58,11 @@ export type TFeatureSet = {
caCrl: false;
instanceUserManagement: false;
externalKms: false;
rateLimits: {
readLimit: number;
writeLimit: number;
secretsLimit: number;
};
};
export type TOrgPlansTableDTO = {
@@ -9,6 +9,10 @@ export enum OrgPermissionActions {
Delete = "delete"
}
export enum OrgPermissionAdminConsoleAction {
AccessAllProjects = "access-all-projects"
}
export enum OrgPermissionSubjects {
Workspace = "workspace",
Role = "role",
@@ -22,7 +26,8 @@ export enum OrgPermissionSubjects {
Billing = "billing",
SecretScanning = "secret-scanning",
Identity = "identity",
Kms = "kms"
Kms = "kms",
AdminConsole = "organization-admin-console"
}
export type OrgPermissionSet =
@@ -39,7 +44,8 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionActions, OrgPermissionSubjects.Billing]
| [OrgPermissionActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Kms];
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole];
const buildAdminPermission = () => {
const { can, build } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
@@ -107,6 +113,8 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);
can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);
return build({ conditionsMatcher });
};
@@ -118,7 +126,6 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Member);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
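The new admin-console subject above is consumed like any other CASL org rule. A hedged sketch of a guard for the access-all-projects action; `permission` is assumed to be the MongoAbility returned by permissionService.getOrgPermission, which is not shown in this hunk.

import { ForbiddenError } from "@casl/ability";

// Throws unless the org-level ability grants access to the admin console
ForbiddenError.from(permission).throwUnlessCan(
  OrgPermissionAdminConsoleAction.AccessAllProjects,
  OrgPermissionSubjects.AdminConsole
);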
@@ -66,6 +66,7 @@ export const permissionDALFactory = (db: TDbClient) => {
`${TableName.GroupProjectMembershipRole}.projectMembershipId`,
`${TableName.GroupProjectMembership}.id`
)
.leftJoin(
TableName.ProjectRoles,
`${TableName.GroupProjectMembershipRole}.customRoleId`,
@@ -73,6 +74,12 @@ export const permissionDALFactory = (db: TDbClient) => {
)
.join(TableName.Project, `${TableName.GroupProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
.leftJoin(
TableName.ProjectUserAdditionalPrivilege,
`${TableName.GroupProjectMembership}.projectId`,
`${TableName.Project}.id`
)
.select(selectAllTableCols(TableName.GroupProjectMembershipRole))
.select(
db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
@@ -81,9 +88,30 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("projectId").withSchema(TableName.GroupProjectMembership),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("orgId").withSchema(TableName.Project),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug")
)
.select("permissions");
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.ProjectRoles).as("permissions"),
// db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("apPermissions")
// Additional Privileges
db.ref("id").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApId"),
db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApPermissions"),
db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryMode"),
db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApIsTemporary"),
db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryRange"),
db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApProjectId"),
db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApUserId"),
db
.ref("temporaryAccessStartTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("userApTemporaryAccessStartTime"),
db
.ref("temporaryAccessEndTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("userApTemporaryAccessEndTime")
);
// .select(`${TableName.ProjectRoles}.permissions`);
const docs = await db(TableName.ProjectMembership)
.join(
@@ -98,12 +126,13 @@ export const permissionDALFactory = (db: TDbClient) => {
)
.leftJoin(
TableName.ProjectUserAdditionalPrivilege,
`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`,
`${TableName.ProjectMembership}.id`
`${TableName.ProjectUserAdditionalPrivilege}.projectId`,
`${TableName.ProjectMembership}.projectId`
)
.join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
.where("userId", userId)
.where(`${TableName.ProjectMembership}.userId`, userId)
.where(`${TableName.ProjectMembership}.projectId`, projectId)
.select(selectAllTableCols(TableName.ProjectUserMembershipRole))
.select(
@@ -120,6 +149,10 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryMode"),
db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApIsTemporary"),
db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApTemporaryRange"),
db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApProjectId"),
db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("userApUserId"),
db
.ref("temporaryAccessStartTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
@@ -198,6 +231,31 @@ export const permissionDALFactory = (db: TDbClient) => {
permissions: z.unknown(),
customRoleSlug: z.string().optional().nullable()
}).parse(data)
},
{
key: "userApId",
label: "additionalPrivileges" as const,
mapper: ({
userApId,
userApProjectId,
userApUserId,
userApPermissions,
userApIsTemporary,
userApTemporaryMode,
userApTemporaryRange,
userApTemporaryAccessEndTime,
userApTemporaryAccessStartTime
}) => ({
id: userApId,
userId: userApUserId,
projectId: userApProjectId,
permissions: userApPermissions,
temporaryRange: userApTemporaryRange,
temporaryMode: userApTemporaryMode,
temporaryAccessEndTime: userApTemporaryAccessEndTime,
temporaryAccessStartTime: userApTemporaryAccessStartTime,
isTemporary: userApIsTemporary
})
}
]
})
@@ -218,15 +276,24 @@ export const permissionDALFactory = (db: TDbClient) => {
!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
) ?? [];
const activeAdditionalPrivileges = permission?.[0]?.additionalPrivileges?.filter(
({ isTemporary, temporaryAccessEndTime }) =>
!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
);
const activeAdditionalPrivileges =
permission?.[0]?.additionalPrivileges?.filter(
({ isTemporary, temporaryAccessEndTime }) =>
!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
) ?? [];
const activeGroupAdditionalPrivileges =
groupPermission?.[0]?.additionalPrivileges?.filter(
({ isTemporary, temporaryAccessEndTime, userId: apUserId, projectId: apProjectId }) =>
apProjectId === projectId &&
apUserId === userId &&
(!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime))
) ?? [];
return {
...(permission[0] || groupPermission[0]),
roles: [...activeRoles, ...activeGroupRoles],
additionalPrivileges: activeAdditionalPrivileges
additionalPrivileges: [...activeAdditionalPrivileges, ...activeGroupAdditionalPrivileges]
};
} catch (error) {
throw new DatabaseError({ error, name: "GetProjectPermission" });
@@ -23,12 +23,16 @@ export enum ProjectPermissionSub {
IpAllowList = "ip-allowlist",
Project = "workspace",
Secrets = "secrets",
SecretFolders = "secret-folders",
SecretRollback = "secret-rollback",
SecretApproval = "secret-approval",
SecretRotation = "secret-rotation",
Identity = "identity",
CertificateAuthorities = "certificate-authorities",
Certificates = "certificates",
CertificateTemplates = "certificate-templates",
PkiAlerts = "pki-alerts",
PkiCollections = "pki-collections",
Kms = "kms"
}
@@ -42,6 +46,10 @@ export type ProjectPermissionSet =
ProjectPermissionActions,
ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SubjectFields)
]
| [
ProjectPermissionActions,
ProjectPermissionSub.SecretFolders | (ForcedSubject<ProjectPermissionSub.SecretFolders> & SubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.Role]
| [ProjectPermissionActions, ProjectPermissionSub.Tags]
| [ProjectPermissionActions, ProjectPermissionSub.Member]
@@ -58,6 +66,9 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.Identity]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
| [ProjectPermissionActions, ProjectPermissionSub.Certificates]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates]
| [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts]
| [ProjectPermissionActions, ProjectPermissionSub.PkiCollections]
| [ProjectPermissionActions.Delete, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
@@ -156,6 +167,21 @@ const buildAdminPermissionRules = () => {
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateTemplates);
can(ProjectPermissionActions.Create, ProjectPermissionSub.CertificateTemplates);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.CertificateTemplates);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.CertificateTemplates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.PkiAlerts);
can(ProjectPermissionActions.Create, ProjectPermissionSub.PkiAlerts);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.PkiAlerts);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.PkiAlerts);
can(ProjectPermissionActions.Read, ProjectPermissionSub.PkiCollections);
can(ProjectPermissionActions.Create, ProjectPermissionSub.PkiCollections);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.PkiCollections);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.PkiCollections);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Project);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Project);
@@ -232,6 +258,11 @@ const buildMemberPermissionRules = () => {
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateTemplates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.PkiAlerts);
can(ProjectPermissionActions.Read, ProjectPermissionSub.PkiCollections);
return rules;
};
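With SecretFolders added as a subject-scoped member of ProjectPermissionSet, a folder check can carry conditions the same way Secrets does. A brief sketch using subject() from @casl/ability; the environment and secretPath fields are illustrative assumptions, not taken from this hunk.

ForbiddenError.from(permission).throwUnlessCan(
  ProjectPermissionActions.Read,
  // Field names below are hypothetical condition fields for folder-scoped rules
  subject(ProjectPermissionSub.SecretFolders, { environment, secretPath })
);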
@@ -18,7 +18,7 @@ import {
type TProjectUserAdditionalPrivilegeServiceFactoryDep = {
projectUserAdditionalPrivilegeDAL: TProjectUserAdditionalPrivilegeDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};
@@ -53,12 +53,17 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({ slug, projectMembershipId });
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
slug,
projectId: projectMembership.projectId,
userId: projectMembership.userId
});
if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
if (!dto.isTemporary) {
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
projectMembershipId,
userId: projectMembership.userId,
projectId: projectMembership.projectId,
slug,
permissions: customPermission
});
@@ -67,7 +72,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
const relativeTempAllocatedTimeInMs = ms(dto.temporaryRange);
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
projectMembershipId,
projectId: projectMembership.projectId,
userId: projectMembership.userId,
slug,
permissions: customPermission,
isTemporary: true,
@@ -90,7 +96,11 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" });
const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId);
const projectMembership = await projectMembershipDAL.findOne({
userId: userPrivilege.userId,
projectId: userPrivilege.projectId
});
if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" });
const { permission } = await permissionService.getProjectPermission(
@@ -105,7 +115,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
if (dto?.slug) {
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
slug: dto.slug,
projectMembershipId: projectMembership.id
userId: projectMembership.id,
projectId: projectMembership.projectId
});
if (existingSlug && existingSlug.id !== userPrivilege.id)
throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
@@ -138,7 +149,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" });
const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId);
const projectMembership = await projectMembershipDAL.findOne({
userId: userPrivilege.userId,
projectId: userPrivilege.projectId
});
if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" });
const { permission } = await permissionService.getProjectPermission(
@@ -164,7 +178,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
const userPrivilege = await projectUserAdditionalPrivilegeDAL.findById(privilegeId);
if (!userPrivilege) throw new BadRequestError({ message: "User additional privilege not found" });
const projectMembership = await projectMembershipDAL.findById(userPrivilege.projectMembershipId);
const projectMembership = await projectMembershipDAL.findOne({
userId: userPrivilege.userId,
projectId: userPrivilege.projectId
});
if (!projectMembership) throw new BadRequestError({ message: "Project membership not found" });
const { permission } = await permissionService.getProjectPermission(
@@ -198,7 +215,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
const userPrivileges = await projectUserAdditionalPrivilegeDAL.find({ projectMembershipId });
const userPrivileges = await projectUserAdditionalPrivilegeDAL.find({
userId: projectMembership.userId,
projectId: projectMembership.projectId
});
return userPrivileges;
};
@@ -4,17 +4,16 @@ import { logger } from "@app/lib/logger";
import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
import { RateLimitConfiguration, TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
let rateLimitMaxConfiguration = {
let rateLimitMaxConfiguration: RateLimitConfiguration = {
readLimit: 60,
publicEndpointLimit: 30,
writeLimit: 200,
secretsLimit: 60,
authRateLimit: 60,
inviteUserRateLimit: 30,
mfaRateLimit: 20,
creationLimit: 30
mfaRateLimit: 20
};
Object.freeze(rateLimitMaxConfiguration);
@@ -67,8 +66,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
secretsLimit: rateLimit.secretsRateLimit,
authRateLimit: rateLimit.authRateLimit,
inviteUserRateLimit: rateLimit.inviteUserRateLimit,
mfaRateLimit: rateLimit.mfaRateLimit,
creationLimit: rateLimit.creationLimit
mfaRateLimit: rateLimit.mfaRateLimit
};
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
@@ -5,7 +5,6 @@ export type TRateLimitUpdateDTO = {
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};
@@ -14,3 +13,13 @@ export type TRateLimit = {
createdAt: Date;
updatedAt: Date;
} & TRateLimitUpdateDTO;
export type RateLimitConfiguration = {
readLimit: number;
publicEndpointLimit: number;
writeLimit: number;
secretsLimit: number;
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
};
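Putting the two rate-limit hunks together, a configuration literal that satisfies the new RateLimitConfiguration type (values mirror the defaults in the service diff and are illustrative only); note that creationLimit is gone from both the DTO and the runtime object.

const defaultRateLimits: RateLimitConfiguration = {
  readLimit: 60,
  publicEndpointLimit: 30,
  writeLimit: 200,
  secretsLimit: 60,
  authRateLimit: 60,
  inviteUserRateLimit: 30,
  mfaRateLimit: 20
};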
@@ -31,6 +31,7 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import {
buildScimGroup,
buildScimGroupList,
@@ -93,6 +94,7 @@ type TScimServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
smtpService: Pick<TSmtpService, "sendMail">;
projectUserAdditionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
};
export type TScimServiceFactory = ReturnType<typeof scimServiceFactory>;
@@ -112,6 +114,7 @@ export const scimServiceFactory = ({
projectKeyDAL,
projectBotDAL,
permissionService,
projectUserAdditionalPrivilegeDAL,
smtpService
}: TScimServiceFactoryDep) => {
const createScimToken = async ({
@@ -558,6 +561,7 @@ export const scimServiceFactory = ({
orgId: membership.orgId,
orgDAL,
projectMembershipDAL,
projectUserAdditionalPrivilegeDAL,
projectKeyDAL,
userAliasDAL,
licenseService
@@ -8,6 +8,7 @@ import { removeTrailingSlash } from "@app/lib/fn";
import { containsGlobPatterns } from "@app/lib/picomatch";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { TSecretApprovalPolicyApproverDALFactory } from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
import {
@@ -28,6 +29,7 @@ type TSecretApprovalPolicyServiceFactoryDep = {
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@@ -36,7 +38,8 @@ export const secretApprovalPolicyServiceFactory = ({
secretApprovalPolicyDAL,
permissionService,
secretApprovalPolicyApproverDAL,
projectEnvDAL
projectEnvDAL,
licenseService
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createSecretApprovalPolicy = async ({
name,
@@ -65,6 +68,15 @@ export const secretApprovalPolicyServiceFactory = ({
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message:
"Failed to create secret approval policy due to plan restriction. Upgrade plan to create secret approval policy."
});
}
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) throw new BadRequestError({ message: "Environment not found" });
@@ -115,6 +127,14 @@ export const secretApprovalPolicyServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message:
"Failed to update secret approval policy due to plan restriction. Upgrade plan to update secret approval policy."
});
}
const updatedSap = await secretApprovalPolicyDAL.transaction(async (tx) => {
const doc = await secretApprovalPolicyDAL.updateById(
secretApprovalPolicy.id,
@@ -167,6 +187,14 @@ export const secretApprovalPolicyServiceFactory = ({
ProjectPermissionSub.SecretApproval
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message:
"Failed to update secret approval policy due to plan restriction. Upgrade plan to update secret approval policy."
});
}
await secretApprovalPolicyDAL.deleteById(secretPolicyId);
return sapPolicy;
};
@@ -81,15 +81,13 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
.select({
secVerTagId: "secVerTag.id",
secVerTagColor: "secVerTag.color",
secVerTagSlug: "secVerTag.slug",
secVerTagName: "secVerTag.name"
secVerTagSlug: "secVerTag.slug"
})
.select(
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTag).as("tagJnId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
)
.select(
db.ref("secretBlindIndex").withSchema(TableName.Secret).as("orgSecBlindIndex"),
@@ -124,9 +122,9 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "tagJnId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({
id,
name,
name: slug,
slug,
color
})
@@ -200,11 +198,11 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "secVerTagId",
label: "tags" as const,
mapper: ({ secVerTagId: id, secVerTagName: name, secVerTagSlug: slug, secVerTagColor: color }) => ({
mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({
// eslint-disable-next-line
id,
// eslint-disable-next-line
name,
name: slug,
// eslint-disable-next-line
slug,
// eslint-disable-next-line
@@ -262,15 +260,13 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
.select({
secVerTagId: "secVerTag.id",
secVerTagColor: "secVerTag.color",
secVerTagSlug: "secVerTag.slug",
secVerTagName: "secVerTag.name"
secVerTagSlug: "secVerTag.slug"
})
.select(
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretTagV2).as("tagJnId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
)
.select(
db.ref("version").withSchema(TableName.SecretV2).as("orgSecVersion"),
@@ -292,9 +288,9 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "tagJnId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color }) => ({
id,
name,
name: slug,
slug,
color
})
@@ -330,11 +326,11 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
{
key: "secVerTagId",
label: "tags" as const,
mapper: ({ secVerTagId: id, secVerTagName: name, secVerTagSlug: slug, secVerTagColor: color }) => ({
mapper: ({ secVerTagId: id, secVerTagSlug: slug, secVerTagColor: color }) => ({
// eslint-disable-next-line
id,
// eslint-disable-next-line
name,
name: slug,
// eslint-disable-next-line
slug,
// eslint-disable-next-line
@@ -50,6 +50,7 @@ import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/se
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
@@ -97,6 +98,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -122,7 +124,8 @@ export const secretApprovalRequestServiceFactory = ({
kmsService,
secretV2BridgeDAL,
secretVersionV2BridgeDAL,
secretVersionTagV2BridgeDAL
secretVersionTagV2BridgeDAL,
licenseService
}: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@@ -224,12 +227,10 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.key,
id: el.id,
version: el.version,
secretValue: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: undefined,
secretValue: el.encryptedValue ? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString() : "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: undefined,
: "",
secret: el.secret
? {
secretKey: el.secret.key,
@@ -237,10 +238,10 @@ export const secretApprovalRequestServiceFactory = ({
version: el.secret.version,
secretValue: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
: undefined,
: "",
secretComment: el.secret.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedComment }).toString()
: undefined
: ""
}
: undefined,
secretVersion: el.secretVersion
@@ -250,10 +251,10 @@ export const secretApprovalRequestServiceFactory = ({
version: el.secretVersion.version,
secretValue: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
: undefined,
: "",
secretComment: el.secretVersion.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString()
: undefined
: ""
}
: undefined
}));
@@ -297,6 +298,14 @@ export const secretApprovalRequestServiceFactory = ({
if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message:
"Failed to review secret approval request due to plan restriction. Upgrade plan to review secret approval request."
});
}
const { policy } = secretApprovalRequest;
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
@@ -347,6 +356,14 @@ export const secretApprovalRequestServiceFactory = ({
if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message:
"Failed to update secret approval request due to plan restriction. Upgrade plan to update secret approval request."
});
}
const { policy } = secretApprovalRequest;
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
@@ -388,6 +405,14 @@ export const secretApprovalRequestServiceFactory = ({
if (!secretApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });
if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" });
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.secretApproval) {
throw new BadRequestError({
message:
"Failed to merge secret approval request due to plan restriction. Upgrade plan to merge secret approval request."
});
}
const { policy, folderId, projectId } = secretApprovalRequest;
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
@@ -257,7 +257,7 @@ export const secretReplicationServiceFactory = ({
secretDAL: secretV2BridgeDAL,
folderDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : undefined)
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : "")
});
// secrets that gets replicated across imports
const sourceDecryptedLocalSecrets = sourceLocalSecrets.map((el) => ({
@@ -449,7 +449,7 @@ export const secretReplicationServiceFactory = ({
});
}
if (locallyDeletedSecrets.length) {
await secretDAL.delete(
await secretV2BridgeDAL.delete(
{
$in: {
id: locallyDeletedSecrets.map(({ id }) => id)
@@ -164,10 +164,10 @@ export const secretSnapshotServiceFactory = ({
secretKey: el.key,
secretValue: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: undefined,
: "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: undefined
: ""
}))
};
} else {
@@ -100,8 +100,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
return sqlNestRelationships({
data,
@@ -132,9 +131,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId
@@ -195,8 +194,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
return sqlNestRelationships({
data,
@@ -227,9 +225,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId
@@ -353,8 +351,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionTag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
const formated = sqlNestRelationships({
@@ -377,9 +374,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId
@@ -508,8 +505,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
db.ref("id").withSchema(TableName.SecretTag).as("tagId"),
db.ref("id").withSchema(TableName.SecretVersionV2Tag).as("tagVersionId"),
db.ref("color").withSchema(TableName.SecretTag).as("tagColor"),
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"),
db.ref("name").withSchema(TableName.SecretTag).as("tagName")
db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug")
);
const formated = sqlNestRelationships({
@@ -532,9 +528,9 @@ export const snapshotDALFactory = (db: TDbClient) => {
{
key: "tagVersionId",
label: "tags" as const,
mapper: ({ tagId: id, tagName: name, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
mapper: ({ tagId: id, tagSlug: slug, tagColor: color, tagVersionId: vId }) => ({
id,
name,
name: slug,
slug,
color,
vId
@@ -5,17 +5,30 @@ import { Redlock, Settings } from "@app/lib/red-lock";
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
// all the key prefixes used must be set here to avoid conflict
export enum KeyStorePrefixes {
SecretReplication = "secret-replication-import-lock",
KmsProjectDataKeyCreation = "kms-project-data-key-creation-lock",
KmsProjectKeyCreation = "kms-project-key-creation-lock",
WaitUntilReadyKmsProjectDataKeyCreation = "wait-until-ready-kms-project-data-key-creation-",
WaitUntilReadyKmsProjectKeyCreation = "wait-until-ready-kms-project-key-creation-",
KmsOrgKeyCreation = "kms-org-key-creation-lock",
KmsOrgDataKeyCreation = "kms-org-data-key-creation-lock",
WaitUntilReadyKmsOrgKeyCreation = "wait-until-ready-kms-org-key-creation-",
WaitUntilReadyKmsOrgDataKeyCreation = "wait-until-ready-kms-org-data-key-creation-"
}
export const KeyStorePrefixes = {
SecretReplication: "secret-replication-import-lock",
KmsProjectDataKeyCreation: "kms-project-data-key-creation-lock",
KmsProjectKeyCreation: "kms-project-key-creation-lock",
WaitUntilReadyKmsProjectDataKeyCreation: "wait-until-ready-kms-project-data-key-creation-",
WaitUntilReadyKmsProjectKeyCreation: "wait-until-ready-kms-project-key-creation-",
KmsOrgKeyCreation: "kms-org-key-creation-lock",
KmsOrgDataKeyCreation: "kms-org-data-key-creation-lock",
WaitUntilReadyKmsOrgKeyCreation: "wait-until-ready-kms-org-key-creation-",
WaitUntilReadyKmsOrgDataKeyCreation: "wait-until-ready-kms-org-data-key-creation-",
SyncSecretIntegrationLock: (projectId: string, environmentSlug: string, secretPath: string) =>
`sync-integration-mutex-${projectId}-${environmentSlug}-${secretPath}` as const,
SyncSecretIntegrationLastRunTimestamp: (projectId: string, environmentSlug: string, secretPath: string) =>
`sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
IdentityAccessTokenStatusUpdate: (identityAccessTokenId: string) =>
`identity-access-token-status:${identityAccessTokenId}`,
ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`
};
export const KeyStoreTtls = {
SetSyncSecretIntegrationLastRunTimestampInSeconds: 10,
AccessTokenStatusUpdateInSeconds: 120
};
type TWaitTillReady = {
key: string;
@@ -37,10 +50,10 @@ export const keyStoreFactory = (redisUrl: string) => {
const setItemWithExpiry = async (
key: string,
exp: number | string,
expiryInSeconds: number | string,
value: string | number | Buffer,
prefix?: string
) => redis.set(prefix ? `${prefix}:${key}` : key, value, "EX", exp);
) => redis.set(prefix ? `${prefix}:${key}` : key, value, "EX", expiryInSeconds);
const deleteItem = async (key: string) => redis.del(key);
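Since KeyStorePrefixes is now a plain object, prefixes can be functions that derive fully qualified keys. A brief usage sketch, assuming keyStore is the object returned by keyStoreFactory and that it exposes setItemWithExpiry as shown above; the projectId, environmentSlug and secretPath variables are placeholders.

// Key for the per-project integration sync mutex (would be handed to the Redlock helper)
const lockKey = KeyStorePrefixes.SyncSecretIntegrationLock(projectId, environmentSlug, secretPath);
// Record the last sync run with a short TTL so stale timestamps expire on their own
const lastRunKey = KeyStorePrefixes.SyncSecretIntegrationLastRunTimestamp(projectId, environmentSlug, secretPath);
await keyStore.setItemWithExpiry(
  lastRunKey,
  KeyStoreTtls.SetSyncSecretIntegrationLastRunTimestampInSeconds,
  Date.now()
);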
@@ -596,7 +596,8 @@ export const RAW_SECRETS = {
|
||||
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
|
||||
environment: "The slug of the environment to list secrets from.",
|
||||
secretPath: "The secret path to list secrets from.",
|
||||
includeImports: "Weather to include imported secrets or not."
|
||||
includeImports: "Weather to include imported secrets or not.",
|
||||
tagSlugs: "The comma separated tag slugs to filter secrets"
|
||||
},
|
||||
CREATE: {
|
||||
secretName: "The name of the secret to create.",
|
||||
@@ -1048,15 +1049,30 @@ export const CERTIFICATE_AUTHORITIES = {
|
||||
caId: "The ID of the CA to generate CSR from",
|
||||
csr: "The generated CSR from the CA"
|
||||
},
|
||||
RENEW_CA_CERT: {
|
||||
caId: "The ID of the CA to renew the CA certificate for",
|
||||
type: "The type of behavior to use for the renewal operation. Currently Infisical is only able to renew a CA certificate with the same key pair.",
|
||||
notAfter: "The expiry date and time for the renewed CA certificate in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
certificate: "The renewed CA certificate body",
|
||||
certificateChain: "The certificate chain of the CA",
|
||||
serialNumber: "The serial number of the renewed CA certificate"
|
||||
},
|
||||
GET_CERT: {
|
||||
caId: "The ID of the CA to get the certificate body and certificate chain from",
|
||||
certificate: "The certificate body of the CA",
|
||||
certificateChain: "The certificate chain of the CA",
|
||||
serialNumber: "The serial number of the CA certificate"
|
||||
},
|
||||
GET_CA_CERTS: {
|
||||
caId: "The ID of the CA to get the CA certificates for",
|
||||
certificate: "The certificate body of the CA certificate",
|
||||
certificateChain: "The certificate chain of the CA certificate",
|
||||
serialNumber: "The serial number of the CA certificate",
|
||||
version: "The version of the CA certificate. The version is incremented for each CA renewal operation."
|
||||
},
|
||||
SIGN_INTERMEDIATE: {
|
||||
caId: "The ID of the CA to sign the intermediate certificate with",
|
||||
csr: "The CSR to sign with the CA",
|
||||
csr: "The pem-encoded CSR to sign with the CA",
|
||||
notBefore: "The date and time when the intermediate CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the intermediate CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
maxPathLength:
|
||||
@@ -1073,6 +1089,8 @@ export const CERTIFICATE_AUTHORITIES = {
|
||||
},
|
||||
ISSUE_CERT: {
|
||||
caId: "The ID of the CA to issue the certificate from",
|
||||
certificateTemplateId: "The ID of the certificate template to issue the certificate from",
|
||||
pkiCollectionId: "The ID of the PKI collection to add the certificate to",
|
||||
friendlyName: "A friendly name for the certificate",
|
||||
commonName: "The common name (CN) for the certificate",
|
||||
altNames:
|
||||
@@ -1086,6 +1104,22 @@ export const CERTIFICATE_AUTHORITIES = {
|
||||
privateKey: "The private key of the issued certificate",
|
||||
serialNumber: "The serial number of the issued certificate"
|
||||
},
|
||||
SIGN_CERT: {
|
||||
caId: "The ID of the CA to issue the certificate from",
|
||||
pkiCollectionId: "The ID of the PKI collection to add the certificate to",
|
||||
csr: "The pem-encoded CSR to sign with the CA to be used for certificate issuance",
|
||||
friendlyName: "A friendly name for the certificate",
|
||||
commonName: "The common name (CN) for the certificate",
|
||||
altNames:
|
||||
"A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.",
|
||||
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
|
||||
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
|
||||
certificate: "The issued certificate",
|
||||
issuingCaCertificate: "The certificate of the issuing CA",
|
||||
certificateChain: "The certificate chain of the issued certificate",
|
||||
serialNumber: "The serial number of the issued certificate"
|
||||
},
|
||||
GET_CRL: {
|
||||
caId: "The ID of the CA to get the certificate revocation list (CRL) for",
|
||||
crl: "The certificate revocation list (CRL) of the CA"
|
||||
@@ -1114,6 +1148,91 @@ export const CERTIFICATES = {
|
||||
}
|
||||
};
|
||||
|
||||
export const CERTIFICATE_TEMPLATES = {
|
||||
CREATE: {
|
||||
caId: "The ID of the certificate authority to associate the template with",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the template",
|
||||
name: "The name of the template",
|
||||
commonName: "The regular expression string to use for validating common names",
|
||||
subjectAlternativeName: "The regular expression string to use for validating subject alternative names",
|
||||
ttl: "The max TTL for the template"
|
||||
},
|
||||
GET: {
|
||||
certificateTemplateId: "The ID of the certificate template to get"
|
||||
},
|
||||
UPDATE: {
|
||||
certificateTemplateId: "The ID of the certificate template to update",
|
||||
caId: "The ID of the certificate authority to update the association with the template",
|
||||
pkiCollectionId: "The ID of the PKI collection to update the binding to the template",
|
||||
name: "The updated name of the template",
|
||||
commonName: "The updated regular expression string for validating common names",
|
||||
subjectAlternativeName: "The updated regular expression string for validating subject alternative names",
|
||||
ttl: "The updated max TTL for the template"
|
||||
},
|
||||
DELETE: {
|
||||
certificateTemplateId: "The ID of the certificate template to delete"
|
||||
}
|
||||
};
|
||||
|
||||
export const ALERTS = {
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the alert in",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the alert",
|
||||
name: "The name of the alert",
|
||||
alertBeforeDays: "The number of days before the certificate expires to trigger the alert",
|
||||
emails: "The email addresses to send the alert email to"
|
||||
},
|
||||
GET: {
|
||||
alertId: "The ID of the alert to get"
|
||||
},
|
||||
UPDATE: {
|
||||
alertId: "The ID of the alert to update",
|
||||
name: "The name of the alert to update to",
|
||||
alertBeforeDays: "The number of days before the certificate expires to trigger the alert to update to",
|
||||
pkiCollectionId: "The ID of the PKI collection to bind to the alert to update to",
|
||||
emails: "The email addresses to send the alert email to update to"
|
||||
},
|
||||
DELETE: {
|
||||
alertId: "The ID of the alert to delete"
|
||||
}
|
||||
};
|
||||
|
||||
export const PKI_COLLECTIONS = {
|
||||
CREATE: {
|
||||
projectId: "The ID of the project to create the PKI collection in",
|
||||
name: "The name of the PKI collection",
|
||||
description: "A description for the PKI collection"
|
||||
},
|
||||
GET: {
|
||||
collectionId: "The ID of the PKI collection to get"
|
||||
},
|
||||
UPDATE: {
|
||||
collectionId: "The ID of the PKI collection to update",
|
||||
name: "The name of the PKI collection to update to",
|
||||
description: "The description for the PKI collection to update to"
|
||||
},
|
||||
DELETE: {
|
||||
collectionId: "The ID of the PKI collection to delete"
|
||||
},
|
||||
LIST_ITEMS: {
|
||||
collectionId: "The ID of the PKI collection to list items from",
|
||||
type: "The type of the PKI collection item to list",
|
||||
offset: "The offset to start from",
|
||||
limit: "The number of items to return"
|
||||
},
|
||||
ADD_ITEM: {
|
||||
collectionId: "The ID of the PKI collection to add the item to",
|
||||
type: "The type of the PKI collection item to add",
|
||||
itemId: "The resource ID of the PKI collection item to add"
|
||||
},
|
||||
DELETE_ITEM: {
|
||||
collectionId: "The ID of the PKI collection to delete the item from",
|
||||
collectionItemId: "The ID of the PKI collection item to delete",
|
||||
type: "The type of the deleted PKI collection item",
|
||||
itemId: "The resource ID of the deleted PKI collection item"
|
||||
}
|
||||
};
|
||||
|
||||
export const PROJECT_ROLE = {
|
||||
CREATE: {
|
||||
projectSlug: "Slug of the project to create the role for.",
|
||||
|
@@ -140,7 +140,8 @@ const envSchema = z
|
||||
MAINTENANCE_MODE: zodStrBool.default("false"),
|
||||
CAPTCHA_SECRET: zpStr(z.string().optional()),
|
||||
PLAIN_API_KEY: zpStr(z.string().optional()),
|
||||
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional())
|
||||
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
|
||||
DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false")
|
||||
})
|
||||
.transform((data) => ({
|
||||
...data,
|
||||
|
@@ -1,3 +1,8 @@
|
||||
export const daysToMillisecond = (days: number) => days * 24 * 60 * 60 * 1000;

export const secondsToMillis = (seconds: number) => seconds * 1000;

export const applyJitter = (delayMs: number, jitterMs: number) => {
  const jitter = Math.floor(Math.random() * (2 * jitterMs)) - jitterMs;
  return delayMs + jitter;
};
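A quick illustration of the helpers above; the 5-second base delay and the retryJob callback are placeholders:

// Roughly a 5000 ms delay with +/- 1000 ms of jitter, i.e. somewhere in [4000, 6000) ms
const delayMs = applyJitter(secondsToMillis(5), 1000);
setTimeout(() => retryJob(), delayMs); // retryJob is a hypothetical callback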
|
||||
|
@@ -1,2 +1,8 @@
|
||||
export const getLastMidnightDateISO = (last = 1) =>
  `${new Date(new Date().setDate(new Date().getDate() - last)).toISOString().slice(0, 10)}T00:00:00Z`;

export const getTimeDifferenceInSeconds = (lhsTimestamp: string, rhsTimestamp: string) => {
  const lhs = new Date(lhsTimestamp);
  const rhs = new Date(rhsTimestamp);
  return Math.floor((Number(lhs) - Number(rhs)) / 1000);
};
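Worked examples (timestamps are illustrative):

getTimeDifferenceInSeconds("2024-01-02T00:02:00Z", "2024-01-02T00:00:00Z"); // => 120
getLastMidnightDateISO(1); // => e.g. "2024-01-01T00:00:00Z" when called on 2024-01-02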
|
||||
|
@@ -19,23 +19,43 @@ export const withTransaction = <K extends object>(db: Knex, dal: K) => ({
|
||||
|
||||
export type TFindFilter<R extends object = object> = Partial<R> & {
|
||||
$in?: Partial<{ [k in keyof R]: R[k][] }>;
|
||||
$search?: Partial<{ [k in keyof R]: R[k] }>;
|
||||
};
|
||||
export const buildFindFilter =
|
||||
<R extends object = object>({ $in, ...filter }: TFindFilter<R>) =>
|
||||
<R extends object = object>({ $in, $search, ...filter }: TFindFilter<R>) =>
|
||||
(bd: Knex.QueryBuilder<R, R>) => {
|
||||
void bd.where(filter);
|
||||
if ($in) {
|
||||
Object.entries($in).forEach(([key, val]) => {
|
||||
void bd.whereIn(key as never, val as never);
|
||||
if (val) {
|
||||
void bd.whereIn(key as never, val as never);
|
||||
}
|
||||
});
|
||||
}
|
||||
if ($search) {
|
||||
Object.entries($search).forEach(([key, val]) => {
|
||||
if (val) {
|
||||
void bd.whereILike(key as never, val as never);
|
||||
}
|
||||
});
|
||||
}
|
||||
return bd;
|
||||
};
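A hedged sketch of how a DAL produced by ormify could pass the new $in and $search filters; the DAL name, column names and variables are illustrative, not taken from this diff:

// Exact match on orgId, id restricted to userIds via WHERE IN, and a case-insensitive
// ILIKE pattern match on firstName.
const rows = await userDAL.find({
  orgId,
  $in: { id: userIds },
  $search: { firstName: "%alice%" }
});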
|
||||
|
||||
export type TFindOpt<R extends object = object> = {
|
||||
export type TFindReturn<TQuery extends Knex.QueryBuilder, TCount extends boolean = false> = Array<
|
||||
Awaited<TQuery>[0] &
|
||||
(TCount extends true
|
||||
? {
|
||||
count: string;
|
||||
}
|
||||
: unknown)
|
||||
>;
|
||||
|
||||
export type TFindOpt<R extends object = object, TCount extends boolean = boolean> = {
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
sort?: Array<[keyof R, "asc" | "desc"] | [keyof R, "asc" | "desc", "first" | "last"]>;
|
||||
count?: TCount;
|
||||
tx?: Knex;
|
||||
};
|
||||
|
||||
@@ -66,18 +86,22 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
|
||||
throw new DatabaseError({ error, name: "Find one" });
|
||||
}
|
||||
},
|
||||
find: async (
|
||||
find: async <TCount extends boolean = false>(
|
||||
filter: TFindFilter<Tables[Tname]["base"]>,
|
||||
{ offset, limit, sort, tx }: TFindOpt<Tables[Tname]["base"]> = {}
|
||||
{ offset, limit, sort, count, tx }: TFindOpt<Tables[Tname]["base"], TCount> = {}
|
||||
) => {
|
||||
try {
|
||||
const query = (tx || db.replicaNode())(tableName).where(buildFindFilter(filter));
|
||||
if (count) {
|
||||
void query.select(db.raw("COUNT(*) OVER() AS count"));
|
||||
void query.select("*");
|
||||
}
|
||||
if (limit) void query.limit(limit);
|
||||
if (offset) void query.offset(offset);
|
||||
if (sort) {
|
||||
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
|
||||
}
|
||||
const res = await query;
|
||||
const res = (await query) as TFindReturn<typeof query, TCount>;
|
||||
return res;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "Find one" });
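A sketch of consuming the new count option for pagination; because of COUNT(*) OVER(), every returned row carries the window total as a string in count. The DAL and variables are illustrative:

const page = await certificateDAL.find({ caId }, { offset: 0, limit: 20, count: true });
const total = page.length ? Number(page[0].count) : 0;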
|
||||
@@ -104,6 +128,16 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
|
||||
throw new DatabaseError({ error, name: "Create" });
|
||||
}
|
||||
},
|
||||
// This splits the insert into multiple chunks
|
||||
batchInsert: async (data: readonly Tables[Tname]["insert"][], tx?: Knex) => {
|
||||
try {
|
||||
if (!data.length) return [];
|
||||
const res = await (tx || db).batchInsert(tableName, data as never).returning("*");
|
||||
return res as Tables[Tname]["base"][];
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "batchInsert" });
|
||||
}
|
||||
},
|
||||
upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
|
||||
try {
|
||||
if (!data.length) return [];
|
||||
|
@@ -16,6 +16,7 @@ export enum QueueName {
|
||||
// TODO(akhilmhdh): This will get removed later. For now this is kept to stop the repeatable queue
|
||||
AuditLogPrune = "audit-log-prune",
|
||||
DailyResourceCleanUp = "daily-resource-cleanup",
|
||||
DailyExpiringPkiItemAlert = "daily-expiring-pki-item-alert",
|
||||
TelemetryInstanceStats = "telemtry-self-hosted-stats",
|
||||
IntegrationSync = "sync-integrations",
|
||||
SecretWebhook = "secret-webhook",
|
||||
@@ -26,7 +27,8 @@ export enum QueueName {
|
||||
CaCrlRotation = "ca-crl-rotation",
|
||||
SecretReplication = "secret-replication",
|
||||
SecretSync = "secret-sync", // parent queue to push integration sync, webhook, and secret replication
|
||||
ProjectV3Migration = "project-v3-migration"
|
||||
ProjectV3Migration = "project-v3-migration",
|
||||
AccessTokenStatusUpdate = "access-token-status-update"
|
||||
}
|
||||
|
||||
export enum QueueJobs {
|
||||
@@ -36,6 +38,7 @@ export enum QueueJobs {
|
||||
// TODO(akhilmhdh): This will get removed later. For now this is kept to stop the repeatable queue
|
||||
AuditLogPrune = "audit-log-prune-job",
|
||||
DailyResourceCleanUp = "daily-resource-cleanup-job",
|
||||
DailyExpiringPkiItemAlert = "daily-expiring-pki-item-alert",
|
||||
SecWebhook = "secret-webhook-trigger",
|
||||
TelemetryInstanceStats = "telemetry-self-hosted-stats",
|
||||
IntegrationSync = "secret-integration-pull",
|
||||
@@ -46,7 +49,9 @@ export enum QueueJobs {
|
||||
CaCrlRotation = "ca-crl-rotation-job",
|
||||
SecretReplication = "secret-replication",
|
||||
SecretSync = "secret-sync", // parent queue to push integration sync, webhook, and secret replication
|
||||
ProjectV3Migration = "project-v3-migration"
|
||||
ProjectV3Migration = "project-v3-migration",
|
||||
IdentityAccessTokenStatusUpdate = "identity-access-token-status-update",
|
||||
ServiceTokenStatusUpdate = "service-token-status-update"
|
||||
}
|
||||
|
||||
export type TQueueJobTypes = {
|
||||
@@ -71,6 +76,10 @@ export type TQueueJobTypes = {
|
||||
name: QueueJobs.DailyResourceCleanUp;
|
||||
payload: undefined;
|
||||
};
|
||||
[QueueName.DailyExpiringPkiItemAlert]: {
|
||||
name: QueueJobs.DailyExpiringPkiItemAlert;
|
||||
payload: undefined;
|
||||
};
|
||||
[QueueName.AuditLogPrune]: {
|
||||
name: QueueJobs.AuditLogPrune;
|
||||
payload: undefined;
|
||||
@@ -142,6 +151,15 @@ export type TQueueJobTypes = {
|
||||
name: QueueJobs.ProjectV3Migration;
|
||||
payload: { projectId: string };
|
||||
};
|
||||
[QueueName.AccessTokenStatusUpdate]:
|
||||
| {
|
||||
name: QueueJobs.IdentityAccessTokenStatusUpdate;
|
||||
payload: { identityAccessTokenId: string; numberOfUses: number };
|
||||
}
|
||||
| {
|
||||
name: QueueJobs.ServiceTokenStatusUpdate;
|
||||
payload: { serviceTokenId: string };
|
||||
};
|
||||
};
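A hedged example of enqueueing one of the new status-update jobs. It assumes the queue service exposes a queue(name, job, payload, opts) helper consistent with the job types above; the payload values and job options are placeholders:

await queueService.queue(
  QueueName.AccessTokenStatusUpdate,
  QueueJobs.IdentityAccessTokenStatusUpdate,
  { identityAccessTokenId, numberOfUses }, // hypothetical locals
  { removeOnComplete: true, removeOnFail: true } // BullMQ-style options, assumed
);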
|
||||
|
||||
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
|
||||
|
@@ -1,7 +1,6 @@
|
||||
import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-limit";
|
||||
import { Redis } from "ioredis";
|
||||
|
||||
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
|
||||
export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
|
||||
@@ -22,14 +21,16 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
|
||||
// GET endpoints
|
||||
export const readLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().readLimit,
|
||||
hook: "preValidation",
|
||||
max: (req) => req.rateLimits.readLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
// POST, PATCH, PUT, DELETE endpoints
|
||||
export const writeLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().writeLimit,
|
||||
hook: "preValidation",
|
||||
max: (req) => req.rateLimits.writeLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
@@ -37,42 +38,40 @@ export const writeLimit: RateLimitOptions = {
|
||||
export const secretsLimit: RateLimitOptions = {
|
||||
// secrets, folders, secret imports
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().secretsLimit,
|
||||
hook: "preValidation",
|
||||
max: (req) => req.rateLimits.secretsLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const authRateLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().authRateLimit,
|
||||
hook: "preValidation",
|
||||
max: (req) => req.rateLimits.authRateLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const inviteUserRateLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().inviteUserRateLimit,
|
||||
hook: "preValidation",
|
||||
max: (req) => req.rateLimits.inviteUserRateLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
export const mfaRateLimit: RateLimitOptions = {
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().mfaRateLimit,
|
||||
hook: "preValidation",
|
||||
max: (req) => req.rateLimits.mfaRateLimit,
|
||||
keyGenerator: (req) => {
|
||||
return req.headers.authorization?.split(" ")[1] || req.realIp;
|
||||
}
|
||||
};
|
||||
|
||||
export const creationLimit: RateLimitOptions = {
|
||||
// identity, project, org
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().creationLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
|
||||
|
||||
// Public endpoints to avoid brute force attacks
|
||||
export const publicEndpointLimit: RateLimitOptions = {
|
||||
// Read Shared Secrets
|
||||
timeWindow: 60 * 1000,
|
||||
max: () => getRateLimiterConfig().publicEndpointLimit,
|
||||
hook: "preValidation",
|
||||
max: (req) => req.rateLimits.publicEndpointLimit,
|
||||
keyGenerator: (req) => req.realIp
|
||||
};
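With max now resolved per request from req.rateLimits (populated by the plugin added below), routes keep attaching these presets unchanged. A minimal, illustrative route:

server.route({
  method: "GET",
  url: "/example", // hypothetical endpoint
  config: {
    rateLimit: readLimit // per-request ceiling comes from req.rateLimits.readLimit
  },
  handler: async () => ({ ok: true })
});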
|
||||
|
||||
|
backend/src/server/plugins/inject-rate-limits.ts (new file)
@@ -0,0 +1,38 @@
|
||||
import fp from "fastify-plugin";
|
||||
|
||||
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
|
||||
export const injectRateLimits = fp(async (server) => {
|
||||
server.decorateRequest("rateLimits", null);
|
||||
server.addHook("onRequest", async (req) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const instanceRateLimiterConfig = getRateLimiterConfig();
|
||||
if (!req.auth?.orgId) {
|
||||
// for public endpoints, we always use the instance-wide default rate limits
|
||||
req.rateLimits = instanceRateLimiterConfig;
|
||||
return;
|
||||
}
|
||||
|
||||
const { rateLimits, customRateLimits } = await server.services.license.getPlan(req.auth.orgId);
|
||||
|
||||
if (customRateLimits && !appCfg.isCloud) {
|
||||
// we do this because for self-hosted/dedicated instances, we want custom rate limits to be based on admin configuration
|
||||
// note that the syncing of custom rate limit happens on the instanceRateLimiterConfig object
|
||||
req.rateLimits = instanceRateLimiterConfig;
|
||||
return;
|
||||
}
|
||||
|
||||
// we're using the nullish coalescing operator in order to handle outdated licenses
|
||||
req.rateLimits = {
|
||||
readLimit: rateLimits?.readLimit ?? instanceRateLimiterConfig.readLimit,
|
||||
writeLimit: rateLimits?.writeLimit ?? instanceRateLimiterConfig.writeLimit,
|
||||
secretsLimit: rateLimits?.secretsLimit ?? instanceRateLimiterConfig.secretsLimit,
|
||||
publicEndpointLimit: instanceRateLimiterConfig.publicEndpointLimit,
|
||||
authRateLimit: instanceRateLimiterConfig.authRateLimit,
|
||||
inviteUserRateLimit: instanceRateLimiterConfig.inviteUserRateLimit,
|
||||
mfaRateLimit: instanceRateLimiterConfig.mfaRateLimit
|
||||
};
|
||||
});
|
||||
});
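For the decorated field to type-check, a Fastify module augmentation along these lines is assumed to exist elsewhere in the codebase; the exact type name is not shown in this diff, so this is a sketch only:

declare module "fastify" {
  interface FastifyRequest {
    rateLimits: {
      readLimit: number;
      writeLimit: number;
      secretsLimit: number;
      publicEndpointLimit: number;
      authRateLimit: number;
      inviteUserRateLimit: number;
      mfaRateLimit: number;
    };
  }
}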
|
@@ -73,6 +73,7 @@ import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { TQueueServiceFactory } from "@app/queue";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
|
||||
import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal";
|
||||
import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service";
|
||||
import { authDALFactory } from "@app/services/auth/auth-dal";
|
||||
@@ -89,6 +90,8 @@ import { certificateAuthorityDALFactory } from "@app/services/certificate-author
|
||||
import { certificateAuthorityQueueFactory } from "@app/services/certificate-authority/certificate-authority-queue";
|
||||
import { certificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
|
||||
import { certificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { certificateTemplateDALFactory } from "@app/services/certificate-template/certificate-template-dal";
|
||||
import { certificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
import { groupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { groupProjectMembershipRoleDALFactory } from "@app/services/group-project/group-project-membership-role-dal";
|
||||
import { groupProjectServiceFactory } from "@app/services/group-project/group-project-service";
|
||||
@@ -129,7 +132,14 @@ import { orgDALFactory } from "@app/services/org/org-dal";
|
||||
import { orgRoleDALFactory } from "@app/services/org/org-role-dal";
|
||||
import { orgRoleServiceFactory } from "@app/services/org/org-role-service";
|
||||
import { orgServiceFactory } from "@app/services/org/org-service";
|
||||
import { orgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
|
||||
import { orgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { dailyExpiringPkiItemAlertQueueServiceFactory } from "@app/services/pki-alert/expiring-pki-item-alert-queue";
|
||||
import { pkiAlertDALFactory } from "@app/services/pki-alert/pki-alert-dal";
|
||||
import { pkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
|
||||
import { pkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
|
||||
import { pkiCollectionItemDALFactory } from "@app/services/pki-collection/pki-collection-item-dal";
|
||||
import { pkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
|
||||
import { projectDALFactory } from "@app/services/project/project-dal";
|
||||
import { projectQueueFactory } from "@app/services/project/project-queue";
|
||||
import { projectServiceFactory } from "@app/services/project/project-service";
|
||||
@@ -183,6 +193,7 @@ import { webhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
import { injectAuditLogInfo } from "../plugins/audit-log";
|
||||
import { injectIdentity } from "../plugins/auth/inject-identity";
|
||||
import { injectPermission } from "../plugins/auth/inject-permission";
|
||||
import { injectRateLimits } from "../plugins/inject-rate-limits";
|
||||
import { registerSecretScannerGhApp } from "../plugins/secret-scanner";
|
||||
import { registerV1Routes } from "./v1";
|
||||
import { registerV2Routes } from "./v2";
|
||||
@@ -354,7 +365,8 @@ export const registerRoutes = async (
|
||||
projectEnvDAL,
|
||||
secretApprovalPolicyApproverDAL: sapApproverDAL,
|
||||
permissionService,
|
||||
secretApprovalPolicyDAL
|
||||
secretApprovalPolicyDAL,
|
||||
licenseService
|
||||
});
|
||||
const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL, orgMembershipDAL });
|
||||
|
||||
@@ -401,6 +413,7 @@ export const registerRoutes = async (
|
||||
orgDAL,
|
||||
orgMembershipDAL,
|
||||
projectDAL,
|
||||
projectUserAdditionalPrivilegeDAL,
|
||||
projectMembershipDAL,
|
||||
groupDAL,
|
||||
groupProjectDAL,
|
||||
@@ -466,6 +479,7 @@ export const registerRoutes = async (
|
||||
orgDAL,
|
||||
incidentContactDAL,
|
||||
tokenService,
|
||||
projectUserAdditionalPrivilegeDAL,
|
||||
projectDAL,
|
||||
projectMembershipDAL,
|
||||
orgMembershipDAL,
|
||||
@@ -498,6 +512,16 @@ export const registerRoutes = async (
|
||||
keyStore,
|
||||
licenseService
|
||||
});
|
||||
const orgAdminService = orgAdminServiceFactory({
|
||||
projectDAL,
|
||||
permissionService,
|
||||
projectUserMembershipRoleDAL,
|
||||
userDAL,
|
||||
projectBotDAL,
|
||||
projectKeyDAL,
|
||||
projectMembershipDAL
|
||||
});
|
||||
|
||||
const rateLimitService = rateLimitServiceFactory({
|
||||
rateLimitDAL,
|
||||
licenseService
|
||||
@@ -528,10 +552,12 @@ export const registerRoutes = async (
|
||||
projectBotDAL,
|
||||
orgDAL,
|
||||
userDAL,
|
||||
projectUserAdditionalPrivilegeDAL,
|
||||
userGroupMembershipDAL,
|
||||
smtpService,
|
||||
projectKeyDAL,
|
||||
projectRoleDAL,
|
||||
groupProjectDAL,
|
||||
licenseService
|
||||
});
|
||||
const projectUserAdditionalPrivilegeService = projectUserAdditionalPrivilegeServiceFactory({
|
||||
@@ -568,10 +594,15 @@ export const registerRoutes = async (
|
||||
const certificateAuthorityCertDAL = certificateAuthorityCertDALFactory(db);
|
||||
const certificateAuthoritySecretDAL = certificateAuthoritySecretDALFactory(db);
|
||||
const certificateAuthorityCrlDAL = certificateAuthorityCrlDALFactory(db);
|
||||
const certificateTemplateDAL = certificateTemplateDALFactory(db);
|
||||
|
||||
const certificateDAL = certificateDALFactory(db);
|
||||
const certificateBodyDAL = certificateBodyDALFactory(db);
|
||||
|
||||
const pkiAlertDAL = pkiAlertDALFactory(db);
|
||||
const pkiCollectionDAL = pkiCollectionDALFactory(db);
|
||||
const pkiCollectionItemDAL = pkiCollectionItemDALFactory(db);
|
||||
|
||||
const certificateService = certificateServiceFactory({
|
||||
certificateDAL,
|
||||
certificateBodyDAL,
|
||||
@@ -599,9 +630,12 @@ export const registerRoutes = async (
|
||||
certificateAuthorityCertDAL,
|
||||
certificateAuthoritySecretDAL,
|
||||
certificateAuthorityCrlDAL,
|
||||
certificateTemplateDAL,
|
||||
certificateAuthorityQueue,
|
||||
certificateDAL,
|
||||
certificateBodyDAL,
|
||||
pkiCollectionDAL,
|
||||
pkiCollectionItemDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
permissionService
|
||||
@@ -616,6 +650,27 @@ export const registerRoutes = async (
|
||||
licenseService
|
||||
});
|
||||
|
||||
const certificateTemplateService = certificateTemplateServiceFactory({
|
||||
certificateTemplateDAL,
|
||||
certificateAuthorityDAL,
|
||||
permissionService
|
||||
});
|
||||
|
||||
const pkiAlertService = pkiAlertServiceFactory({
|
||||
pkiAlertDAL,
|
||||
pkiCollectionDAL,
|
||||
permissionService,
|
||||
smtpService
|
||||
});
|
||||
|
||||
const pkiCollectionService = pkiCollectionServiceFactory({
|
||||
pkiCollectionDAL,
|
||||
pkiCollectionItemDAL,
|
||||
certificateAuthorityDAL,
|
||||
certificateDAL,
|
||||
permissionService
|
||||
});
|
||||
|
||||
const projectService = projectServiceFactory({
|
||||
permissionService,
|
||||
projectDAL,
|
||||
@@ -632,10 +687,14 @@ export const registerRoutes = async (
|
||||
licenseService,
|
||||
certificateAuthorityDAL,
|
||||
certificateDAL,
|
||||
pkiAlertDAL,
|
||||
pkiCollectionDAL,
|
||||
projectUserMembershipRoleDAL,
|
||||
identityProjectMembershipRoleDAL,
|
||||
keyStore,
|
||||
kmsService
|
||||
kmsService,
|
||||
projectBotDAL,
|
||||
certificateTemplateDAL
|
||||
});
|
||||
|
||||
const projectEnvService = projectEnvServiceFactory({
|
||||
@@ -677,8 +736,7 @@ export const registerRoutes = async (
|
||||
permissionService,
|
||||
webhookDAL,
|
||||
projectEnvDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
projectDAL
|
||||
});
|
||||
|
||||
const secretTagService = secretTagServiceFactory({ secretTagDAL, permissionService });
|
||||
@@ -699,6 +757,7 @@ export const registerRoutes = async (
|
||||
kmsService
|
||||
});
|
||||
const secretQueueService = secretQueueFactory({
|
||||
keyStore,
|
||||
queueService,
|
||||
secretDAL,
|
||||
folderDAL,
|
||||
@@ -784,7 +843,8 @@ export const registerRoutes = async (
|
||||
secretVersionTagV2BridgeDAL,
|
||||
smtpService,
|
||||
projectEnvDAL,
|
||||
userDAL
|
||||
userDAL,
|
||||
licenseService
|
||||
});
|
||||
|
||||
const secretService = secretServiceFactory({
|
||||
@@ -885,14 +945,29 @@ export const registerRoutes = async (
|
||||
folderDAL,
|
||||
integrationDAL,
|
||||
integrationAuthDAL,
|
||||
secretQueueService
|
||||
secretQueueService,
|
||||
integrationAuthService,
|
||||
projectBotService,
|
||||
secretV2BridgeDAL,
|
||||
secretImportDAL,
|
||||
secretDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const accessTokenQueue = accessTokenQueueServiceFactory({
|
||||
keyStore,
|
||||
identityAccessTokenDAL,
|
||||
queueService,
|
||||
serviceTokenDAL
|
||||
});
|
||||
|
||||
const serviceTokenService = serviceTokenServiceFactory({
|
||||
projectEnvDAL,
|
||||
serviceTokenDAL,
|
||||
userDAL,
|
||||
permissionService,
|
||||
projectDAL
|
||||
projectDAL,
|
||||
accessTokenQueue
|
||||
});
|
||||
|
||||
const identityService = identityServiceFactory({
|
||||
@@ -902,10 +977,13 @@ export const registerRoutes = async (
|
||||
identityProjectDAL,
|
||||
licenseService
|
||||
});
|
||||
|
||||
const identityAccessTokenService = identityAccessTokenServiceFactory({
|
||||
identityAccessTokenDAL,
|
||||
identityOrgMembershipDAL
|
||||
identityOrgMembershipDAL,
|
||||
accessTokenQueue
|
||||
});
|
||||
|
||||
const identityProjectService = identityProjectServiceFactory({
|
||||
permissionService,
|
||||
projectDAL,
|
||||
@@ -988,9 +1066,7 @@ export const registerRoutes = async (
|
||||
queueService,
|
||||
dynamicSecretLeaseDAL,
|
||||
dynamicSecretProviders,
|
||||
dynamicSecretDAL,
|
||||
kmsService,
|
||||
folderDAL
|
||||
dynamicSecretDAL
|
||||
});
|
||||
const dynamicSecretService = dynamicSecretServiceFactory({
|
||||
projectDAL,
|
||||
@@ -1000,8 +1076,7 @@ export const registerRoutes = async (
|
||||
dynamicSecretProviders,
|
||||
folderDAL,
|
||||
permissionService,
|
||||
licenseService,
|
||||
kmsService
|
||||
licenseService
|
||||
});
|
||||
const dynamicSecretLeaseService = dynamicSecretLeaseServiceFactory({
|
||||
projectDAL,
|
||||
@@ -1011,8 +1086,7 @@ export const registerRoutes = async (
|
||||
dynamicSecretLeaseDAL,
|
||||
dynamicSecretProviders,
|
||||
folderDAL,
|
||||
licenseService,
|
||||
kmsService
|
||||
licenseService
|
||||
});
|
||||
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
|
||||
auditLogDAL,
|
||||
@@ -1022,7 +1096,13 @@ export const registerRoutes = async (
|
||||
snapshotDAL,
|
||||
identityAccessTokenDAL,
|
||||
secretSharingDAL,
|
||||
secretVersionV2DAL: secretVersionV2BridgeDAL
|
||||
secretVersionV2DAL: secretVersionV2BridgeDAL,
|
||||
identityUniversalAuthClientSecretDAL: identityUaClientSecretDAL
|
||||
});
|
||||
|
||||
const dailyExpiringPkiItemAlert = dailyExpiringPkiItemAlertQueueServiceFactory({
|
||||
queueService,
|
||||
pkiAlertService
|
||||
});
|
||||
|
||||
const oidcService = oidcConfigServiceFactory({
|
||||
@@ -1049,6 +1129,7 @@ export const registerRoutes = async (
|
||||
|
||||
await telemetryQueue.startTelemetryCheck();
|
||||
await dailyResourceCleanUp.startCleanUp();
|
||||
await dailyExpiringPkiItemAlert.startSendingAlerts();
|
||||
await kmsService.startService();
|
||||
|
||||
// inject all services
|
||||
@@ -1106,7 +1187,10 @@ export const registerRoutes = async (
|
||||
auditLogStream: auditLogStreamService,
|
||||
certificate: certificateService,
|
||||
certificateAuthority: certificateAuthorityService,
|
||||
certificateTemplate: certificateTemplateService,
|
||||
certificateAuthorityCrl: certificateAuthorityCrlService,
|
||||
pkiAlert: pkiAlertService,
|
||||
pkiCollection: pkiCollectionService,
|
||||
secretScanning: secretScanningService,
|
||||
license: licenseService,
|
||||
trustedIp: trustedIpService,
|
||||
@@ -1117,7 +1201,8 @@ export const registerRoutes = async (
|
||||
identityProjectAdditionalPrivilege: identityProjectAdditionalPrivilegeService,
|
||||
secretSharing: secretSharingService,
|
||||
userEngagement: userEngagementService,
|
||||
externalKms: externalKmsService
|
||||
externalKms: externalKmsService,
|
||||
orgAdmin: orgAdminService
|
||||
});
|
||||
|
||||
const cronJobs: CronJob[] = [];
|
||||
@@ -1134,6 +1219,7 @@ export const registerRoutes = async (
|
||||
|
||||
await server.register(injectIdentity, { userDAL, serviceTokenDAL });
|
||||
await server.register(injectPermission);
|
||||
await server.register(injectRateLimits);
|
||||
await server.register(injectAuditLogInfo);
|
||||
|
||||
server.route({
|
||||
|
@@ -63,8 +63,8 @@ export const secretRawSchema = z.object({
|
||||
version: z.number(),
|
||||
type: z.string(),
|
||||
secretKey: z.string(),
|
||||
secretValue: z.string().optional(),
|
||||
secretComment: z.string().optional(),
|
||||
secretValue: z.string(),
|
||||
secretComment: z.string(),
|
||||
secretReminderNote: z.string().nullable().optional(),
|
||||
secretReminderRepeatDays: z.number().nullable().optional(),
|
||||
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
|
||||
@@ -129,7 +129,11 @@ export const SanitizedRoleSchema = ProjectRolesSchema.extend({
|
||||
});
|
||||
|
||||
export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
|
||||
encryptedConfig: true
|
||||
inputIV: true,
|
||||
inputTag: true,
|
||||
inputCiphertext: true,
|
||||
keyEncoding: true,
|
||||
algorithm: true
|
||||
});
|
||||
|
||||
export const SanitizedAuditLogStreamSchema = z.object({
|
||||
|
@@ -8,7 +8,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
import { CaStatus, CaType } from "@app/services/certificate-authority/certificate-authority-types";
|
||||
import { CaRenewalType, CaStatus, CaType } from "@app/services/certificate-authority/certificate-authority-types";
|
||||
import {
|
||||
validateAltNamesField,
|
||||
validateCaDateField
|
||||
@@ -275,15 +275,118 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:caId/renew",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Perform CA certificate renewal",
|
||||
params: z.object({
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.caId)
|
||||
}),
|
||||
body: z.object({
|
||||
type: z.nativeEnum(CaRenewalType).describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.type),
|
||||
notAfter: validateCaDateField.describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.notAfter)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.certificate),
|
||||
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.certificateChain),
|
||||
serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.RENEW_CA_CERT.serialNumber)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { certificate, certificateChain, serialNumber, ca } =
|
||||
await server.services.certificateAuthority.renewCaCert({
|
||||
caId: req.params.caId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.RENEW_CA,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificate,
|
||||
certificateChain,
|
||||
serialNumber
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/certificate",
|
||||
url: "/:caId/ca-certificates",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get cert and cert chain of a CA",
|
||||
description: "Get list of past and current CA certificates for a CA",
|
||||
params: z.object({
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.caId)
|
||||
}),
|
||||
response: {
|
||||
200: z.array(
|
||||
z.object({
|
||||
certificate: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.certificate),
|
||||
certificateChain: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.certificateChain),
|
||||
serialNumber: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.serialNumber),
|
||||
version: z.number().describe(CERTIFICATE_AUTHORITIES.GET_CA_CERTS.version)
|
||||
})
|
||||
)
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { caCerts, ca } = await server.services.certificateAuthority.getCaCerts({
|
||||
caId: req.params.caId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CA_CERTS,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return caCerts;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/certificate", // TODO: consider updating endpoint structure considering CA certificates
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get current CA cert and cert chain of a CA",
|
||||
params: z.object({
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT.caId)
|
||||
}),
|
||||
@@ -337,7 +440,7 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.caId)
|
||||
}),
|
||||
body: z.object({
|
||||
csr: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.csr),
|
||||
csr: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.csr),
|
||||
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.notBefore),
|
||||
notAfter: validateCaDateField.describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.notAfter),
|
||||
maxPathLength: z.number().min(-1).default(-1).describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.maxPathLength)
|
||||
@@ -453,7 +556,8 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
friendlyName: z.string().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.friendlyName),
|
||||
pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.pkiCollectionId),
|
||||
friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.friendlyName),
|
||||
commonName: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.commonName),
|
||||
altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.altNames),
|
||||
ttl: z
|
||||
@@ -516,4 +620,82 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:caId/sign-certificate",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Sign certificate from CA",
|
||||
params: z.object({
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.caId)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
csr: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.csr),
|
||||
pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.pkiCollectionId),
|
||||
friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.friendlyName),
|
||||
commonName: z.string().trim().min(1).optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.commonName),
|
||||
altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.altNames),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.ttl),
|
||||
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notBefore),
|
||||
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter)
|
||||
})
|
||||
.refine(
|
||||
(data) => {
|
||||
const { ttl, notAfter } = data;
|
||||
return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined);
|
||||
},
|
||||
{
|
||||
message: "Either ttl or notAfter must be present, but not both",
|
||||
path: ["ttl", "notAfter"]
|
||||
}
|
||||
),
|
||||
response: {
|
||||
200: z.object({
|
||||
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.certificate),
|
||||
issuingCaCertificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate),
|
||||
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain),
|
||||
serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } =
|
||||
await server.services.certificateAuthority.signCertFromCa({
|
||||
caId: req.params.caId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.SIGN_CERT,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn,
|
||||
serialNumber
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificate,
|
||||
certificateChain,
|
||||
issuingCaCertificate,
|
||||
serialNumber
|
||||
};
|
||||
}
|
||||
});
|
||||
};
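To make the ttl/notAfter constraint on the sign-certificate route concrete, a hypothetical request body (all values are placeholders); exactly one of ttl or notAfter may be supplied:

const body = {
  csr: "-----BEGIN CERTIFICATE REQUEST-----\n...\n-----END CERTIFICATE REQUEST-----",
  friendlyName: "service-a",
  commonName: "service-a.internal",
  altNames: "service-a.internal,service-a.example.com",
  ttl: "30d" // passing notAfter instead also works; passing both, or neither, is rejected
};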
|
||||
|
@@ -1,12 +1,17 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { CertificatesSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { CERTIFICATES } from "@app/lib/api-docs";
|
||||
import { CERTIFICATE_AUTHORITIES, CERTIFICATES } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CrlReason } from "@app/services/certificate/certificate-types";
|
||||
import {
|
||||
validateAltNamesField,
|
||||
validateCaDateField
|
||||
} from "@app/services/certificate-authority/certificate-authority-validators";
|
||||
|
||||
export const registerCertRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
@@ -55,6 +60,185 @@ export const registerCertRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/issue-certificate",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Issue certificate",
|
||||
body: z
|
||||
.object({
|
||||
caId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.caId),
|
||||
certificateTemplateId: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateTemplateId),
|
||||
pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.pkiCollectionId),
|
||||
friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.friendlyName),
|
||||
commonName: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.commonName),
|
||||
altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.altNames),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.ttl),
|
||||
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notBefore),
|
||||
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter)
|
||||
})
|
||||
.refine(
|
||||
(data) => {
|
||||
const { ttl, notAfter } = data;
|
||||
return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined);
|
||||
},
|
||||
{
|
||||
message: "Either ttl or notAfter must be present, but not both",
|
||||
path: ["ttl", "notAfter"]
|
||||
}
|
||||
)
|
||||
.refine(
|
||||
(data) =>
|
||||
(data.caId !== undefined && data.certificateTemplateId === undefined) ||
|
||||
(data.caId === undefined && data.certificateTemplateId !== undefined),
|
||||
{
|
||||
message: "Either CA ID or Certificate Template ID must be present, but not both",
|
||||
path: ["caId", "certificateTemplateId"]
|
||||
}
|
||||
),
|
||||
response: {
|
||||
200: z.object({
|
||||
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificate),
|
||||
issuingCaCertificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate),
|
||||
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain),
|
||||
privateKey: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.privateKey),
|
||||
serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { certificate, certificateChain, issuingCaCertificate, privateKey, serialNumber, ca } =
|
||||
await server.services.certificateAuthority.issueCertFromCa({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.ISSUE_CERT,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn,
|
||||
serialNumber
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificate,
|
||||
certificateChain,
|
||||
issuingCaCertificate,
|
||||
privateKey,
|
||||
serialNumber
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/sign-certificate",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Sign certificate",
|
||||
body: z
|
||||
.object({
|
||||
caId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.caId),
|
||||
certificateTemplateId: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateTemplateId),
|
||||
pkiCollectionId: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.pkiCollectionId),
|
||||
csr: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.csr),
|
||||
friendlyName: z.string().trim().optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.friendlyName),
|
||||
commonName: z.string().trim().min(1).optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.commonName),
|
||||
altNames: validateAltNamesField.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.altNames),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.ttl),
|
||||
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notBefore),
|
||||
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.notAfter)
|
||||
})
|
||||
.refine(
|
||||
(data) => {
|
||||
const { ttl, notAfter } = data;
|
||||
return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined);
|
||||
},
|
||||
{
|
||||
message: "Either ttl or notAfter must be present, but not both",
|
||||
path: ["ttl", "notAfter"]
|
||||
}
|
||||
)
|
||||
.refine(
|
||||
(data) =>
|
||||
(data.caId !== undefined && data.certificateTemplateId === undefined) ||
|
||||
(data.caId === undefined && data.certificateTemplateId !== undefined),
|
||||
{
|
||||
message: "Either CA ID or Certificate Template ID must be present, but not both",
|
||||
path: ["caId", "certificateTemplateId"]
|
||||
}
|
||||
),
|
||||
response: {
|
||||
200: z.object({
|
||||
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_CERT.certificate),
|
||||
issuingCaCertificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate),
|
||||
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain),
|
||||
serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } =
|
||||
await server.services.certificateAuthority.signCertFromCa({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.SIGN_CERT,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn,
|
||||
serialNumber
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
certificate,
|
||||
certificateChain,
|
||||
issuingCaCertificate,
|
||||
serialNumber
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:serialNumber/revoke",
|
||||
|
backend/src/server/routes/v1/certificate-template-router.ts (new file)
@@ -0,0 +1,205 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { sanitizedCertificateTemplate } from "@app/services/certificate-template/certificate-template-schema";
|
||||
import { validateTemplateRegexField } from "@app/services/certificate-template/certificate-template-validators";
|
||||
|
||||
export const registerCertificateTemplateRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:certificateTemplateId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().describe(CERTIFICATE_TEMPLATES.GET.certificateTemplateId)
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const certificateTemplate = await server.services.certificateTemplate.getCertTemplate({
|
||||
id: req.params.certificateTemplateId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: certificateTemplate.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
caId: z.string().describe(CERTIFICATE_TEMPLATES.CREATE.caId),
|
||||
pkiCollectionId: z.string().optional().describe(CERTIFICATE_TEMPLATES.CREATE.pkiCollectionId),
|
||||
name: z.string().min(1).describe(CERTIFICATE_TEMPLATES.CREATE.name),
|
||||
commonName: validateTemplateRegexField.describe(CERTIFICATE_TEMPLATES.CREATE.commonName),
|
||||
subjectAlternativeName: validateTemplateRegexField.describe(
|
||||
CERTIFICATE_TEMPLATES.CREATE.subjectAlternativeName
|
||||
),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.describe(CERTIFICATE_TEMPLATES.CREATE.ttl)
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const certificateTemplate = await server.services.certificateTemplate.createCertTemplate({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: certificateTemplate.projectId,
|
||||
event: {
|
||||
type: EventType.CREATE_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id,
|
||||
caId: certificateTemplate.caId,
|
||||
pkiCollectionId: certificateTemplate.pkiCollectionId as string,
|
||||
name: certificateTemplate.name,
|
||||
commonName: certificateTemplate.commonName,
|
||||
subjectAlternativeName: certificateTemplate.subjectAlternativeName,
|
||||
ttl: certificateTemplate.ttl
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
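A hypothetical creation payload for the route above; the CA id and regex patterns are placeholders. Both regex fields go through validateTemplateRegexField and ttl through the ms() check:

const body = {
  caId: "<ca-id>", // placeholder
  name: "internal-services",
  commonName: ".*\\.svc\\.acme\\.com",
  subjectAlternativeName: ".*\\.svc\\.acme\\.com",
  ttl: "90d"
};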
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:certificateTemplateId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
caId: z.string().optional().describe(CERTIFICATE_TEMPLATES.UPDATE.caId),
|
||||
pkiCollectionId: z.string().optional().describe(CERTIFICATE_TEMPLATES.UPDATE.pkiCollectionId),
|
||||
name: z.string().min(1).optional().describe(CERTIFICATE_TEMPLATES.UPDATE.name),
|
||||
commonName: validateTemplateRegexField.optional().describe(CERTIFICATE_TEMPLATES.UPDATE.commonName),
|
||||
subjectAlternativeName: validateTemplateRegexField
|
||||
.optional()
|
||||
.describe(CERTIFICATE_TEMPLATES.UPDATE.subjectAlternativeName),
|
||||
ttl: z
|
||||
.string()
|
||||
.refine((val) => ms(val) > 0, "TTL must be a positive number")
|
||||
.optional()
|
||||
.describe(CERTIFICATE_TEMPLATES.UPDATE.ttl)
|
||||
}),
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().describe(CERTIFICATE_TEMPLATES.UPDATE.certificateTemplateId)
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const certificateTemplate = await server.services.certificateTemplate.updateCertTemplate({
|
||||
...req.body,
|
||||
id: req.params.certificateTemplateId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: certificateTemplate.projectId,
|
||||
event: {
|
||||
type: EventType.UPDATE_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id,
|
||||
caId: certificateTemplate.caId,
|
||||
pkiCollectionId: certificateTemplate.pkiCollectionId as string,
|
||||
name: certificateTemplate.name,
|
||||
commonName: certificateTemplate.commonName,
|
||||
subjectAlternativeName: certificateTemplate.subjectAlternativeName,
|
||||
ttl: certificateTemplate.ttl
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:certificateTemplateId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
certificateTemplateId: z.string().describe(CERTIFICATE_TEMPLATES.DELETE.certificateTemplateId)
|
||||
}),
|
||||
response: {
|
||||
200: sanitizedCertificateTemplate
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const certificateTemplate = await server.services.certificateTemplate.deleteCertTemplate({
|
||||
id: req.params.certificateTemplateId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: certificateTemplate.projectId,
|
||||
event: {
|
||||
type: EventType.DELETE_CERTIFICATE_TEMPLATE,
|
||||
metadata: {
|
||||
certificateTemplateId: certificateTemplate.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return certificateTemplate;
|
||||
}
|
||||
});
|
||||
};
|
@@ -3,7 +3,7 @@ import { z } from "zod";
|
||||
import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgMembershipRole, OrgRolesSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { IDENTITIES } from "@app/lib/api-docs";
|
||||
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@@ -16,7 +16,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: creationLimit
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
|
@@ -3,6 +3,7 @@ import { registerAuthRoutes } from "./auth-router";
|
||||
import { registerProjectBotRouter } from "./bot-router";
|
||||
import { registerCaRouter } from "./certificate-authority-router";
|
||||
import { registerCertRouter } from "./certificate-router";
|
||||
import { registerCertificateTemplateRouter } from "./certificate-template-router";
|
||||
import { registerIdentityAccessTokenRouter } from "./identity-access-token-router";
|
||||
import { registerIdentityAwsAuthRouter } from "./identity-aws-iam-auth-router";
|
||||
import { registerIdentityAzureAuthRouter } from "./identity-azure-auth-router";
|
||||
@@ -15,8 +16,11 @@ import { registerIdentityUaRouter } from "./identity-universal-auth-router";
|
||||
import { registerIntegrationAuthRouter } from "./integration-auth-router";
|
||||
import { registerIntegrationRouter } from "./integration-router";
|
||||
import { registerInviteOrgRouter } from "./invite-org-router";
|
||||
import { registerOrgAdminRouter } from "./org-admin-router";
|
||||
import { registerOrgRouter } from "./organization-router";
|
||||
import { registerPasswordRouter } from "./password-router";
|
||||
import { registerPkiAlertRouter } from "./pki-alert-router";
|
||||
import { registerPkiCollectionRouter } from "./pki-collection-router";
|
||||
import { registerProjectEnvRouter } from "./project-env-router";
|
||||
import { registerProjectKeyRouter } from "./project-key-router";
|
||||
import { registerProjectMembershipRouter } from "./project-membership-router";
|
||||
@@ -50,6 +54,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
|
||||
await server.register(registerPasswordRouter, { prefix: "/password" });
|
||||
await server.register(registerOrgRouter, { prefix: "/organization" });
|
||||
await server.register(registerAdminRouter, { prefix: "/admin" });
|
||||
await server.register(registerOrgAdminRouter, { prefix: "/organization-admin" });
|
||||
await server.register(registerUserRouter, { prefix: "/user" });
|
||||
await server.register(registerInviteOrgRouter, { prefix: "/invite-org" });
|
||||
await server.register(registerUserActionRouter, { prefix: "/user-action" });
|
||||
@@ -72,6 +77,9 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
|
||||
async (pkiRouter) => {
|
||||
await pkiRouter.register(registerCaRouter, { prefix: "/ca" });
|
||||
await pkiRouter.register(registerCertRouter, { prefix: "/certificates" });
|
||||
await pkiRouter.register(registerCertificateTemplateRouter, { prefix: "/certificate-templates" });
|
||||
await pkiRouter.register(registerPkiAlertRouter, { prefix: "/alerts" });
|
||||
await pkiRouter.register(registerPkiCollectionRouter, { prefix: "/collections" });
|
||||
},
|
||||
{ prefix: "/pki" }
|
||||
);
|
||||
|
@@ -170,6 +170,12 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
params: z.object({
|
||||
integrationId: z.string().trim().describe(INTEGRATION.DELETE.integrationId)
|
||||
}),
|
||||
querystring: z.object({
|
||||
shouldDeleteIntegrationSecrets: z
|
||||
.enum(["true", "false"])
|
||||
.optional()
|
||||
.transform((val) => val === "true")
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
integration: IntegrationsSchema
|
||||
@@ -183,7 +189,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.integrationId
|
||||
id: req.params.integrationId,
|
||||
shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
@@ -205,7 +212,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
|
||||
targetService: integration.targetService,
|
||||
targetServiceId: integration.targetServiceId,
|
||||
path: integration.path,
|
||||
region: integration.region
|
||||
region: integration.region,
|
||||
shouldDeleteIntegrationSecrets: req.query.shouldDeleteIntegrationSecrets
|
||||
// eslint-disable-next-line
|
||||
}) as any
|
||||
}
|
||||
|
backend/src/server/routes/v1/org-admin-router.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
import { z } from "zod";

import { ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

import { SanitizedProjectSchema } from "../sanitizedSchemas";

export const registerOrgAdminRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/projects",
    config: {
      rateLimit: readLimit
    },
    schema: {
      querystring: z.object({
        search: z.string().optional(),
        offset: z.coerce.number().default(0),
        limit: z.coerce.number().max(100).default(50)
      }),
      response: {
        200: z.object({
          projects: SanitizedProjectSchema.array(),
          count: z.coerce.number()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { projects, count } = await server.services.orgAdmin.listOrgProjects({
        limit: req.query.limit,
        offset: req.query.offset,
        search: req.query.search,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        actorId: req.permission.id,
        actor: req.permission.type
      });
      return { projects, count };
    }
  });

  server.route({
    method: "POST",
    url: "/projects/:projectId/grant-admin-access",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        projectId: z.string()
      }),
      response: {
        200: z.object({
          membership: ProjectMembershipsSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { membership } = await server.services.orgAdmin.grantProjectAdminAccess({
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        actorId: req.permission.id,
        actor: req.permission.type,
        projectId: req.params.projectId
      });
      if (req.auth.authMode === AuthMode.JWT) {
        await server.services.auditLog.createAuditLog({
          ...req.auditLogInfo,
          projectId: req.params.projectId,
          event: {
            type: EventType.ORG_ADMIN_ACCESS_PROJECT,
            metadata: {
              projectId: req.params.projectId,
              username: req.auth.user.username,
              email: req.auth.user.email || "",
              userId: req.auth.userId
            }
          }
        });
      }

      return { membership };
    }
  });
};
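For context, this router is registered later in the diff under the "/organization-admin" prefix. A rough, hypothetical client-side sketch of calling the new project-listing endpoint follows; the host, the "/api/v1" base path, and the token variable are illustrative assumptions, not part of this change.

// Hypothetical usage sketch; endpoint shape inferred from registerOrgAdminRouter above.
const accessToken = process.env.INFISICAL_TOKEN ?? ""; // placeholder credential
const baseUrl = "https://app.infisical.com/api/v1"; // assumed base path
const res = await fetch(`${baseUrl}/organization-admin/projects?offset=0&limit=50`, {
  headers: { Authorization: `Bearer ${accessToken}` }
});
const { projects, count } = (await res.json()) as { projects: unknown[]; count: number };
console.log(`org admin can see ${count} project(s)`);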
backend/src/server/routes/v1/pki-alert-router.ts (new file, 198 lines)
@@ -0,0 +1,198 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { PkiAlertsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { ALERTS } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerPkiAlertRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Create PKI alert",
|
||||
body: z.object({
|
||||
projectId: z.string().trim().describe(ALERTS.CREATE.projectId),
|
||||
pkiCollectionId: z.string().trim().describe(ALERTS.CREATE.pkiCollectionId),
|
||||
name: z.string().trim().describe(ALERTS.CREATE.name),
|
||||
alertBeforeDays: z.number().describe(ALERTS.CREATE.alertBeforeDays),
|
||||
emails: z
|
||||
.array(z.string().trim().email({ message: "Invalid email address" }))
|
||||
.min(1, { message: "You must specify at least 1 email" })
|
||||
.max(5, { message: "You can specify a maximum of 5 emails" })
|
||||
.describe(ALERTS.CREATE.emails)
|
||||
}),
|
||||
response: {
|
||||
200: PkiAlertsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const alert = await server.services.pkiAlert.createPkiAlert({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: alert.projectId,
|
||||
event: {
|
||||
type: EventType.CREATE_PKI_ALERT,
|
||||
metadata: {
|
||||
pkiAlertId: alert.id,
|
||||
pkiCollectionId: alert.pkiCollectionId,
|
||||
name: alert.name,
|
||||
alertBeforeDays: alert.alertBeforeDays,
|
||||
recipientEmails: alert.recipientEmails
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return alert;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:alertId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get PKI alert",
|
||||
params: z.object({
|
||||
alertId: z.string().trim().describe(ALERTS.GET.alertId)
|
||||
}),
|
||||
response: {
|
||||
200: PkiAlertsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const alert = await server.services.pkiAlert.getPkiAlertById({
|
||||
alertId: req.params.alertId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: alert.projectId,
|
||||
event: {
|
||||
type: EventType.GET_PKI_ALERT,
|
||||
metadata: {
|
||||
pkiAlertId: alert.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return alert;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:alertId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Update PKI alert",
|
||||
params: z.object({
|
||||
alertId: z.string().trim().describe(ALERTS.UPDATE.alertId)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().optional().describe(ALERTS.UPDATE.name),
|
||||
alertBeforeDays: z.number().optional().describe(ALERTS.UPDATE.alertBeforeDays),
|
||||
pkiCollectionId: z.string().trim().optional().describe(ALERTS.UPDATE.pkiCollectionId),
|
||||
emails: z
|
||||
.array(z.string().trim().email({ message: "Invalid email address" }))
|
||||
.min(1, { message: "You must specify at least 1 email" })
|
||||
.max(5, { message: "You can specify a maximum of 5 emails" })
|
||||
.optional()
|
||||
.describe(ALERTS.UPDATE.emails)
|
||||
}),
|
||||
response: {
|
||||
200: PkiAlertsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const alert = await server.services.pkiAlert.updatePkiAlert({
|
||||
alertId: req.params.alertId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: alert.projectId,
|
||||
event: {
|
||||
type: EventType.UPDATE_PKI_ALERT,
|
||||
metadata: {
|
||||
pkiAlertId: alert.id,
|
||||
pkiCollectionId: alert.pkiCollectionId,
|
||||
name: alert.name,
|
||||
alertBeforeDays: alert.alertBeforeDays,
|
||||
recipientEmails: alert.recipientEmails
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return alert;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:alertId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Delete PKI alert",
|
||||
params: z.object({
|
||||
alertId: z.string().trim().describe(ALERTS.DELETE.alertId)
|
||||
}),
|
||||
response: {
|
||||
200: PkiAlertsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const alert = await server.services.pkiAlert.deletePkiAlert({
|
||||
alertId: req.params.alertId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: alert.projectId,
|
||||
event: {
|
||||
type: EventType.DELETE_PKI_ALERT,
|
||||
metadata: {
|
||||
pkiAlertId: alert.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return alert;
|
||||
}
|
||||
});
|
||||
};
|
backend/src/server/routes/v1/pki-collection-router.ts (new file, 338 lines)
@@ -0,0 +1,338 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { PkiCollectionItemsSchema, PkiCollectionsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { PKI_COLLECTIONS } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
|
||||
|
||||
export const registerPkiCollectionRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Create PKI collection",
|
||||
body: z.object({
|
||||
projectId: z.string().trim().describe(PKI_COLLECTIONS.CREATE.projectId),
|
||||
name: z.string().trim().describe(PKI_COLLECTIONS.CREATE.name),
|
||||
description: z.string().trim().default("").describe(PKI_COLLECTIONS.CREATE.description)
|
||||
}),
|
||||
response: {
|
||||
200: PkiCollectionsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const pkiCollection = await server.services.pkiCollection.createPkiCollection({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: pkiCollection.projectId,
|
||||
event: {
|
||||
type: EventType.CREATE_PKI_COLLECTION,
|
||||
metadata: {
|
||||
pkiCollectionId: pkiCollection.id,
|
||||
name: pkiCollection.name
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return pkiCollection;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:collectionId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get PKI collection",
|
||||
params: z.object({
|
||||
collectionId: z.string().trim().describe(PKI_COLLECTIONS.GET.collectionId)
|
||||
}),
|
||||
response: {
|
||||
200: PkiCollectionsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const pkiCollection = await server.services.pkiCollection.getPkiCollectionById({
|
||||
collectionId: req.params.collectionId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: pkiCollection.projectId,
|
||||
event: {
|
||||
type: EventType.GET_PKI_COLLECTION,
|
||||
metadata: {
|
||||
pkiCollectionId: pkiCollection.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return pkiCollection;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:collectionId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Update PKI collection",
|
||||
params: z.object({
|
||||
collectionId: z.string().trim().describe(PKI_COLLECTIONS.UPDATE.collectionId)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().optional().describe(PKI_COLLECTIONS.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PKI_COLLECTIONS.UPDATE.description)
|
||||
}),
|
||||
response: {
|
||||
200: PkiCollectionsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const pkiCollection = await server.services.pkiCollection.updatePkiCollection({
|
||||
collectionId: req.params.collectionId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: pkiCollection.projectId,
|
||||
event: {
|
||||
type: EventType.UPDATE_PKI_COLLECTION,
|
||||
metadata: {
|
||||
pkiCollectionId: pkiCollection.id,
|
||||
name: pkiCollection.name
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return pkiCollection;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:collectionId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Delete PKI collection",
|
||||
params: z.object({
|
||||
collectionId: z.string().trim().describe(PKI_COLLECTIONS.DELETE.collectionId)
|
||||
}),
|
||||
response: {
|
||||
200: PkiCollectionsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const pkiCollection = await server.services.pkiCollection.deletePkiCollection({
|
||||
collectionId: req.params.collectionId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: pkiCollection.projectId,
|
||||
event: {
|
||||
type: EventType.DELETE_PKI_COLLECTION,
|
||||
metadata: {
|
||||
pkiCollectionId: pkiCollection.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return pkiCollection;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:collectionId/items",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get items in PKI collection",
|
||||
params: z.object({
|
||||
collectionId: z.string().trim().describe(PKI_COLLECTIONS.LIST_ITEMS.collectionId)
|
||||
}),
|
||||
querystring: z.object({
|
||||
type: z.nativeEnum(PkiItemType).optional().describe(PKI_COLLECTIONS.LIST_ITEMS.type),
|
||||
offset: z.coerce.number().min(0).max(100).default(0).describe(PKI_COLLECTIONS.LIST_ITEMS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(25).describe(PKI_COLLECTIONS.LIST_ITEMS.limit)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
collectionItems: z.array(
|
||||
PkiCollectionItemsSchema.omit({ caId: true, certId: true }).extend({
|
||||
type: z.nativeEnum(PkiItemType),
|
||||
itemId: z.string().trim(),
|
||||
notBefore: z.date(),
|
||||
notAfter: z.date(),
|
||||
friendlyName: z.string().trim()
|
||||
})
|
||||
),
|
||||
totalCount: z.number()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { pkiCollection, pkiCollectionItems, totalCount } =
|
||||
await server.services.pkiCollection.getPkiCollectionItems({
|
||||
collectionId: req.params.collectionId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: pkiCollection.projectId,
|
||||
event: {
|
||||
type: EventType.GET_PKI_COLLECTION_ITEMS,
|
||||
metadata: {
|
||||
pkiCollectionId: pkiCollection.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
collectionItems: pkiCollectionItems,
|
||||
totalCount
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:collectionId/items",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Add item to PKI collection",
|
||||
params: z.object({
|
||||
collectionId: z.string().trim().describe(PKI_COLLECTIONS.ADD_ITEM.collectionId)
|
||||
}),
|
||||
body: z.object({
|
||||
type: z.nativeEnum(PkiItemType).describe(PKI_COLLECTIONS.ADD_ITEM.type),
|
||||
itemId: z.string().trim().describe(PKI_COLLECTIONS.ADD_ITEM.itemId)
|
||||
}),
|
||||
response: {
|
||||
200: PkiCollectionItemsSchema.omit({ caId: true, certId: true }).extend({
|
||||
type: z.nativeEnum(PkiItemType).describe(PKI_COLLECTIONS.ADD_ITEM.type),
|
||||
itemId: z.string().trim().describe(PKI_COLLECTIONS.ADD_ITEM.itemId)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { pkiCollection, pkiCollectionItem } = await server.services.pkiCollection.addItemToPkiCollection({
|
||||
collectionId: req.params.collectionId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: pkiCollection.projectId,
|
||||
event: {
|
||||
type: EventType.ADD_PKI_COLLECTION_ITEM,
|
||||
metadata: {
|
||||
pkiCollectionId: pkiCollection.id,
|
||||
pkiCollectionItemId: pkiCollectionItem.id,
|
||||
type: pkiCollectionItem.type,
|
||||
itemId: pkiCollectionItem.itemId
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return pkiCollectionItem;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:collectionId/items/:collectionItemId",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Remove item from PKI collection",
|
||||
params: z.object({
|
||||
collectionId: z.string().trim().describe(PKI_COLLECTIONS.DELETE_ITEM.collectionId),
|
||||
collectionItemId: z.string().trim().describe(PKI_COLLECTIONS.DELETE_ITEM.collectionItemId)
|
||||
}),
|
||||
response: {
|
||||
200: PkiCollectionItemsSchema.omit({ caId: true, certId: true }).extend({
|
||||
type: z.nativeEnum(PkiItemType).describe(PKI_COLLECTIONS.DELETE_ITEM.type),
|
||||
itemId: z.string().trim().describe(PKI_COLLECTIONS.DELETE_ITEM.itemId)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { pkiCollection, pkiCollectionItem } = await server.services.pkiCollection.removeItemFromPkiCollection({
|
||||
collectionId: req.params.collectionId,
|
||||
itemId: req.params.collectionItemId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: pkiCollection.projectId,
|
||||
event: {
|
||||
type: EventType.DELETE_PKI_COLLECTION_ITEM,
|
||||
metadata: {
|
||||
pkiCollectionId: pkiCollection.id,
|
||||
pkiCollectionItemId: pkiCollectionItem.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return pkiCollectionItem;
|
||||
}
|
||||
});
|
||||
};
|
@@ -59,12 +59,19 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
includeGroupMembers: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
}),
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
users: ProjectMembershipsSchema.extend({
|
||||
isGroupMember: z.boolean(),
|
||||
user: UsersSchema.pick({
|
||||
email: true,
|
||||
username: true,
|
||||
@@ -99,9 +106,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
includeGroupMembers: req.query.includeGroupMembers,
|
||||
projectId: req.params.workspaceId,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return { users };
|
||||
}
|
||||
});
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretTagsSchema } from "@app/db/schemas";
|
||||
@@ -49,7 +50,8 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
// akhilmhdh: for terraform backward compatibility
|
||||
workspaceTag: SecretTagsSchema.extend({ name: z.string() })
|
||||
})
|
||||
}
|
||||
},
|
||||
@@ -79,7 +81,8 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
workspaceTag: SecretTagsSchema
|
||||
// akhilmhdh: for terraform backward compatibility
|
||||
workspaceTag: SecretTagsSchema.extend({ name: z.string() })
|
||||
})
|
||||
}
|
||||
},
|
||||
@@ -108,8 +111,14 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
projectId: z.string().trim().describe(SECRET_TAGS.CREATE.projectId)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().describe(SECRET_TAGS.CREATE.name),
|
||||
slug: z.string().trim().describe(SECRET_TAGS.CREATE.slug),
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.describe(SECRET_TAGS.CREATE.slug)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
|
||||
}),
|
||||
color: z.string().trim().describe(SECRET_TAGS.CREATE.color)
|
||||
}),
|
||||
response: {
|
||||
@@ -144,8 +153,14 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
|
||||
tagId: z.string().trim().describe(SECRET_TAGS.UPDATE.tagId)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().trim().describe(SECRET_TAGS.UPDATE.name),
|
||||
slug: z.string().trim().describe(SECRET_TAGS.UPDATE.slug),
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.describe(SECRET_TAGS.UPDATE.slug)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
|
||||
}),
|
||||
color: z.string().trim().describe(SECRET_TAGS.UPDATE.color)
|
||||
}),
|
||||
response: {
|
||||
|
@@ -9,7 +9,7 @@ import {
|
||||
UsersSchema
|
||||
} from "@app/db/schemas";
|
||||
import { ORGANIZATIONS } from "@app/lib/api-docs";
|
||||
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
@@ -307,7 +307,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: creationLimit
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
|
@@ -1,14 +1,21 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { CertificateAuthoritiesSchema, CertificatesSchema, ProjectKeysSchema } from "@app/db/schemas";
|
||||
import {
|
||||
CertificateAuthoritiesSchema,
|
||||
CertificatesSchema,
|
||||
PkiAlertsSchema,
|
||||
PkiCollectionsSchema,
|
||||
ProjectKeysSchema
|
||||
} from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { PROJECTS } from "@app/lib/api-docs";
|
||||
import { creationLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
|
||||
import { sanitizedCertificateTemplate } from "@app/services/certificate-template/certificate-template-schema";
|
||||
import { ProjectFilterType } from "@app/services/project/project-types";
|
||||
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
|
||||
|
||||
@@ -142,7 +149,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
method: "POST",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: creationLimit
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Create a new project",
|
||||
@@ -392,4 +399,94 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
return { certificates, totalCount };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/pki-alerts",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
alerts: z.array(PkiAlertsSchema)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { alerts } = await server.services.project.listProjectAlerts({
|
||||
projectId: req.params.projectId,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type
|
||||
});
|
||||
|
||||
return { alerts };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/pki-collections",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
collections: z.array(PkiCollectionsSchema)
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { pkiCollections } = await server.services.project.listProjectPkiCollections({
|
||||
projectId: req.params.projectId,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type
|
||||
});
|
||||
|
||||
return { collections: pkiCollections };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:projectId/certificate-templates",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
certificateTemplates: sanitizedCertificateTemplate.array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { certificateTemplates } = await server.services.project.listProjectCertificateTemplates({
|
||||
projectId: req.params.projectId,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type
|
||||
});
|
||||
|
||||
return { certificateTemplates };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -59,9 +59,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
name: true,
|
||||
color: true
|
||||
}).array()
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
})
|
||||
)
|
||||
})
|
||||
@@ -116,16 +117,15 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).merge(
|
||||
z.object({
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
name: true,
|
||||
color: true
|
||||
}).array()
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).extend({
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
)
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
@@ -180,7 +180,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
.describe(RAW_SECRETS.LIST.includeImports)
|
||||
.describe(RAW_SECRETS.LIST.includeImports),
|
||||
tagSlugs: z
|
||||
.string()
|
||||
.describe(RAW_SECRETS.LIST.tagSlugs)
|
||||
.optional()
|
||||
// split by comma and trim the strings
|
||||
.transform((el) => (el ? el.split(",").map((i) => i.trim()) : []))
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -190,9 +196,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
name: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional()
|
||||
})
|
||||
@@ -251,7 +257,8 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
projectId: workspaceId,
|
||||
path: secretPath,
|
||||
includeImports: req.query.include_imports,
|
||||
recursive: req.query.recursive
|
||||
recursive: req.query.recursive,
|
||||
tagSlugs: req.query.tagSlugs
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
@@ -325,9 +332,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
name: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional()
|
||||
})
|
||||
@@ -731,9 +738,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
name: true,
|
||||
color: true
|
||||
}).array()
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
})
|
||||
.array(),
|
||||
imports: z
|
||||
|
backend/src/services/access-token-queue/access-token-queue.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
import { z } from "zod";

import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore";
import { applyJitter, secondsToMillis } from "@app/lib/dates";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";

import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TServiceTokenDALFactory } from "../service-token/service-token-dal";

type TAccessTokenQueueServiceFactoryDep = {
  queueService: TQueueServiceFactory;
  keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "updateById">;
  serviceTokenDAL: Pick<TServiceTokenDALFactory, "updateById">;
};

export type TAccessTokenQueueServiceFactory = ReturnType<typeof accessTokenQueueServiceFactory>;

export const AccessTokenStatusSchema = z.object({
  lastUpdatedAt: z.string().datetime(),
  numberOfUses: z.number()
});

export const accessTokenQueueServiceFactory = ({
  queueService,
  keyStore,
  identityAccessTokenDAL,
  serviceTokenDAL
}: TAccessTokenQueueServiceFactoryDep) => {
  const getIdentityTokenDetailsInCache = async (identityAccessTokenId: string) => {
    const tokenDetailsInCache = await keyStore.getItem(
      KeyStorePrefixes.IdentityAccessTokenStatusUpdate(identityAccessTokenId)
    );
    if (tokenDetailsInCache) {
      return AccessTokenStatusSchema.parseAsync(JSON.parse(tokenDetailsInCache));
    }
  };

  const updateServiceTokenStatus = async (serviceTokenId: string) => {
    await keyStore.setItemWithExpiry(
      KeyStorePrefixes.ServiceTokenStatusUpdate(serviceTokenId),
      KeyStoreTtls.AccessTokenStatusUpdateInSeconds,
      JSON.stringify({ lastUpdatedAt: new Date() })
    );
    await queueService.queue(
      QueueName.AccessTokenStatusUpdate,
      QueueJobs.ServiceTokenStatusUpdate,
      {
        serviceTokenId
      },
      {
        delay: applyJitter(secondsToMillis(KeyStoreTtls.AccessTokenStatusUpdateInSeconds / 2), secondsToMillis(10)),
        // https://docs.bullmq.io/guide/jobs/job-ids
        jobId: KeyStorePrefixes.ServiceTokenStatusUpdate(serviceTokenId).replaceAll(":", "_"),
        removeOnFail: true,
        removeOnComplete: true
      }
    );
  };

  const updateIdentityAccessTokenStatus = async (identityAccessTokenId: string, numberOfUses: number) => {
    await keyStore.setItemWithExpiry(
      KeyStorePrefixes.IdentityAccessTokenStatusUpdate(identityAccessTokenId),
      KeyStoreTtls.AccessTokenStatusUpdateInSeconds,
      JSON.stringify({ lastUpdatedAt: new Date(), numberOfUses })
    );
    await queueService.queue(
      QueueName.AccessTokenStatusUpdate,
      QueueJobs.IdentityAccessTokenStatusUpdate,
      {
        identityAccessTokenId,
        numberOfUses
      },
      {
        delay: applyJitter(secondsToMillis(KeyStoreTtls.AccessTokenStatusUpdateInSeconds / 2), secondsToMillis(10)),
        jobId: KeyStorePrefixes.IdentityAccessTokenStatusUpdate(identityAccessTokenId).replaceAll(":", "_"),
        removeOnFail: true,
        removeOnComplete: true
      }
    );
  };

  queueService.start(QueueName.AccessTokenStatusUpdate, async (job) => {
    // for identity token update
    if (job.name === QueueJobs.IdentityAccessTokenStatusUpdate && "identityAccessTokenId" in job.data) {
      const { identityAccessTokenId } = job.data;
      const tokenDetails = { lastUpdatedAt: new Date(job.timestamp), numberOfUses: job.data.numberOfUses };
      const tokenDetailsInCache = await getIdentityTokenDetailsInCache(identityAccessTokenId);
      if (tokenDetailsInCache) {
        tokenDetails.numberOfUses = tokenDetailsInCache.numberOfUses;
        tokenDetails.lastUpdatedAt = new Date(tokenDetailsInCache.lastUpdatedAt);
      }

      await identityAccessTokenDAL.updateById(identityAccessTokenId, {
        accessTokenLastUsedAt: tokenDetails.lastUpdatedAt,
        accessTokenNumUses: tokenDetails.numberOfUses
      });
      return;
    }

    // for service token
    if (job.name === QueueJobs.ServiceTokenStatusUpdate && "serviceTokenId" in job.data) {
      const { serviceTokenId } = job.data;
      const tokenDetailsInCache = await keyStore.getItem(KeyStorePrefixes.ServiceTokenStatusUpdate(serviceTokenId));
      let lastUsed = new Date(job.timestamp);
      if (tokenDetailsInCache) {
        const tokenDetails = await AccessTokenStatusSchema.pick({ lastUpdatedAt: true }).parseAsync(
          JSON.parse(tokenDetailsInCache)
        );
        lastUsed = new Date(tokenDetails.lastUpdatedAt);
      }

      await serviceTokenDAL.updateById(serviceTokenId, {
        lastUsed
      });
    }
  });

  queueService.listen(QueueName.AccessTokenStatusUpdate, "failed", (_, err) => {
    logger.error(err, `${QueueName.AccessTokenStatusUpdate}: Failed to update access token status`);
  });

  return { updateIdentityAccessTokenStatus, updateServiceTokenStatus, getIdentityTokenDetailsInCache };
};
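The queue above deliberately delays each status-update job by roughly half the cache TTL plus random jitter, so bursts of token usage collapse into a single database write and many tokens do not flush at the same instant. A minimal sketch of that delay calculation follows; the helper implementations and the TTL value are assumptions for illustration (the real ones live in @app/lib/dates and @app/keystore/keystore and are not shown in this diff).

// Sketch only: assumed behavior of the date helpers used in the queue options above.
const secondsToMillis = (seconds: number) => seconds * 1000;
const applyJitter = (baseMs: number, jitterMs: number) =>
  baseMs + Math.round(Math.random() * 2 * jitterMs) - jitterMs; // base ± jitter

const accessTokenStatusUpdateTtlSeconds = 300; // placeholder; actual value comes from KeyStoreTtls
const delayMs = applyJitter(
  secondsToMillis(accessTokenStatusUpdateTtlSeconds / 2),
  secondsToMillis(10)
);
// delayMs lands somewhere in [half the TTL - 10s, half the TTL + 10s]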
@@ -583,7 +583,13 @@ export const authLoginServiceFactory = ({
|
||||
} else {
|
||||
const isLinkingRequired = !user?.authMethods?.includes(authMethod);
|
||||
if (isLinkingRequired) {
|
||||
user = await userDAL.updateById(user.id, { authMethods: [...(user.authMethods || []), authMethod] });
|
||||
// we update the names here because upon org invitation, the names are set to be NULL
|
||||
// if user is signing up with SSO after invitation, their names should be set based on their SSO profile
|
||||
user = await userDAL.updateById(user.id, {
|
||||
authMethods: [...(user.authMethods || []), authMethod],
|
||||
firstName: !user.isAccepted ? firstName : undefined,
|
||||
lastName: !user.isAccepted ? lastName : undefined
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1,11 +1,17 @@
|
||||
import * as x509 from "@peculiar/x509";
|
||||
import crypto from "crypto";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
|
||||
|
||||
import { CertKeyAlgorithm, CertStatus } from "../certificate/certificate-types";
|
||||
import { TDNParts, TGetCaCertChainDTO, TGetCaCredentialsDTO, TRebuildCaCrlDTO } from "./certificate-authority-types";
|
||||
import {
|
||||
TDNParts,
|
||||
TGetCaCertChainDTO,
|
||||
TGetCaCertChainsDTO,
|
||||
TGetCaCredentialsDTO,
|
||||
TRebuildCaCrlDTO
|
||||
} from "./certificate-authority-types";
|
||||
|
||||
export const createDistinguishedName = (parts: TDNParts) => {
|
||||
const dnParts = [];
|
||||
@@ -18,6 +24,40 @@ export const createDistinguishedName = (parts: TDNParts) => {
|
||||
return dnParts.join(", ");
|
||||
};
|
||||
|
||||
export const parseDistinguishedName = (dn: string): TDNParts => {
  const parts: TDNParts = {};
  const dnParts = dn.split(/,\s*/);

  for (const part of dnParts) {
    const [key, value] = part.split("=");
    switch (key.toUpperCase()) {
      case "C":
        parts.country = value;
        break;
      case "O":
        parts.organization = value;
        break;
      case "OU":
        parts.ou = value;
        break;
      case "ST":
        parts.province = value;
        break;
      case "CN":
        parts.commonName = value;
        break;
      case "L":
        parts.locality = value;
        break;
      default:
        // Ignore unrecognized keys
        break;
    }
  }

  return parts;
};
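A small round-trip sketch of the two DN helpers in this file; the sample values are made up for illustration, and the exact attribute ordering of the generated string depends on createDistinguishedName.

// Hypothetical round trip using createDistinguishedName / parseDistinguishedName above.
const dn = createDistinguishedName({
  country: "US",
  organization: "Example Org",
  commonName: "Example Root CA"
});
// dn is something like "C=US, O=Example Org, CN=Example Root CA"
const parts = parseDistinguishedName(dn);
// parts.commonName === "Example Root CA"; attributes with unrecognized keys are ignored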
|
||||
export const keyAlgorithmToAlgCfg = (keyAlgorithm: CertKeyAlgorithm) => {
|
||||
switch (keyAlgorithm) {
|
||||
case CertKeyAlgorithm.RSA_4096:
|
||||
@@ -55,6 +95,8 @@ export const keyAlgorithmToAlgCfg = (keyAlgorithm: CertKeyAlgorithm) => {
|
||||
* Return the public and private key of CA with id [caId]
|
||||
* Note: credentials are returned as crypto.webcrypto.CryptoKey
|
||||
* suitable for use with @peculiar/x509 module
|
||||
*
|
||||
* TODO: Update to get latest CA Secret once support for CA renewal with new key pair is added
|
||||
*/
|
||||
export const getCaCredentials = async ({
|
||||
caId,
|
||||
@@ -64,10 +106,10 @@ export const getCaCredentials = async ({
|
||||
kmsService
|
||||
}: TGetCaCredentialsDTO) => {
|
||||
const ca = await certificateAuthorityDAL.findById(caId);
|
||||
if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
if (!ca) throw new NotFoundError({ message: "CA not found" });
|
||||
|
||||
const caSecret = await certificateAuthoritySecretDAL.findOne({ caId });
|
||||
if (!caSecret) throw new BadRequestError({ message: "CA secret not found" });
|
||||
if (!caSecret) throw new NotFoundError({ message: "CA secret not found" });
|
||||
|
||||
const keyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: ca.projectId,
|
||||
@@ -98,26 +140,73 @@ export const getCaCredentials = async ({
|
||||
]);
|
||||
|
||||
return {
|
||||
caSecret,
|
||||
caPrivateKey,
|
||||
caPublicKey
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the decrypted pem-encoded certificate and certificate chain
|
||||
* Return the list of decrypted pem-encoded certificates and certificate chains
|
||||
* for CA with id [caId].
|
||||
*/
|
||||
export const getCaCertChain = async ({
|
||||
export const getCaCertChains = async ({
|
||||
caId,
|
||||
certificateAuthorityDAL,
|
||||
certificateAuthorityCertDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
}: TGetCaCertChainDTO) => {
|
||||
}: TGetCaCertChainsDTO) => {
|
||||
const ca = await certificateAuthorityDAL.findById(caId);
|
||||
if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
if (!ca) throw new NotFoundError({ message: "CA not found" });
|
||||
|
||||
const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
|
||||
const keyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: ca.projectId,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const kmsDecryptor = await kmsService.decryptWithKmsKey({
|
||||
kmsId: keyId
|
||||
});
|
||||
|
||||
const caCerts = await certificateAuthorityCertDAL.find({ caId: ca.id }, { sort: [["version", "asc"]] });
|
||||
|
||||
const decryptedChains = await Promise.all(
|
||||
caCerts.map(async (caCert) => {
|
||||
const decryptedCaCert = await kmsDecryptor({
|
||||
cipherTextBlob: caCert.encryptedCertificate
|
||||
});
|
||||
const caCertObj = new x509.X509Certificate(decryptedCaCert);
|
||||
const decryptedChain = await kmsDecryptor({
|
||||
cipherTextBlob: caCert.encryptedCertificateChain
|
||||
});
|
||||
return {
|
||||
certificate: caCertObj.toString("pem"),
|
||||
certificateChain: decryptedChain.toString("utf-8"),
|
||||
serialNumber: caCertObj.serialNumber,
|
||||
version: caCert.version
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
return decryptedChains;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the decrypted pem-encoded certificate and certificate chain
|
||||
* corresponding to CA certificate with id [caCertId].
|
||||
*/
|
||||
export const getCaCertChain = async ({
|
||||
caCertId,
|
||||
certificateAuthorityDAL,
|
||||
certificateAuthorityCertDAL,
|
||||
projectDAL,
|
||||
kmsService
|
||||
}: TGetCaCertChainDTO) => {
|
||||
const caCert = await certificateAuthorityCertDAL.findById(caCertId);
|
||||
if (!caCert) throw new NotFoundError({ message: "CA certificate not found" });
|
||||
const ca = await certificateAuthorityDAL.findById(caCert.caId);
|
||||
|
||||
const keyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: ca.projectId,
|
||||
@@ -160,7 +249,7 @@ export const rebuildCaCrl = async ({
|
||||
kmsService
|
||||
}: TRebuildCaCrlDTO) => {
|
||||
const ca = await certificateAuthorityDAL.findById(caId);
|
||||
if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
if (!ca) throw new NotFoundError({ message: "CA not found" });
|
||||
|
||||
const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id });
|
||||
|
||||
|
@@ -5,24 +5,31 @@ import crypto, { KeyObject } from "crypto";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { TCertificateAuthorities, TCertificateTemplates } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
|
||||
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TPkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
|
||||
import { TPkiCollectionItemDALFactory } from "@app/services/pki-collection/pki-collection-item-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
|
||||
|
||||
import { TCertificateAuthorityCrlDALFactory } from "../../ee/services/certificate-authority-crl/certificate-authority-crl-dal";
|
||||
import { CertKeyAlgorithm, CertStatus } from "../certificate/certificate-types";
|
||||
import { TCertificateTemplateDALFactory } from "../certificate-template/certificate-template-dal";
|
||||
import { validateCertificateDetailsAgainstTemplate } from "../certificate-template/certificate-template-fns";
|
||||
import { TCertificateAuthorityCertDALFactory } from "./certificate-authority-cert-dal";
|
||||
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
|
||||
import {
|
||||
createDistinguishedName,
|
||||
getCaCertChain,
|
||||
getCaCertChain, // TODO: consider rename
|
||||
getCaCertChains,
|
||||
getCaCredentials,
|
||||
keyAlgorithmToAlgCfg
|
||||
keyAlgorithmToAlgCfg,
|
||||
parseDistinguishedName
|
||||
} from "./certificate-authority-fns";
|
||||
import { TCertificateAuthorityQueueFactory } from "./certificate-authority-queue";
|
||||
import { TCertificateAuthoritySecretDALFactory } from "./certificate-authority-secret-dal";
|
||||
@@ -32,10 +39,13 @@ import {
|
||||
TCreateCaDTO,
|
||||
TDeleteCaDTO,
|
||||
TGetCaCertDTO,
|
||||
TGetCaCertsDTO,
|
||||
TGetCaCsrDTO,
|
||||
TGetCaDTO,
|
||||
TImportCertToCaDTO,
|
||||
TIssueCertFromCaDTO,
|
||||
TRenewCaCertDTO,
|
||||
TSignCertFromCaDTO,
|
||||
TSignIntermediateDTO,
|
||||
TUpdateCaDTO
|
||||
} from "./certificate-authority-types";
|
||||
@@ -46,12 +56,18 @@ type TCertificateAuthorityServiceFactoryDep = {
|
||||
TCertificateAuthorityDALFactory,
|
||||
"transaction" | "create" | "findById" | "updateById" | "deleteById" | "findOne"
|
||||
>;
|
||||
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "create" | "findOne" | "transaction">;
|
||||
certificateAuthorityCertDAL: Pick<
|
||||
TCertificateAuthorityCertDALFactory,
|
||||
"create" | "findOne" | "transaction" | "find" | "findById"
|
||||
>;
|
||||
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "create" | "findOne">;
|
||||
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "create" | "findOne" | "update">;
|
||||
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getById">;
|
||||
certificateAuthorityQueue: TCertificateAuthorityQueueFactory; // TODO: Pick
|
||||
certificateDAL: Pick<TCertificateDALFactory, "transaction" | "create" | "find">;
|
||||
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
|
||||
pkiCollectionDAL: Pick<TPkiCollectionDALFactory, "findById">;
|
||||
pkiCollectionItemDAL: Pick<TPkiCollectionItemDALFactory, "create">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug" | "findOne" | "updateById" | "findById" | "transaction">;
|
||||
kmsService: Pick<TKmsServiceFactory, "generateKmsKey" | "encryptWithKmsKey" | "decryptWithKmsKey">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
@@ -64,8 +80,11 @@ export const certificateAuthorityServiceFactory = ({
|
||||
certificateAuthorityCertDAL,
|
||||
certificateAuthoritySecretDAL,
|
||||
certificateAuthorityCrlDAL,
|
||||
certificateTemplateDAL,
|
||||
certificateDAL,
|
||||
certificateBodyDAL,
|
||||
pkiCollectionDAL,
|
||||
pkiCollectionItemDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
permissionService
|
||||
@@ -163,6 +182,24 @@ export const certificateAuthorityServiceFactory = ({
|
||||
kmsId: certificateManagerKmsId
|
||||
});
|
||||
|
||||
// https://nodejs.org/api/crypto.html#static-method-keyobjectfromkey
|
||||
const skObj = KeyObject.from(keys.privateKey);
|
||||
|
||||
const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({
|
||||
plainText: skObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
});
|
||||
|
||||
const caSecret = await certificateAuthoritySecretDAL.create(
|
||||
{
|
||||
caId: ca.id,
|
||||
encryptedPrivateKey
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (type === CaType.ROOT) {
|
||||
// note: create self-signed cert only applicable for root CA
|
||||
const cert = await x509.X509CertificateGenerator.createSelfSigned({
|
||||
@@ -189,11 +226,21 @@ export const certificateAuthorityServiceFactory = ({
|
||||
plainText: Buffer.alloc(0)
|
||||
});
|
||||
|
||||
await certificateAuthorityCertDAL.create(
|
||||
const caCert = await certificateAuthorityCertDAL.create(
|
||||
{
|
||||
caId: ca.id,
|
||||
encryptedCertificate,
|
||||
encryptedCertificateChain
|
||||
encryptedCertificateChain,
|
||||
version: 1,
|
||||
caSecretId: caSecret.id
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
await certificateAuthorityDAL.updateById(
|
||||
ca.id,
|
||||
{
|
||||
activeCaCertId: caCert.id
|
||||
},
|
||||
tx
|
||||
);
|
||||
@@ -221,24 +268,6 @@ export const certificateAuthorityServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
|
||||
// https://nodejs.org/api/crypto.html#static-method-keyobjectfromkey
|
||||
const skObj = KeyObject.from(keys.privateKey);
|
||||
|
||||
const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({
|
||||
plainText: skObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
});
|
||||
|
||||
await certificateAuthoritySecretDAL.create(
|
||||
{
|
||||
caId: ca.id,
|
||||
encryptedPrivateKey
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
return ca;
|
||||
});
|
||||
|
||||
@@ -340,9 +369,6 @@ export const certificateAuthorityServiceFactory = ({

if (ca.type === CaType.ROOT) throw new BadRequestError({ message: "Root CA cannot generate CSR" });

const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
if (caCert) throw new BadRequestError({ message: "CA already has a certificate installed" });

const { caPrivateKey, caPublicKey } = await getCaCredentials({
caId,
certificateAuthorityDAL,
@@ -379,9 +405,284 @@ export const certificateAuthorityServiceFactory = ({
};

/**
* Return certificate and certificate chain for CA
* Renew certificate for CA with id [caId]
* Note 1: This CA renewal method is only applicable to CAs with internal parent CAs
* Note 2: Currently implements CA renewal with same key-pair only
*/
const getCaCert = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertDTO) => {
const renewCaCert = async ({ caId, notAfter, actorId, actorAuthMethod, actor, actorOrgId }: TRenewCaCertDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });

if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.CertificateAuthorities
);

if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });

// get latest CA certificate
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);

const serialNumber = crypto.randomBytes(32).toString("hex");

const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});

const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: certificateManagerKmsId
});

const { caPrivateKey, caPublicKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});

const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);

const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});

const caCertObj = new x509.X509Certificate(decryptedCaCert);

let certificate = "";
let certificateChain = "";

switch (ca.type) {
case CaType.ROOT: {
if (new Date(notAfter) <= new Date(caCertObj.notAfter)) {
throw new BadRequestError({
message:
"New Root CA certificate must have notAfter date that is greater than the current certificate notAfter date"
});
}

const notBeforeDate = new Date();
const cert = await x509.X509CertificateGenerator.createSelfSigned({
name: ca.dn,
serialNumber,
notBefore: notBeforeDate,
notAfter: new Date(notAfter),
signingAlgorithm: alg,
keys: {
privateKey: caPrivateKey,
publicKey: caPublicKey
},
extensions: [
new x509.BasicConstraintsExtension(
true,
ca.maxPathLength === -1 || !ca.maxPathLength ? undefined : ca.maxPathLength,
true
),
new x509.ExtendedKeyUsageExtension(["1.2.3.4.5.6.7", "2.3.4.5.6.7.8"], true),
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
await x509.SubjectKeyIdentifierExtension.create(caPublicKey)
]
});

const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(cert.rawData))
});

const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.alloc(0)
});

await certificateAuthorityDAL.transaction(async (tx) => {
const newCaCert = await certificateAuthorityCertDAL.create(
{
caId: ca.id,
encryptedCertificate,
encryptedCertificateChain,
version: caCert.version + 1,
caSecretId: caSecret.id
},
tx
);

await certificateAuthorityDAL.updateById(
ca.id,
{
activeCaCertId: newCaCert.id,
notBefore: notBeforeDate,
notAfter: new Date(notAfter)
},
tx
);
});

certificate = cert.toString("pem");
break;
}
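// Illustrative note (not from the diff): both BasicConstraintsExtension calls in this file use the
// same ternary on ca.maxPathLength. Read in isolation it maps the stored value to the X.509
// pathLenConstraint as follows; the "-1 or null means unlimited" convention is an assumption
// inferred from the expression itself.
const toPathLenConstraint = (maxPathLength: number | null): number | undefined =>
  maxPathLength === -1 || !maxPathLength ? undefined : maxPathLength;

// toPathLenConstraint(-1)   -> undefined: no pathLenConstraint encoded (unlimited chain depth)
// toPathLenConstraint(null) -> undefined
// toPathLenConstraint(2)    -> 2: at most two further CA levels below this CA
// toPathLenConstraint(0)    -> undefined as well, because 0 is falsy in this expression, so a strict
//                              "issue leaves only" constraint cannot be expressed through it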
case CaType.INTERMEDIATE: {
if (!ca.parentCaId) {
// TODO: look into optimal way to support renewal of intermediate CA with external parent CA
throw new BadRequestError({
message: "Failed to renew intermediate CA certificate with external parent CA"
});
}

const parentCa = await certificateAuthorityDAL.findById(ca.parentCaId);
const { caPrivateKey: parentCaPrivateKey } = await getCaCredentials({
caId: parentCa.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});

// get latest parent CA certificate
if (!parentCa.activeCaCertId)
throw new BadRequestError({ message: "Parent CA does not have a certificate installed" });
const parentCaCert = await certificateAuthorityCertDAL.findById(parentCa.activeCaCertId);

const decryptedParentCaCert = await kmsDecryptor({
cipherTextBlob: parentCaCert.encryptedCertificate
});

const parentCaCertObj = new x509.X509Certificate(decryptedParentCaCert);

if (new Date(notAfter) <= new Date(caCertObj.notAfter)) {
throw new BadRequestError({
message:
"New Intermediate CA certificate must have notAfter date that is greater than the current certificate notAfter date"
});
}
if (new Date(notAfter) > new Date(parentCaCertObj.notAfter)) {
throw new BadRequestError({
message:
"New Intermediate CA certificate must have notAfter date that is equal to or smaller than the notAfter date of the parent CA's current certificate"
});
}
const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({
name: ca.dn,
keys: {
privateKey: caPrivateKey,
publicKey: caPublicKey
},
signingAlgorithm: alg,
extensions: [
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(
x509.KeyUsageFlags.keyCertSign |
x509.KeyUsageFlags.cRLSign |
x509.KeyUsageFlags.digitalSignature |
x509.KeyUsageFlags.keyEncipherment
)
],
attributes: [new x509.ChallengePasswordAttribute("password")]
});

const notBeforeDate = new Date();
const intermediateCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
issuer: parentCaCertObj.subject,
notBefore: notBeforeDate,
notAfter: new Date(notAfter),
signingKey: parentCaPrivateKey,
publicKey: csrObj.publicKey,
signingAlgorithm: alg,
extensions: [
new x509.KeyUsagesExtension(
x509.KeyUsageFlags.keyCertSign |
x509.KeyUsageFlags.cRLSign |
x509.KeyUsageFlags.digitalSignature |
x509.KeyUsageFlags.keyEncipherment,
true
),
new x509.BasicConstraintsExtension(
true,
ca.maxPathLength === -1 || !ca.maxPathLength ? undefined : ca.maxPathLength,
true
),
await x509.AuthorityKeyIdentifierExtension.create(parentCaCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey)
]
});

const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(intermediateCert.rawData))
});

const { caCert: parentCaCertificate, caCertChain: parentCaCertChain } = await getCaCertChain({
caCertId: parentCa.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});

certificateChain = `${parentCaCertificate}\n${parentCaCertChain}`.trim();

const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.from(certificateChain)
});

await certificateAuthorityDAL.transaction(async (tx) => {
const newCaCert = await certificateAuthorityCertDAL.create(
{
caId: ca.id,
encryptedCertificate,
encryptedCertificateChain,
version: caCert.version + 1,
caSecretId: caSecret.id
},
tx
);

await certificateAuthorityDAL.updateById(
ca.id,
{
activeCaCertId: newCaCert.id,
notBefore: notBeforeDate,
notAfter: new Date(notAfter)
},
tx
);
});

certificate = intermediateCert.toString("pem");
break;
}
default: {
throw new BadRequestError({
message: "Unrecognized CA type"
});
}
}

return {
certificate,
certificateChain,
serialNumber,
ca
};
};
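// Illustrative usage sketch (hypothetical caller, not part of this changeset): renewing a root CA
// certificate one year out with the existing key pair, per Note 2 above. The service instance and
// the actor/permission context are assumed to come from the backend's composition root and the
// authenticated request respectively.
declare const certificateAuthorityService: { renewCaCert: (dto: Record<string, unknown>) => Promise<unknown> };
declare const requestPermissionContext: { actor: unknown; actorId: string; actorAuthMethod: unknown; actorOrgId: string };

const renewRootCaExample = async () =>
  certificateAuthorityService.renewCaCert({
    caId: "<ca-id>", // placeholder
    type: CaRenewalType.EXISTING, // the only renewal type introduced in this change (same key pair)
    notAfter: new Date(Date.now() + 365 * 24 * 60 * 60 * 1000).toISOString(),
    ...requestPermissionContext
  });
// The result carries the new PEM certificate, the certificateChain, the hex serialNumber, and the CA row.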
const getCaCerts = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertsDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
@@ -398,7 +699,7 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);

const { caCert, caCertChain, serialNumber } = await getCaCertChain({
const caCertChains = await getCaCertChains({
caId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
@@ -406,6 +707,41 @@ export const certificateAuthorityServiceFactory = ({
kmsService
});

return {
ca,
caCerts: caCertChains
};
};

/**
* Return current certificate and certificate chain for CA
*/
const getCaCert = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCertDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateAuthorities
);

const { caCert, caCertChain, serialNumber } = await getCaCertChain({
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});

return {
certificate: caCert,
certificateChain: caCertChain,
@@ -445,6 +781,13 @@ export const certificateAuthorityServiceFactory = ({
);

if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });

const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);

if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
throw new BadRequestError({ message: "CA is expired" });
}

const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
@@ -457,7 +800,6 @@ export const certificateAuthorityServiceFactory = ({
kmsId: certificateManagerKmsId
});

const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
@@ -529,7 +871,7 @@ export const certificateAuthorityServiceFactory = ({
});

const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caId,
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
@@ -546,9 +888,9 @@ export const certificateAuthorityServiceFactory = ({
};

/**
* Import certificate for (un-installed) CA with id [caId].
* Import certificate for CA with id [caId].
* Note: Can be used to import an external certificate and certificate chain
* to be installed into the CA.
* to an installed or uninstalled CA.
*/
const importCertToCa = async ({
caId,
@@ -575,8 +917,18 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);

const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
if (caCert) throw new BadRequestError({ message: "CA has already imported a certificate" });
if (ca.parentCaId) {
/**
* re-evaluate in the future if we should allow users to import a new CA certificate for an intermediate
* CA chained to an internal parent CA. Doing so would allow users to re-chain the CA to a different
* internal CA.
*/
throw new BadRequestError({
message: "Cannot import certificate to intermediate CA chained to internal parent CA"
});
}

const caCert = ca.activeCaCertId ? await certificateAuthorityCertDAL.findById(ca.activeCaCertId) : undefined;

const certObj = new x509.X509Certificate(certificate);
const maxPathLength = certObj.getExtension(x509.BasicConstraintsExtension)?.pathLength;
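// Illustrative sketch of how an imported PEM is inspected, as in the two lines above; a standalone
// helper using @peculiar/x509 only. The return-field names are hypothetical.
import * as x509 from "@peculiar/x509";

const inspectImportedCert = (certificatePem: string) => {
  const certObj = new x509.X509Certificate(certificatePem);
  const basicConstraints = certObj.getExtension(x509.BasicConstraintsExtension);
  return {
    isCa: basicConstraints?.ca ?? false,
    maxPathLength: basicConstraints?.pathLength, // undefined when no pathLenConstraint is encoded
    notBefore: certObj.notBefore,
    notAfter: certObj.notAfter,
    serialNumber: certObj.serialNumber
  };
};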
@@ -623,12 +975,32 @@ export const certificateAuthorityServiceFactory = ({
plainText: Buffer.from(certificateChain)
});

// TODO: validate that latest key-pair of CA is used to sign the certificate
// once renewal with new key pair is supported
const { caSecret, caPublicKey } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});

const isCaAndCertPublicKeySame = Buffer.from(await crypto.subtle.exportKey("spki", caPublicKey)).equals(
Buffer.from(certObj.publicKey.rawData)
);

if (!isCaAndCertPublicKeySame) {
throw new BadRequestError({ message: "CA and certificate public key do not match" });
}

await certificateAuthorityCertDAL.transaction(async (tx) => {
await certificateAuthorityCertDAL.create(
const newCaCert = await certificateAuthorityCertDAL.create(
{
caId: ca.id,
encryptedCertificate,
encryptedCertificateChain
encryptedCertificateChain,
version: caCert ? caCert.version + 1 : 1,
caSecretId: caSecret.id
},
tx
);
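// Illustrative sketch of the public-key equality check above: both keys are compared by their
// DER-encoded SubjectPublicKeyInfo. Assumes Node's WebCrypto and a CryptoKey public key, as
// returned by getCaCredentials; the helper name is hypothetical.
import { webcrypto } from "crypto";

const hasSameSpki = async (caPublicKey: CryptoKey, certPublicKeyRawData: ArrayBuffer): Promise<boolean> => {
  const caSpki = Buffer.from(await webcrypto.subtle.exportKey("spki", caPublicKey)); // DER SPKI of the CA key
  return caSpki.equals(Buffer.from(certPublicKeyRawData)); // certObj.publicKey.rawData is already SPKI DER
};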
@@ -641,7 +1013,8 @@ export const certificateAuthorityServiceFactory = ({
notBefore: new Date(certObj.notBefore),
notAfter: new Date(certObj.notAfter),
serialNumber: certObj.serialNumber,
parentCaId: parentCa?.id
parentCaId: parentCa?.id,
activeCaCertId: newCaCert.id
},
tx
);
@@ -651,10 +1024,13 @@ export const certificateAuthorityServiceFactory = ({
};

/**
* Return new leaf certificate issued by CA with id [caId]
* Return new leaf certificate issued by CA with id [caId] and private key.
* Note: private key and CSR are generated within Infisical.
*/
const issueCertFromCa = async ({
caId,
certificateTemplateId,
pkiCollectionId,
friendlyName,
commonName,
altNames,
@@ -666,8 +1042,27 @@ export const certificateAuthorityServiceFactory = ({
actor,
actorOrgId
}: TIssueCertFromCaDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
let ca: TCertificateAuthorities | undefined;
let certificateTemplate: TCertificateTemplates | undefined;
let collectionId = pkiCollectionId;

if (caId) {
ca = await certificateAuthorityDAL.findById(caId);
} else if (certificateTemplateId) {
certificateTemplate = await certificateTemplateDAL.getById(certificateTemplateId);
if (!certificateTemplate) {
throw new NotFoundError({
message: "Certificate template not found"
});
}

collectionId = certificateTemplate.pkiCollectionId as string;
ca = await certificateAuthorityDAL.findById(certificateTemplate.caId);
}

if (!ca) {
throw new BadRequestError({ message: "CA not found" });
}

const { permission } = await permissionService.getProjectPermission(
actor,
@@ -680,9 +1075,19 @@ export const certificateAuthorityServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);

if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);

const caCert = await certificateAuthorityCertDAL.findOne({ caId: ca.id });
if (!caCert) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
throw new BadRequestError({ message: "CA is expired" });
}

// check PKI collection
if (collectionId) {
const pkiCollection = await pkiCollectionDAL.findById(collectionId);
if (!pkiCollection) throw new NotFoundError({ message: "PKI collection not found" });
if (pkiCollection.projectId !== ca.projectId) throw new BadRequestError({ message: "Invalid PKI collection" });
}

const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
@@ -752,11 +1157,13 @@ export const certificateAuthorityServiceFactory = ({
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey)
];

let altNamesArray: {
type: "email" | "dns";
value: string;
}[] = [];

if (altNames) {
const altNamesArray: {
type: "email" | "dns";
value: string;
}[] = altNames
altNamesArray = altNames
.split(",")
.map((name) => name.trim())
.map((altName) => {
@@ -784,6 +1191,18 @@ export const certificateAuthorityServiceFactory = ({
extensions.push(altNamesExtension);
}

if (certificateTemplate) {
validateCertificateDetailsAgainstTemplate(
{
commonName,
notBeforeDate,
notAfterDate,
altNames: altNamesArray.map((entry) => entry.value)
},
certificateTemplate
);
}

const serialNumber = crypto.randomBytes(32).toString("hex");
const leafCert = await x509.X509CertificateGenerator.create({
serialNumber,
@@ -810,7 +1229,9 @@ export const certificateAuthorityServiceFactory = ({
await certificateDAL.transaction(async (tx) => {
const cert = await certificateDAL.create(
{
caId: ca.id,
caId: (ca as TCertificateAuthorities).id,
caCertId: caCert.id,
certificateTemplateId: certificateTemplate?.id,
status: CertStatus.ACTIVE,
friendlyName: friendlyName || commonName,
commonName,
@@ -830,11 +1251,21 @@ export const certificateAuthorityServiceFactory = ({
tx
);

if (collectionId) {
await pkiCollectionItemDAL.create(
{
pkiCollectionId: collectionId,
certId: cert.id
},
tx
);
}

return cert;
});

const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caId: ca.id,
caCertId: caCert.id,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
@@ -851,15 +1282,273 @@ export const certificateAuthorityServiceFactory = ({
};
};

/**
* Return new leaf certificate issued by CA with id [caId].
* Note: CSR is generated externally and submitted to Infisical.
*/
const signCertFromCa = async ({
caId,
certificateTemplateId,
csr,
pkiCollectionId,
friendlyName,
commonName,
altNames,
ttl,
notBefore,
notAfter,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TSignCertFromCaDTO) => {
let ca: TCertificateAuthorities | undefined;
let certificateTemplate: TCertificateTemplates | undefined;
let collectionId = pkiCollectionId;

if (caId) {
ca = await certificateAuthorityDAL.findById(caId);
} else if (certificateTemplateId) {
certificateTemplate = await certificateTemplateDAL.getById(certificateTemplateId);
if (!certificateTemplate) {
throw new NotFoundError({
message: "Certificate template not found"
});
}

collectionId = certificateTemplate.pkiCollectionId as string;
ca = await certificateAuthorityDAL.findById(certificateTemplate.caId);
}

if (!ca) {
throw new BadRequestError({ message: "CA not found" });
}

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });

const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);

if (ca.notAfter && new Date() > new Date(ca.notAfter)) {
throw new BadRequestError({ message: "CA is expired" });
}

// check PKI collection
if (pkiCollectionId) {
const pkiCollection = await pkiCollectionDAL.findById(pkiCollectionId);
if (!pkiCollection) throw new NotFoundError({ message: "PKI collection not found" });
if (pkiCollection.projectId !== ca.projectId) throw new BadRequestError({ message: "Invalid PKI collection" });
}

const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});

const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: certificateManagerKmsId
});

const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});

const caCertObj = new x509.X509Certificate(decryptedCaCert);

const notBeforeDate = notBefore ? new Date(notBefore) : new Date();

let notAfterDate = new Date(new Date().setFullYear(new Date().getFullYear() + 1));
if (notAfter) {
notAfterDate = new Date(notAfter);
} else if (ttl) {
notAfterDate = new Date(new Date().getTime() + ms(ttl));
}

const caCertNotBeforeDate = new Date(caCertObj.notBefore);
const caCertNotAfterDate = new Date(caCertObj.notAfter);

// check not before constraint
if (notBeforeDate < caCertNotBeforeDate) {
throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
}

if (notBeforeDate > notAfterDate) throw new BadRequestError({ message: "notBefore date is after notAfter date" });

// check not after constraint
if (notAfterDate > caCertNotAfterDate) {
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
}
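// Illustrative sketch of the validity-window resolution above, factored into a helper; assumes the
// ms package imported in this file set, which converts TTL strings like "90d" to milliseconds.
// The helper name is hypothetical.
import ms from "ms";

const resolveValidity = (opts: { notBefore?: string; notAfter?: string; ttl?: string }) => {
  const notBeforeDate = opts.notBefore ? new Date(opts.notBefore) : new Date();
  let notAfterDate = new Date(new Date().setFullYear(new Date().getFullYear() + 1)); // default: one year
  if (opts.notAfter) notAfterDate = new Date(opts.notAfter);
  else if (opts.ttl) notAfterDate = new Date(Date.now() + ms(opts.ttl));
  if (notBeforeDate > notAfterDate) throw new Error("notBefore date is after notAfter date");
  return { notBeforeDate, notAfterDate };
};

// resolveValidity({ ttl: "90d" }) yields a window starting now and ending ~90 days out,
// still subject to the CA-certificate bounds checked above.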
const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);

const csrObj = new x509.Pkcs10CertificateRequest(csr);

const dn = parseDistinguishedName(csrObj.subject);
const cn = commonName || dn.commonName;

if (!cn)
throw new BadRequestError({
message: "A common name (CN) is required in the CSR or as a parameter to this endpoint"
});
const { caPrivateKey } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});

const extensions: x509.Extension[] = [
new x509.KeyUsagesExtension(x509.KeyUsageFlags.digitalSignature | x509.KeyUsageFlags.keyEncipherment, true),
new x509.BasicConstraintsExtension(false),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey)
];

let altNamesArray: {
type: "email" | "dns";
value: string;
}[] = [];
if (altNames) {
altNamesArray = altNames
.split(",")
.map((name) => name.trim())
.map((altName) => {
// check if the altName is a valid email
if (z.string().email().safeParse(altName).success) {
return {
type: "email",
value: altName
};
}

// check if the altName is a valid hostname
if (hostnameRegex.test(altName)) {
return {
type: "dns",
value: altName
};
}

// If altName is neither a valid email nor a valid hostname, throw an error or handle it accordingly
throw new Error(`Invalid altName: ${altName}`);
});

const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false);
extensions.push(altNamesExtension);
}
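// Illustrative sketch of the SAN parsing above as a standalone helper. The real code uses a shared
// hostnameRegex from the certificate utilities; the simplified pattern below is a stand-in for
// illustration only, and the helper name is hypothetical.
import z from "zod";

const simpleHostnameRegex = /^(?=.{1,253}$)(([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?)\.)*[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/;

const parseAltNames = (altNames: string): { type: "email" | "dns"; value: string }[] =>
  altNames
    .split(",")
    .map((name) => name.trim())
    .map((altName) => {
      if (z.string().email().safeParse(altName).success) return { type: "email" as const, value: altName };
      if (simpleHostnameRegex.test(altName)) return { type: "dns" as const, value: altName };
      throw new Error(`Invalid altName: ${altName}`);
    });

// parseAltNames("app.acme.internal, ops@acme.internal") ->
//   [{ type: "dns", value: "app.acme.internal" }, { type: "email", value: "ops@acme.internal" }]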
if (certificateTemplate) {
validateCertificateDetailsAgainstTemplate(
{
commonName: cn,
notBeforeDate,
notAfterDate,
altNames: altNamesArray.map((entry) => entry.value)
},
certificateTemplate
);
}

const serialNumber = crypto.randomBytes(32).toString("hex");
const leafCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
issuer: caCertObj.subject,
notBefore: notBeforeDate,
notAfter: notAfterDate,
signingKey: caPrivateKey,
publicKey: csrObj.publicKey,
signingAlgorithm: alg,
extensions
});

const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(leafCert.rawData))
});

await certificateDAL.transaction(async (tx) => {
const cert = await certificateDAL.create(
{
caId: (ca as TCertificateAuthorities).id,
caCertId: caCert.id,
certificateTemplateId: certificateTemplate?.id,
status: CertStatus.ACTIVE,
friendlyName: friendlyName || csrObj.subject,
commonName: cn,
altNames,
serialNumber,
notBefore: notBeforeDate,
notAfter: notAfterDate
},
tx
);

await certificateBodyDAL.create(
{
certId: cert.id,
encryptedCertificate
},
tx
);

if (collectionId) {
await pkiCollectionItemDAL.create(
{
pkiCollectionId: collectionId,
certId: cert.id
},
tx
);
}

return cert;
});

const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});

return {
certificate: leafCert.toString("pem"),
certificateChain: `${issuingCaCertificate}\n${caCertChain}`.trim(),
issuingCaCertificate,
serialNumber,
ca
};
};
return {
createCa,
getCaById,
updateCaById,
deleteCaById,
getCaCsr,
renewCaCert,
getCaCerts,
getCaCert,
signIntermediate,
importCertToCa,
issueCertFromCa
issueCertFromCa,
signCertFromCa
};
};
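// Illustrative usage sketch (hypothetical caller, not part of this changeset): generating a key pair
// and CSR outside the service with @peculiar/x509 and WebCrypto, then submitting the PEM CSR to
// signCertFromCa. The service instance, ids, and permission context are placeholders.
import { webcrypto } from "crypto";
import * as x509 from "@peculiar/x509";

declare const certificateAuthorityService: { signCertFromCa: (dto: Record<string, unknown>) => Promise<unknown> };
declare const requestPermissionContext: Record<string, unknown>;

const signFromExternalCsr = async () => {
  const alg = { name: "RSASSA-PKCS1-v1_5", modulusLength: 2048, publicExponent: new Uint8Array([1, 0, 1]), hash: "SHA-256" };
  const keys = await webcrypto.subtle.generateKey(alg, true, ["sign", "verify"]);
  const csr = await x509.Pkcs10CertificateRequestGenerator.create({
    name: "CN=service.acme.internal",
    keys,
    signingAlgorithm: alg
  });

  return certificateAuthorityService.signCertFromCa({
    certificateTemplateId: "<template-id>", // or pass caId instead; the service resolves the CA from either
    csr: csr.toString("pem"),
    altNames: "service.acme.internal",
    ttl: "90d",
    ...requestPermissionContext // actor, actorId, actorAuthMethod, actorOrgId from the authenticated request
  });
};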
@@ -20,6 +20,10 @@ export enum CaStatus {
PENDING_CERTIFICATE = "pending-certificate"
}

export enum CaRenewalType {
EXISTING = "existing"
}

export type TCreateCaDTO = {
projectSlug: string;
type: CaType;
@@ -53,6 +57,16 @@ export type TGetCaCsrDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;

export type TRenewCaCertDTO = {
caId: string;
notAfter: string;
type: CaRenewalType;
} & Omit<TProjectPermission, "projectId">;

export type TGetCaCertsDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetCaCertDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;
@@ -72,7 +86,9 @@ export type TImportCertToCaDTO = {
} & Omit<TProjectPermission, "projectId">;

export type TIssueCertFromCaDTO = {
caId: string;
caId?: string;
certificateTemplateId?: string;
pkiCollectionId?: string;
friendlyName?: string;
commonName: string;
altNames: string;
@@ -81,6 +97,19 @@ export type TIssueCertFromCaDTO = {
notAfter?: string;
} & Omit<TProjectPermission, "projectId">;

export type TSignCertFromCaDTO = {
caId?: string;
csr: string;
certificateTemplateId?: string;
pkiCollectionId?: string;
friendlyName?: string;
commonName?: string;
altNames: string;
ttl: string;
notBefore?: string;
notAfter?: string;
} & Omit<TProjectPermission, "projectId">;
export type TDNParts = {
commonName?: string;
organization?: string;
@@ -98,10 +127,18 @@ export type TGetCaCredentialsDTO = {
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
};

export type TGetCaCertChainDTO = {
export type TGetCaCertChainsDTO = {
caId: string;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findOne">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "find">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
};

export type TGetCaCertChainDTO = {
caCertId: string;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
};
@@ -0,0 +1,57 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";

export type TCertificateTemplateDALFactory = ReturnType<typeof certificateTemplateDALFactory>;

export const certificateTemplateDALFactory = (db: TDbClient) => {
const certificateTemplateOrm = ormify(db, TableName.CertificateTemplate);

const getCertTemplatesByProjectId = async (projectId: string) => {
try {
const certTemplates = await db
.replicaNode()(TableName.CertificateTemplate)
.join(
TableName.CertificateAuthority,
`${TableName.CertificateAuthority}.id`,
`${TableName.CertificateTemplate}.caId`
)
.where(`${TableName.CertificateAuthority}.projectId`, "=", projectId)
.select(selectAllTableCols(TableName.CertificateTemplate))
.select(
db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority),
db.ref("projectId").withSchema(TableName.CertificateAuthority)
);

return certTemplates;
} catch (error) {
throw new DatabaseError({ error, name: "Get certificate templates by project ID" });
}
};

const getById = async (id: string) => {
try {
const certTemplate = await db
.replicaNode()(TableName.CertificateTemplate)
.join(
TableName.CertificateAuthority,
`${TableName.CertificateAuthority}.id`,
`${TableName.CertificateTemplate}.caId`
)
.where(`${TableName.CertificateTemplate}.id`, "=", id)
.select(selectAllTableCols(TableName.CertificateTemplate))
.select(
db.ref("projectId").withSchema(TableName.CertificateAuthority),
db.ref("friendlyName").as("caName").withSchema(TableName.CertificateAuthority)
)
.first();

return certTemplate;
} catch (error) {
throw new DatabaseError({ error, name: "Get certificate template by ID" });
}
};

return { ...certificateTemplateOrm, getCertTemplatesByProjectId, getById };
};
@@ -0,0 +1,36 @@
import ms from "ms";

import { TCertificateTemplates } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";

export const validateCertificateDetailsAgainstTemplate = (
cert: {
commonName: string;
notBeforeDate: Date;
notAfterDate: Date;
altNames: string[];
},
template: TCertificateTemplates
) => {
const commonNameRegex = new RegExp(template.commonName);
if (!commonNameRegex.test(cert.commonName)) {
throw new BadRequestError({
message: "Invalid common name based on template policy"
});
}

if (cert.notAfterDate.getTime() - cert.notBeforeDate.getTime() > ms(template.ttl)) {
throw new BadRequestError({
message: "Invalid validity date based on template policy"
});
}

const subjectAlternativeNameRegex = new RegExp(template.subjectAlternativeName);
cert.altNames.forEach((altName) => {
if (!subjectAlternativeNameRegex.test(altName)) {
throw new BadRequestError({
message: "Invalid subject alternative name based on template policy"
});
}
});
};
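// Illustrative usage sketch (not from this changeset): checking issuance parameters against a
// template before signing. Only the fields this helper reads are filled in; the cast stands in for
// a full TCertificateTemplates row, and the sample values are hypothetical.
const exampleTemplate = {
  commonName: "^[a-z0-9-]+\\.acme\\.internal$",
  subjectAlternativeName: "^[a-z0-9-]+\\.acme\\.internal$",
  ttl: "90d"
} as TCertificateTemplates;

validateCertificateDetailsAgainstTemplate(
  {
    commonName: "billing.acme.internal",
    notBeforeDate: new Date(),
    notAfterDate: new Date(Date.now() + ms("30d")), // within the 90d ceiling set by the template
    altNames: ["billing.acme.internal"]
  },
  exampleTemplate
); // throws BadRequestError when the CN, SAN, or validity window violates the template policy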
@@ -0,0 +1,18 @@
import z from "zod";

import { CertificateTemplatesSchema } from "@app/db/schemas";

export const sanitizedCertificateTemplate = CertificateTemplatesSchema.pick({
id: true,
caId: true,
name: true,
commonName: true,
subjectAlternativeName: true,
pkiCollectionId: true,
ttl: true
}).merge(
z.object({
projectId: z.string(),
caName: z.string()
})
);
@@ -0,0 +1,196 @@
import { ForbiddenError } from "@casl/ability";

import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";

import { TCertificateAuthorityDALFactory } from "../certificate-authority/certificate-authority-dal";
import { TCertificateTemplateDALFactory } from "./certificate-template-dal";
import {
TCreateCertTemplateDTO,
TDeleteCertTemplateDTO,
TGetCertTemplateDTO,
TUpdateCertTemplateDTO
} from "./certificate-template-types";

type TCertificateTemplateServiceFactoryDep = {
certificateTemplateDAL: TCertificateTemplateDALFactory;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};

export type TCertificateTemplateServiceFactory = ReturnType<typeof certificateTemplateServiceFactory>;

export const certificateTemplateServiceFactory = ({
certificateTemplateDAL,
certificateAuthorityDAL,
permissionService
}: TCertificateTemplateServiceFactoryDep) => {
const createCertTemplate = async ({
caId,
pkiCollectionId,
name,
commonName,
subjectAlternativeName,
ttl,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TCreateCertTemplateDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) {
throw new NotFoundError({
message: "CA not found"
});
}
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.CertificateTemplates
);

const { id } = await certificateTemplateDAL.create({
caId,
pkiCollectionId,
name,
commonName,
subjectAlternativeName,
ttl
});

const certificateTemplate = await certificateTemplateDAL.getById(id);
if (!certificateTemplate) {
throw new NotFoundError({
message: "Certificate template not found"
});
}

return certificateTemplate;
};

const updateCertTemplate = async ({
id,
caId,
pkiCollectionId,
name,
commonName,
subjectAlternativeName,
ttl,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdateCertTemplateDTO) => {
const certTemplate = await certificateTemplateDAL.getById(id);
if (!certTemplate) {
throw new NotFoundError({
message: "Certificate template not found."
});
}

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
certTemplate.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSub.CertificateTemplates
);

if (caId) {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca || ca.projectId !== certTemplate.projectId) {
throw new BadRequestError({
message: "Invalid CA"
});
}
}

await certificateTemplateDAL.updateById(certTemplate.id, {
caId,
pkiCollectionId,
commonName,
subjectAlternativeName,
name,
ttl
});

const updatedTemplate = await certificateTemplateDAL.getById(id);
if (!updatedTemplate) {
throw new NotFoundError({
message: "Certificate template not found"
});
}

return updatedTemplate;
};

const deleteCertTemplate = async ({ id, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteCertTemplateDTO) => {
const certTemplate = await certificateTemplateDAL.getById(id);
if (!certTemplate) {
throw new NotFoundError({
message: "Certificate template not found."
});
}

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
certTemplate.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.CertificateTemplates
);

await certificateTemplateDAL.deleteById(certTemplate.id);

return certTemplate;
};

const getCertTemplate = async ({ id, actorId, actorAuthMethod, actor, actorOrgId }: TGetCertTemplateDTO) => {
const certTemplate = await certificateTemplateDAL.getById(id);
if (!certTemplate) {
throw new NotFoundError({
message: "Certificate template not found."
});
}

const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
certTemplate.projectId,
actorAuthMethod,
actorOrgId
);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateTemplates
);

return certTemplate;
};

return {
createCertTemplate,
getCertTemplate,
deleteCertTemplate,
updateCertTemplate
};
};
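// Illustrative wiring sketch (hypothetical composition root; the concrete DAL and permission-service
// instances are assumed to be built during server bootstrap):
declare const certificateTemplateDAL: TCertificateTemplateDALFactory;
declare const certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
declare const permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;

export const certificateTemplateService = certificateTemplateServiceFactory({
  certificateTemplateDAL,
  certificateAuthorityDAL,
  permissionService
});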
@@ -0,0 +1,28 @@
import { TProjectPermission } from "@app/lib/types";

export type TCreateCertTemplateDTO = {
caId: string;
pkiCollectionId?: string;
name: string;
commonName: string;
subjectAlternativeName: string;
ttl: string;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateCertTemplateDTO = {
id: string;
caId?: string;
pkiCollectionId?: string;
name?: string;
commonName?: string;
subjectAlternativeName?: string;
ttl?: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetCertTemplateDTO = {
id: string;
} & Omit<TProjectPermission, "projectId">;

export type TDeleteCertTemplateDTO = {
id: string;
} & Omit<TProjectPermission, "projectId">;
@@ -0,0 +1,14 @@
import safe from "safe-regex";
import z from "zod";

export const validateTemplateRegexField = z
.string()
.min(1)
.max(100)
.regex(/^[a-zA-Z0-9 *@\-\\.\\]+$/, {
message: "Invalid pattern: only alphanumeric characters, spaces, *, ., @, -, and \\ are allowed."
})
// we ensure that the inputted pattern is computationally safe by limiting star height to 1
.refine((v) => safe(v), {
message: "Unsafe REGEX pattern"
});
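// Illustrative usage sketch (not from this changeset): the character allowlist rejects most regex
// metacharacters outright, and the safe-regex refine additionally guards against patterns whose
// star height exceeds 1. Sample values are hypothetical.
validateTemplateRegexField.parse(".*\\.acme\\.internal"); // ok: '.', '*', '\\' and alphanumerics are allowed
validateTemplateRegexField.parse("ops@acme.internal");    // ok: '@' and '.' are allowed
// validateTemplateRegexField.parse("(a+)+");             // throws: '(', ')' and '+' are outside the allowlist
// validateTemplateRegexField.parse("x".repeat(101));     // throws: exceeds the 100 character maximum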