Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-29 13:26:20 +00:00)

Compare commits (1 commit): misc/remov... -> daniel/add...

| Author | SHA1 | Date |
| --- | --- | --- |
|  | d38243a1c6 |  |
Changed files:

.env.migration.example, main.ts, vitest.e2e.config.ts
.github
.gitignore, README.md
backend
  e2e-test
  package-lock.json, package.json, scripts
  src
    @types
    db
      auditlog-knexfile.ts, index.ts, instance.ts
      migrations: 20240702131735_secret-approval-groups.ts, 20240806083221_secret-sharing-password.ts, 20240830142938_native-slack-integration.ts, 20240909145938_cert-template-enforcement.ts, 20240910070128_add-pki-key-usages.ts, 20240918005344_add-group-approvals.ts, 20240924100329_identity-metadata.ts, 20240925100349_managed-secret-sharing.ts, 20240930072738_add-oidc-auth-enforced-to-org.ts, 20241003220151_kms-key-cmek-alterations.ts, 20241005170802_kms-keys-temp-slug-col.ts, 20241007052025_make-audit-log-independent.ts, 20241007052449_partition-audit-logs.ts
      utils
      schemas: access-approval-policies-approvers.ts, certificate-authorities.ts, certificate-templates.ts, certificates.ts, identity-metadata.ts, index.ts, kms-keys.ts, models.ts, oidc-configs.ts, partitioned-audit-logs.ts, project-slack-configs.ts, secret-approval-policies-approvers.ts, secret-sharing.ts, slack-integrations.ts, super-admin.ts, workflow-integrations.ts
    ee
      routes/v1: access-approval-policy-router.ts, certificate-authority-crl-router.ts, dynamic-secret-lease-router.ts, dynamic-secret-router.ts, external-kms-router.ts, group-router.ts, identity-project-additional-privilege-router.ts, license-router.ts, project-role-router.ts, project-router.ts, rate-limit-router.ts, saml-router.ts, scim-router.ts, secret-approval-policy-router.ts, secret-approval-request-router.ts
      services
        access-approval-policy: access-approval-policy-dal.ts, access-approval-policy-fns.ts, access-approval-policy-service.ts, access-approval-policy-types.ts
        access-approval-request, audit-log-stream, audit-log, certificate-authority-crl, dynamic-secret-lease, dynamic-secret, external-kms, group, identity-project-additional-privilege, ldap-config, license, oidc
        permission: org-permission.ts, permission-dal.ts, permission-fns.ts, permission-service-types.ts, permission-service.ts, permission-types.ts, project-permission.ts
        project-user-additional-privilege, saml-config, scim, secret-approval-policy
        secret-approval-request: secret-approval-request-dal.ts, secret-approval-request-secret-dal.ts, secret-approval-request-service.ts
        secret-replication, secret-rotation, secret-scanning, secret-snapshot
    keystore
    lib: api-docs, axios, base64, casl, config, crypto, errors, fn, ip, knex, types, validator
    queue
    server
      app.ts
      plugins
      routes
        index.ts, sanitizedSchemas.ts
        v1: admin-router.ts, auth-router.ts, certificate-authority-router.ts, certificate-router.ts, certificate-template-router.ts, cmek-router.ts, dashboard-router.ts, identity-aws-iam-auth-router.ts, identity-azure-auth-router.ts, identity-gcp-auth-router.ts, identity-router.ts, index.ts, integration-router.ts, invite-org-router.ts, organization-router.ts, project-env-router.ts, project-router.ts, secret-folder-router.ts, secret-import-router.ts, secret-sharing-router.ts, slack-router.ts, sso-router.ts, user-router.ts, workflow-integration-router.ts
        v2: group-project-router.ts, identity-org-router.ts, identity-project-router.ts, organization-router.ts, project-membership-router.ts
        v3
    services
      api-key, auth-token, auth
      certificate-authority: certificate-authority-fns.ts, certificate-authority-queue.ts, certificate-authority-service.ts, certificate-authority-types.ts, certificate-authority-validators.ts
      certificate-template, certificate, cmek, external-migration, group-project, identity-access-token, identity-aws-auth, identity-azure-auth, identity-gcp-auth, identity-kubernetes-auth, identity-oidc-auth, identity-project, identity-token-auth, identity-ua, identity
      integration-auth: integration-app-list.ts, integration-auth-service.ts, integration-delete-secret.ts, integration-list.ts, integration-sync-secret.ts, integration-token.ts
      integration, kms, org-admin, org-membership, org, pki-alert, project-bot, project-env, project-membership, project-role, project, secret-blind-index, secret-folder, secret-import, secret-sharing, secret-tag
      secret-v2-bridge: secret-v2-bridge-dal.ts, secret-v2-bridge-fns.ts, secret-v2-bridge-service.ts, secret-v2-bridge-types.ts, secret-version-dal.ts
      secret, service-token
      slack: project-slack-config-dal.ts, slack-auth-validators.ts, slack-fns.ts, slack-integration-dal.ts, slack-service.ts, slack-types.ts
      smtp, super-admin, user, webhook, workflow-integration
cli/packages
docs
  api-reference/endpoints
    audit-logs
    groups: add-group-user.mdx, create.mdx, delete.mdx, get-by-id.mdx, get.mdx, list-group-users.mdx, remove-group-user.mdx, update.mdx
    kms/keys, project-groups, project-roles
  cli
  documentation
    guides
    platform
      audit-log-streams.mdx, audit-log-streams, audit-logs.mdx
      dynamic-secrets: aws-elasticache.mdx, azure-entra-id.mdx, elastic-search.mdx, ldap.mdx, mongo-atlas.mdx, mongo-db.mdx, rabbit-mq.mdx, redis.mdx
      identities
      kms.mdx, kms
      mfa.mdx, pki
      sso
      token.mdx, workflow-integrations
  images
    guides/import-envkey: copy-encryption-key.png, envkey-dashboard.png, envkey-export.png, infisical-import-dashboard.png, infisical-import-envkey.png
    integrations
      databricks: integrations-databricks-auth.png, integrations-databricks-create.png, integrations-databricks.png, pat-token.png
      github
    platform
      dynamic-secrets (screenshots for the Redis, Atlas, Elasticsearch, RabbitMQ, MongoDB, LDAP and Entra ID dynamic secret guides)
      kms/infisical-kms: kms-add-key-modal.png, kms-add-key.png, kms-decrypt-data.png, kms-decrypt-options.png, kms-decrypted-data.png, kms-encrypt-data.png, kms-encrypted-data.png, kms-key-options.png
      mfa/entra: mfa_entra_conditional_access.png, mfa_entra_confirm_policy.png, mfa_entra_create_policy.png, mfa_entra_infisical_app.png, mfa_entra_login.png, mfa_entra_review_policy.png
      pki
      workflow-integrations/slack-integration (Slack integration setup screenshots for admin, org and project level)
    sso: auth0-oidc, general-oidc, keycloak-oidc
  integrations
    cicd
    cloud: aws-amplify.mdx, aws-parameter-store.mdx, azure-key-vault.mdx, checkly.mdx, cloud-66.mdx, cloudflare-pages.mdx, cloudflare-workers.mdx, databricks.mdx, digital-ocean-app-platform.mdx, flyio.mdx, gcp-secret-manager.mdx, hasura-cloud.mdx, heroku.mdx, laravel-forge.mdx, netlify.mdx, northflank.mdx, qovery.mdx, railway.mdx, render.mdx, supabase.mdx, teamcity.mdx, windmill.mdx
    platforms
  internals
  mint.json, sdks
  self-hosting
frontend
  next.config.js, package-lock.json, package.json, index.ts, useDebounce.tsx, usePagination.tsx
  public
  src
    components
      features, navigation, signup
      v2: ComboBox, Dropdown, FormControl, InfisicalSecretInput, Input, Modal, Pagination, SecretPathInput, Select, Switch, Tooltip
    context
    hooks/api: accessApproval, admin, apiKeys, auditLogs, auth, ca, certificateTemplates, certificates, cmeks, dashboard, dynamicSecret, generic, groups, identities, index.tsx, integrationAuth, integrations, kms, migration, oidcConfig, organization, pkiAlerts, pkiCollections, roles, secretApproval, secretFolders, secretImports, secretSharing, secrets, users, workflowIntegrations, workspace
    utils
    layouts/AppLayout
    pages
      _app.tsx
      integrations: aws-parameter-store, aws-secret-manager, circleci, databricks, details, github
      login, org/[id]/overview, project/[id], signupinvite.tsx
    views
      IntegrationsPage
        IntegrationDetailsPage: IntegrationDetailsPage.tsx, IntegrationPage.utils.tsx, components (IntegrationAuditLogsSection.tsx, IntegrationConnectionSection.tsx, IntegrationDetailsSection.tsx, IntegrationSettingsSection.tsx)
        index.tsx, components/IntegrationsSection
      Login
      Org
        AuditLogsPage, IdentityPage/components, MembersPage/components
        OrgGroupsTab/components/OrgGroupsSection
        OrgIdentityTab/components/IdentitySection: IdentityAuthMethodModal.tsx, IdentityAwsAuthForm.tsx, IdentityAzureAuthForm.tsx, IdentityGcpAuthForm.tsx, IdentityKubernetesAuthForm.tsx, IdentityModal.tsx, IdentityOidcAuthForm.tsx, IdentitySection.tsx, IdentityTable.tsx, IdentityTokenAuthForm.tsx, IdentityUniversalAuthForm.tsx
        OrgMembersTab/components/OrgMembersSection, OrgRoleTabSection, RolePage/components, UserPage
      OrgAdminPage/components/OrgAdminProjects
      Project
        AuditLogsPage, CaPage
        CertificatesPage: CertificatesPage.tsx, components
        KmsPage: components, index.tsx
        MembersPage/components: GroupsTab/components/GroupsSection, IdentityTab, MembersTab/components
        RolePage: RolePage.tsx, components/RolePermissionsSection
        SecretApprovalPage/components/ApprovalPolicyList
        SecretMainPage: SecretMainPage.tsx, SecretMainPage.types.ts, components (ActionBar, ActionBar.tsx, CreateDynamicSecretForm, CreateSecretForm, DynamicSecretListView, FolderListView, SecretDropzone, SecretImportListView, SecretListView)
        SecretOverviewPage: SecretOverviewPage.tsx, components (CreateSecretForm, FolderBreadCrumbs, SecretOverviewTableRow, SecretTableResourceCount, SecretV2MigrationSection)
      Settings
        BillingSettingsPage/components/BillingCloudTab
        OrgSettingsPage/components: ImportTab, OrgAuthTab, OrgEncryptionTab, OrgTabGroup, OrgWorkflowIntegrationTab
        ProjectSettingsPage: ProjectSettingsPage.tsx, components (BackfillSecretReferenceSection, EncryptionTab, WorkflowIntegrationSection)
      ShareSecretPublicPage/components, ViewSecretPublicPage
      admin/DashboardPage
helm-charts/secrets-operator
k8-operator/controllers
standalone-entrypoint.sh

.env.migration.example
@@ -1,2 +1 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=
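The audit-logs connection string pairs with the dedicated audit-log knexfile listed in the file tree above (backend/src/db/auditlog-knexfile.ts). A minimal sketch of how such a variable is typically wired into a Knex config; the fallback behaviour and option layout here are assumptions, not taken from this diff:

```ts
// auditlog-knexfile.ts (illustrative sketch, not the actual file from this commit)
import type { Knex } from "knex";

// Assumed behaviour: fall back to the main connection string when no
// dedicated audit-log database is configured.
const connection =
  process.env.AUDIT_LOGS_DB_CONNECTION_URI || process.env.DB_CONNECTION_URI;

const config: Knex.Config = {
  client: "pg",
  connection,
  migrations: { directory: "./migrations" }
};

export default config;
```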
1 .github/pull_request_template.md vendored
@@ -6,7 +6,6 @@
- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation
@@ -6,15 +6,9 @@ permissions:
  contents: read

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-image:
    name: Build backend image
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
1 .gitignore vendored
@@ -63,7 +63,6 @@ yarn-error.log*

# Editor specific
.vscode/*
.idea/*

frontend-build
82 README.md
File diff suppressed because one or more lines are too long
@@ -123,7 +123,7 @@ describe("Project Environment Router", async () => {
        id: deletedProjectEnvironment.id,
        name: mockProjectEnv.name,
        slug: mockProjectEnv.slug,
        position: 5,
        position: 4,
        createdAt: expect.any(String),
        updatedAt: expect.any(String)
      })
@@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

const createPolicy = async (dto: {
  name: string;
  secretPath: string;
  approvers: { type: ApproverType.User; id: string }[];
  approvals: number;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-approvals`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      name: dto.name,
      secretPath: dto.secretPath,
      approvers: dto.approvers,
      approvals: dto.approvals
    }
  });

  expect(res.statusCode).toBe(200);
  return res.json().approval;
};

describe("Secret approval policy router", async () => {
  test("Create policy", async () => {
    const policy = await createPolicy({
      secretPath: "/",
      approvals: 1,
      approvers: [{ id: seedData1.id, type: ApproverType.User }],
      name: "test-policy"
    });

    expect(policy.name).toBe("test-policy");
  });
});
@@ -1,61 +1,73 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

const createSecretImport = async (importPath: string, importEnv: string) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      path: "/",
      import: {
        environment: importEnv,
        path: importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport;
};

const deleteSecretImport = async (id: string) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${id}`,
    headers: {
      authorization: `Bearer ${jwtAuthToken}`
    },
    body: {
      workspaceId: seedData1.project.id,
      environment: seedData1.environment.slug,
      path: "/"
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport;
};

describe("Secret Import Router", async () => {
  test.each([
    { importEnv: "prod", importPath: "/" }, // one in root
    { importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
  ])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
    // check for default environments
    const payload = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath,
      importEnv
    });
    const payload = await createSecretImport(importPath, importEnv);
    expect(payload).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath,
        importPath: expect.any(String),
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: importEnv,
          slug: expect.any(String),
          id: expect.any(String)
        })
      })
    );

    await deleteSecretImport({
      id: payload.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(payload.id);
  });

  test("Get secret imports", async () => {
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const createdImport1 = await createSecretImport("/", "prod");
    const createdImport2 = await createSecretImport("/", "staging");
    const res = await testServer.inject({
      method: "GET",
      url: `/api/v1/secret-imports`,
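One side of this diff calls shared helpers from e2e-test/testUtils/secret-imports instead of the inline testServer.inject wrappers shown above. That helper module is not part of this diff; the sketch below reconstructs what such a helper plausibly looks like from the object-style call sites in these tests (authToken, workspaceId, environmentSlug, secretPath, importPath, importEnv, isReplication), so the names and shapes are assumptions rather than the repository's actual code.

```ts
// e2e-test/testUtils/secret-imports.ts (illustrative sketch derived from the call sites;
// the real helper file may differ)
type TSecretImportArgs = {
  authToken: string;
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  importPath: string;
  importEnv: string;
  isReplication?: boolean;
};

export const createSecretImport = async (args: TSecretImportArgs) => {
  const res = await testServer.inject({
    method: "POST",
    url: "/api/v1/secret-imports",
    headers: { authorization: `Bearer ${args.authToken}` },
    body: {
      workspaceId: args.workspaceId,
      environment: args.environmentSlug,
      path: args.secretPath,
      isReplication: args.isReplication,
      import: { environment: args.importEnv, path: args.importPath }
    }
  });
  expect(res.statusCode).toBe(200);
  return JSON.parse(res.payload).secretImport;
};

export const deleteSecretImport = async (
  args: Omit<TSecretImportArgs, "importPath" | "importEnv"> & { id: string }
) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${args.id}`,
    headers: { authorization: `Bearer ${args.authToken}` },
    body: {
      workspaceId: args.workspaceId,
      environment: args.environmentSlug,
      path: args.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return JSON.parse(res.payload).secretImport;
};
```

As in the tests themselves, `testServer`, `jwtAuthToken` and `expect` are assumed to be globals provided by the e2e test setup.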
@@ -77,60 +89,25 @@ describe("Secret Import Router", async () => {
      expect.arrayContaining([
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importPath: expect.any(String),
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "prod",
            id: expect.any(String)
          })
        }),
        expect.objectContaining({
          id: expect.any(String),
          importPath: "/",
          importEnv: expect.objectContaining({
            name: expect.any(String),
            slug: "staging",
            slug: expect.any(String),
            id: expect.any(String)
          })
        })
      ])
    );
    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(createdImport1.id);
    await deleteSecretImport(createdImport2.id);
  });

  test("Update secret import position", async () => {
    const prodImportDetails = { path: "/", envSlug: "prod" };
    const stagingImportDetails = { path: "/", envSlug: "staging" };

    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: prodImportDetails.path,
      importEnv: prodImportDetails.envSlug
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: stagingImportDetails.path,
      importEnv: stagingImportDetails.envSlug
    });
    const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
    const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);

    const updateImportRes = await testServer.inject({
      method: "PATCH",
@@ -184,55 +161,22 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
    expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);

    await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(createdImport1.id);
    await deleteSecretImport(createdImport2.id);
  });

  test("Delete secret import position", async () => {
    const createdImport1 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "prod"
    });
    const createdImport2 = await createSecretImport({
      authToken: jwtAuthToken,
      secretPath: "/",
      environmentSlug: seedData1.environment.slug,
      workspaceId: seedData1.project.id,
      importPath: "/",
      importEnv: "staging"
    });
    const deletedImport = await deleteSecretImport({
      id: createdImport1.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });

    const createdImport1 = await createSecretImport("/", "prod");
    const createdImport2 = await createSecretImport("/", "staging");
    const deletedImport = await deleteSecretImport(createdImport1.id);
    // check for default environments
    expect(deletedImport).toEqual(
      expect.objectContaining({
        id: expect.any(String),
        importPath: "/",
        importPath: expect.any(String),
        importEnv: expect.objectContaining({
          name: expect.any(String),
          slug: "prod",
          slug: expect.any(String),
          id: expect.any(String)
        })
      })
@@ -257,552 +201,6 @@ describe("Secret Import Router", async () => {
    expect(secretImportList.secretImports.length).toEqual(1);
    expect(secretImportList.secretImports[0].position).toEqual(1);

    await deleteSecretImport({
      id: createdImport2.id,
      workspaceId: seedData1.project.id,
      environmentSlug: seedData1.environment.slug,
      secretPath: "/",
      authToken: jwtAuthToken
    });
    await deleteSecretImport(createdImport2.id);
  });
});

// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import waterfall pattern testing - %path",
  ({ path: testSuitePath }) => {
    beforeAll(async () => {
      let prodFolder: { id: string };
      let stagingFolder: { id: string };
      let devFolder: { id: string };

      if (testSuitePath !== "/") {
        prodFolder = await createFolder({
          authToken: jwtAuthToken,
          environmentSlug: "prod",
          workspaceId: seedData1.projectV3.id,
          secretPath: "/",
          name: "deep"
        });
        stagingFolder = await createFolder({ /* same arguments, environmentSlug: "staging" */ });
        devFolder = await createFolder({ /* same arguments, environmentSlug: seedData1.environment.slug */ });
      }

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging"
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod"
      });

      return async () => {
        await deleteSecretImport({ id: stageImportFromProd.id, workspaceId: seedData1.projectV3.id, environmentSlug: "staging", secretPath: testSuitePath, authToken: jwtAuthToken });
        await deleteSecretImport({ id: devImportFromStage.id, workspaceId: seedData1.projectV3.id, environmentSlug: seedData1.environment.slug, secretPath: testSuitePath, authToken: jwtAuthToken });
        if (prodFolder) await deleteFolder({ authToken: jwtAuthToken, secretPath: "/", id: prodFolder.id, workspaceId: seedData1.projectV3.id, environmentSlug: "prod" });
        if (stagingFolder) await deleteFolder({ /* same, id: stagingFolder.id, environmentSlug: "staging" */ });
        if (devFolder) await deleteFolder({ /* same, id: devFolder.id, environmentSlug: seedData1.environment.slug */ });
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });
      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({ secretKey: "STAGING_KEY", secretValue: "stage-value" })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      // identical flow for a PROD_KEY/"prod-value" secret created in "prod" and read back
      // through the dev environment via the two-level staging import chain
    });
  }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import multiple destination to one source pattern testing - %path",
  ({ path: testSuitePath }) => {
    // beforeAll mirrors the waterfall suite above, except that both imports target the dev
    // environment: devImportFromStage (importEnv: "staging") and devImportFromProd
    // (importEnv: "prod"), with the same "/deep" folder setup and cleanup closure.

    test("Check imported secret exist", async () => {
      // creates STAGING_KEY in "staging" and PROD_KEY in "prod", asserts the dev-side
      // getSecretByNameV2/getSecretsV2 results contain both imported secrets, then deletes them
    });
  }
);

// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret import one source to multiple destination pattern testing - %path",
  ({ path: testSuitePath }) => {
    // beforeAll mirrors the previous suites, but the imports point the other way:
    // stageImportFromDev and prodImportFromDev both import from seedData1.environment.slug.

    test("Check imported secret exist", async () => {
      // creates STAGING_KEY and PROD_KEY in the dev environment, reads them back through the
      // "staging" and "prod" environments respectively, then deletes both secrets from dev
    });
  }
);
@@ -1,406 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
  "Secret replication waterfall pattern testing - %secretPath",
  ({ secretPath: testSuitePath }) => {
    beforeAll(async () => {
      // For the "/deep" variant, a "deep" folder is created in prod, staging and dev
      // (createFolder with workspaceId: seedData1.projectV3.id), as in the import suites.

      const devImportFromStage = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "staging",
        isReplication: true
      });

      const stageImportFromProd = await createSecretImport({
        authToken: jwtAuthToken,
        secretPath: testSuitePath,
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        importPath: testSuitePath,
        importEnv: "prod",
        isReplication: true
      });

      return async () => {
        // delete stageImportFromProd and devImportFromStage, then the three folders
        // created for the "/deep" variant
      };
    });

    test("Check one level imported secret exist", async () => {
      await createSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY",
        value: "stage-value"
      });

      // wait for 5 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });

      expect(secret.secretKey).toBe("STAGING_KEY");
      expect(secret.secretValue).toBe("stage-value");

      const listSecrets = await getSecretsV2({
        environmentSlug: seedData1.environment.slug,
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken
      });

      expect(listSecrets.imports).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            secrets: expect.arrayContaining([
              expect.objectContaining({ secretKey: "STAGING_KEY", secretValue: "stage-value" })
            ])
          })
        ])
      );

      await deleteSecretV2({
        environmentSlug: "staging",
        workspaceId: seedData1.projectV3.id,
        secretPath: testSuitePath,
        authToken: jwtAuthToken,
        key: "STAGING_KEY"
      });
    });

    test("Check two level imported secret exist", async () => {
      // same flow with a PROD_KEY/"prod-value" secret created in "prod", including the
      // 5-second replication wait, read back through the dev environment
    });
  },
  { timeout: 30000 }
);

// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
  "Secret replication 1-N pattern testing - %path",
  ({ path: testSuitePath }) => {
    // beforeAll mirrors the waterfall suite, but both replicated imports
    // (devImportFromStage and devImportFromProd) target the dev environment.

    test("Check imported secret exist", async () => {
      // creates STAGING_KEY in "staging" and PROD_KEY in "prod", waits 5 seconds for
      // replication, asserts both keys appear under listSecrets.imports for dev,
      // then deletes both secrets
    });
  },
  { timeout: 30000 }
);
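The removed replication tests rely on fixed five-second sleeps before asserting. For comparison, a small polling helper achieves the same synchronization without a hard-coded delay; this is an illustrative sketch only, not code from the repository:

```ts
// Illustrative alternative to a fixed setTimeout wait (not part of this diff):
// poll until the probe returns a value, or fail after a deadline.
const waitFor = async <T>(
  probe: () => Promise<T | undefined>,
  timeoutMs = 15000,
  intervalMs = 500
): Promise<T> => {
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    const value = await probe();
    if (value !== undefined) return value;
    if (Date.now() > deadline) throw new Error("timed out waiting for replication");
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
};
```

A test could then replace the sleep with something like `await waitFor(() => getSecretByNameV2({ ... }).catch(() => undefined))`, assuming the helper is available in the e2e setup.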
@@ -510,7 +510,7 @@ describe("Service token fail cases", async () => {
          authorization: `Bearer ${serviceToken}`
        }
      });
      expect(fetchSecrets.statusCode).toBe(403);
      expect(fetchSecrets.statusCode).toBe(401);
      expect(fetchSecrets.json().error).toBe("PermissionDenied");
      await deleteServiceToken();
    });

@@ -532,7 +532,7 @@ describe("Service token fail cases", async () => {
          authorization: `Bearer ${serviceToken}`
        }
      });
      expect(fetchSecrets.statusCode).toBe(403);
      expect(fetchSecrets.statusCode).toBe(401);
      expect(fetchSecrets.json().error).toBe("PermissionDenied");
      await deleteServiceToken();
    });

@@ -557,7 +557,7 @@ describe("Service token fail cases", async () => {
          authorization: `Bearer ${serviceToken}`
        }
      });
      expect(writeSecrets.statusCode).toBe(403);
      expect(writeSecrets.statusCode).toBe(401);
      expect(writeSecrets.json().error).toBe("PermissionDenied");

      // but read access should still work fine
@ -1,330 +0,0 @@
|
||||
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
|
||||
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
|
||||
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
|
||||
|
||||
import { seedData1 } from "@app/db/seed-data";
|
||||
|
||||
describe("Secret expansion", () => {
|
||||
const projectId = seedData1.projectV3.id;
|
||||
|
||||
beforeAll(async () => {
|
||||
const prodRootFolder = await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
name: "deep"
|
||||
});
|
||||
|
||||
await createFolder({
|
||||
authToken: jwtAuthToken,
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
name: "nested"
|
||||
});
|
||||
|
||||
return async () => {
|
||||
await deleteFolder({
|
||||
authToken: jwtAuthToken,
|
||||
secretPath: "/",
|
||||
id: prodRootFolder.id,
|
||||
workspaceId: projectId,
|
||||
environmentSlug: "prod"
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
test("Local secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "HELLO",
|
||||
value: "world"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${HELLO}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "TEST"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello world");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "TEST",
|
||||
secretValue: "hello world"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Cross environment secret reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
|
||||
const expandedSecret = await getSecretByNameV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY"
|
||||
});
|
||||
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.secrets).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "KEY",
|
||||
secretValue: "hello testing secret reference"
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
});
|
||||
|
||||
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested"
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: "/deep/nested",
|
||||
environment: "prod",
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
});
|
||||
|
||||
test(
|
||||
"Replicated secret import secret expansion on local reference and nested reference",
|
||||
async () => {
|
||||
const secrets = [
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep",
|
||||
authToken: jwtAuthToken,
|
||||
key: "DEEP_KEY_1",
|
||||
value: "testing"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_1",
|
||||
value: "reference"
|
||||
},
|
||||
{
|
||||
environmentSlug: "prod",
|
||||
workspaceId: projectId,
|
||||
secretPath: "/deep/nested",
|
||||
authToken: jwtAuthToken,
|
||||
key: "NESTED_KEY_2",
|
||||
// eslint-disable-next-line
|
||||
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
|
||||
},
|
||||
{
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
key: "KEY",
|
||||
// eslint-disable-next-line
|
||||
value: "hello world"
|
||||
}
|
||||
];
|
||||
|
||||
await Promise.all(secrets.map((el) => createSecretV2(el)));
|
||||
const secretImportFromProdToDev = await createSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken,
|
||||
importEnv: "prod",
|
||||
importPath: "/deep/nested",
|
||||
isReplication: true
|
||||
});
|
||||
|
||||
// wait for 5 second for replication to finish
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 5000); // time to breathe for db
|
||||
});
|
||||
|
||||
const listSecrets = await getSecretsV2({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
secretPath: "/",
|
||||
authToken: jwtAuthToken
|
||||
});
|
||||
expect(listSecrets.imports).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
|
||||
environment: seedData1.environment.slug,
|
||||
secrets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_1",
|
||||
secretValue: "reference"
|
||||
}),
|
||||
expect.objectContaining({
|
||||
secretKey: "NESTED_KEY_2",
|
||||
secretValue: "secret reference testing"
|
||||
})
|
||||
])
|
||||
})
|
||||
])
|
||||
);
|
||||
|
||||
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
|
||||
await deleteSecretImport({
|
||||
environmentSlug: seedData1.environment.slug,
|
||||
workspaceId: projectId,
|
||||
authToken: jwtAuthToken,
|
||||
id: secretImportFromProdToDev.id,
|
||||
secretPath: "/"
|
||||
});
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
});
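The tests above exercise Infisical's ${...} secret-reference syntax: a bare key such as ${NESTED_KEY_1} resolves within the same folder, while a dot-separated form such as ${prod.deep.DEEP_KEY_1} points at another environment and folder path. A minimal sketch of how such a reference string could be assembled for a fixture is shown below; the helper name is illustrative and not part of the test suite.

// Hypothetical helper for building reference strings like the fixture values above.
const buildSecretReference = (key: string, environmentSlug?: string, secretPath = "/") => {
  if (!environmentSlug) return `\${${key}}`; // local reference within the same folder
  const pathSegments = secretPath.split("/").filter(Boolean); // "/deep/nested" -> ["deep", "nested"]
  return `\${${[environmentSlug, ...pathSegments, key].join(".")}}`;
};

// buildSecretReference("DEEP_KEY_1", "prod", "/deep") evaluates to "${prod.deep.DEEP_KEY_1}"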
@@ -8,7 +8,6 @@ type TRawSecret = {
  secretComment?: string;
  version: number;
};

const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
  const createSecretReqBody = {
    workspaceId: seedData1.projectV3.id,

@@ -1075,7 +1075,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: createSecretReqBody
    });
    expect(createSecRes.statusCode).toBe(404);
    expect(createSecRes.statusCode).toBe(400);
  });

  test("Update secret raw", async () => {
@@ -1093,7 +1093,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: updateSecretReqBody
    });
    expect(updateSecRes.statusCode).toBe(404);
    expect(updateSecRes.statusCode).toBe(400);
  });

  test("Delete secret raw", async () => {
@@ -1110,6 +1110,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
      },
      body: deletedSecretReqBody
    });
    expect(deletedSecRes.statusCode).toBe(404);
    expect(deletedSecRes.statusCode).toBe(400);
  });
});
@@ -1,73 +0,0 @@
type TFolder = {
  id: string;
  name: string;
};

export const createFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  name: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      name: dto.name,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const deleteFolder = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  id: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/folders/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folder as TFolder;
};

export const listFolders = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/folders`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });
  expect(res.statusCode).toBe(200);
  return res.json().folders as TFolder[];
};
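A minimal usage sketch for the folder helpers above, assuming the same test scaffolding (projectId, jwtAuthToken, seedData1) as in the secret tests:

const folder = await createFolder({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  name: "deep",
  authToken: jwtAuthToken
});
const folders = await listFolders({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  authToken: jwtAuthToken
});
expect(folders.map((el) => el.name)).toContain("deep");
await deleteFolder({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  id: folder.id,
  authToken: jwtAuthToken
});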
@@ -1,93 +0,0 @@
type TSecretImport = {
  id: string;
  importEnv: {
    name: string;
    slug: string;
    id: string;
  };
  importPath: string;
};

export const createSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  isReplication?: boolean;
  secretPath: string;
  importPath: string;
  importEnv: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "POST",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      isReplication: dto.isReplication,
      path: dto.secretPath,
      import: {
        environment: dto.importEnv,
        path: dto.importPath
      }
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const deleteSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  id: string;
}) => {
  const res = await testServer.inject({
    method: "DELETE",
    url: `/api/v1/secret-imports/${dto.id}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImport");
  return payload.secretImport as TSecretImport;
};

export const listSecretImport = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const res = await testServer.inject({
    method: "GET",
    url: `/api/v1/secret-imports`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      path: dto.secretPath
    }
  });

  expect(res.statusCode).toBe(200);
  const payload = JSON.parse(res.payload);
  expect(payload).toHaveProperty("secretImports");
  return payload.secretImports as TSecretImport[];
};
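A short, hedged usage sketch for the secret-import helpers above, again assuming the shared test scaffolding (projectId, jwtAuthToken, seedData1):

const secretImport = await createSecretImport({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  importEnv: "prod",
  importPath: "/deep/nested",
  authToken: jwtAuthToken
});
const imports = await listSecretImport({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  authToken: jwtAuthToken
});
expect(imports.map((el) => el.importPath)).toContain("/deep/nested");
await deleteSecretImport({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  id: secretImport.id,
  authToken: jwtAuthToken
});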
@@ -1,128 +0,0 @@
import { SecretType } from "@app/db/schemas";

type TRawSecret = {
  secretKey: string;
  secretValue: string;
  secretComment?: string;
  version: number;
};

export const createSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  value: string;
  comment?: string;
  authToken: string;
  type?: SecretType;
}) => {
  const createSecretReqBody = {
    workspaceId: dto.workspaceId,
    environment: dto.environmentSlug,
    type: dto.type || SecretType.Shared,
    secretPath: dto.secretPath,
    secretKey: dto.key,
    secretValue: dto.value,
    secretComment: dto.comment
  };
  const createSecRes = await testServer.inject({
    method: "POST",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: createSecretReqBody
  });
  expect(createSecRes.statusCode).toBe(200);
  const createdSecretPayload = JSON.parse(createSecRes.payload);
  expect(createdSecretPayload).toHaveProperty("secret");
  return createdSecretPayload.secret as TRawSecret;
};

export const deleteSecretV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const deleteSecRes = await testServer.inject({
    method: "DELETE",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    body: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath
    }
  });
  expect(deleteSecRes.statusCode).toBe(200);
  const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
  expect(updatedSecretPayload).toHaveProperty("secret");
  return updatedSecretPayload.secret as TRawSecret;
};

export const getSecretByNameV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  key: string;
  authToken: string;
}) => {
  const response = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw/${dto.key}`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(response.statusCode).toBe(200);
  const payload = JSON.parse(response.payload);
  expect(payload).toHaveProperty("secret");
  return payload.secret as TRawSecret;
};

export const getSecretsV2 = async (dto: {
  workspaceId: string;
  environmentSlug: string;
  secretPath: string;
  authToken: string;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",
    url: `/api/v3/secrets/raw`,
    headers: {
      authorization: `Bearer ${dto.authToken}`
    },
    query: {
      workspaceId: dto.workspaceId,
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
  const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
  expect(getSecretsPayload).toHaveProperty("secrets");
  expect(getSecretsPayload).toHaveProperty("imports");
  return getSecretsPayload as {
    secrets: TRawSecret[];
    imports: {
      secretPath: string;
      environment: string;
      folderId: string;
      secrets: TRawSecret[];
    }[];
  };
};
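A minimal round-trip sketch with the raw secret helpers above, assuming the same scaffolding as the earlier tests (the key name is made up for illustration):

await createSecretV2({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  key: "SMOKE_KEY",
  value: "smoke-value",
  authToken: jwtAuthToken
});
const secret = await getSecretByNameV2({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  key: "SMOKE_KEY",
  authToken: jwtAuthToken
});
expect(secret.secretValue).toBe("smoke-value");
await deleteSecretV2({
  workspaceId: projectId,
  environmentSlug: seedData1.environment.slug,
  secretPath: "/",
  key: "SMOKE_KEY",
  authToken: jwtAuthToken
});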
@@ -11,11 +11,10 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";

import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -29,31 +28,19 @@ export default {
    dbRootCert: cfg.DB_ROOT_CERT
  });

  const redis = new Redis(cfg.REDIS_URL);
  await redis.flushdb("SYNC");

  try {
    await db.migrate.rollback(
      {
        directory: path.join(__dirname, "../src/db/migrations"),
        extension: "ts",
        tableName: "infisical_migrations"
      },
      true
    );
    await db.migrate.latest({
      directory: path.join(__dirname, "../src/db/migrations"),
      extension: "ts",
      tableName: "infisical_migrations"
    });

    await db.seed.run({
      directory: path.join(__dirname, "../src/db/seeds"),
      extension: "ts"
    });
    const smtp = mockSmtpServer();
    const queue = queueServiceFactory(cfg.REDIS_URL);
    const keyStore = keyStoreFactory(cfg.REDIS_URL);
    const queue = mockQueue();
    const keyStore = mockKeyStore();
    const server = await main({ db, smtp, logger, queue, keyStore });
    // @ts-expect-error type
    globalThis.testServer = server;
@@ -71,12 +58,10 @@ export default {
      { expiresIn: cfg.JWT_AUTH_LIFETIME }
    );
  } catch (error) {
    // eslint-disable-next-line
    console.log("[TEST] Error setting up environment", error);
    await db.destroy();
    throw error;
  }

  // custom setup
  return {
    async teardown() {
@@ -95,9 +80,6 @@ export default {
      },
      true
    );

    await redis.flushdb("ASYNC");
    redis.disconnect();
    await db.destroy();
  }
};
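The setup above publishes the started Fastify instance on globalThis, which is how the helper files earlier in this diff can call testServer.inject without importing anything. A hedged sketch of a test using it directly (the /api/status route is assumed here, not confirmed by this diff):

// Inside any e2e test file, after the global setup has run.
const res = await testServer.inject({
  method: "GET",
  url: "/api/status",
  headers: { authorization: `Bearer ${jwtAuthToken}` }
});
expect(res.statusCode).toBe(200);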
backend/package-lock.json (generated, 1213 lines changed): file diff suppressed because it is too large.

@@ -45,19 +45,12 @@
    "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
    "generate:component": "tsx ./scripts/create-backend-file.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts",
    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
    "migration:new": "tsx ./scripts/create-migration.ts",
    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
    "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
@@ -86,7 +79,6 @@
    "@types/prompt-sync": "^4.2.3",
    "@types/resolve": "^1.20.6",
    "@types/safe-regex": "^1.1.6",
    "@types/sjcl": "^1.0.34",
    "@types/uuid": "^9.0.7",
    "@typescript-eslint/eslint-plugin": "^6.20.0",
    "@typescript-eslint/parser": "^6.20.0",
@@ -110,6 +102,7 @@
    "tsup": "^8.0.1",
    "tsx": "^4.4.0",
    "typescript": "^5.3.2",
    "vite-tsconfig-paths": "^4.2.2",
    "vitest": "^1.2.2"
  },
  "dependencies": {
@@ -119,7 +112,6 @@
    "@aws-sdk/client-secrets-manager": "^3.504.0",
    "@aws-sdk/client-sts": "^3.600.0",
    "@casl/ability": "^6.5.0",
    "@elastic/elasticsearch": "^8.15.0",
    "@fastify/cookie": "^9.3.1",
    "@fastify/cors": "^8.5.0",
    "@fastify/etag": "^5.1.0",
@@ -138,8 +130,6 @@
    "@peculiar/x509": "^1.12.1",
    "@serdnam/pino-cloudwatch-transport": "^1.0.4",
    "@sindresorhus/slugify": "1.1.0",
    "@slack/oauth": "^3.0.1",
    "@slack/web-api": "^7.3.4",
    "@team-plain/typescript-sdk": "^4.6.1",
    "@ucast/mongo2js": "^1.3.4",
    "ajv": "^8.12.0",
@@ -165,10 +155,8 @@
    "jwks-rsa": "^3.1.0",
    "knex": "^3.0.1",
    "ldapjs": "^3.0.7",
    "ldif": "0.5.1",
    "libsodium-wrappers": "^0.7.13",
    "lodash.isequal": "^4.5.0",
    "mongodb": "^6.8.1",
    "ms": "^2.1.3",
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.4",
@@ -186,11 +174,8 @@
    "pino": "^8.16.2",
    "pkijs": "^3.2.4",
    "posthog-node": "^3.6.2",
    "probot": "^13.3.8",
    "probot": "^13.0.0",
    "safe-regex": "^2.1.1",
    "scim-patch": "^0.8.3",
    "scim2-parse-filter": "^0.2.10",
    "sjcl": "^1.0.8",
    "smee-client": "^2.0.0",
    "tedious": "^18.2.1",
    "tweetnacl": "^1.0.3",
@@ -90,7 +90,7 @@ const main = async () => {
      .whereRaw("table_schema = current_schema()")
      .select<{ tableName: string }[]>("table_name as tableName")
      .orderBy("table_name")
  ).filter((el) => !el.tableName.includes("_migrations") && !el.tableName.includes("partitioned_audit_logs_"));
  ).filter((el) => !el.tableName.includes("_migrations"));

  for (let i = 0; i < tables.length; i += 1) {
    const { tableName } = tables[i];

backend/src/@types/fastify.d.ts (vendored, 8 lines changed)

@@ -38,8 +38,6 @@ import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-se
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@@ -72,14 +70,12 @@ import { TSecretReplicationServiceFactory } from "@app/services/secret-replicati
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

declare module "fastify" {
  interface FastifyRequest {
@@ -181,10 +177,6 @@ declare module "fastify" {
      userEngagement: TUserEngagementServiceFactory;
      externalKms: TExternalKmsServiceFactory;
      orgAdmin: TOrgAdminServiceFactory;
      slack: TSlackServiceFactory;
      workflowIntegration: TWorkflowIntegrationServiceFactory;
      cmek: TCmekServiceFactory;
      migration: TExternalMigrationServiceFactory;
    };
    // this is exclusive use for middlewares in which we need to inject data
    // everywhere else access using service layer

backend/src/@types/knex.d.ts (vendored, 42 lines changed)

@@ -101,9 +101,6 @@ import {
  TIdentityKubernetesAuths,
  TIdentityKubernetesAuthsInsert,
  TIdentityKubernetesAuthsUpdate,
  TIdentityMetadata,
  TIdentityMetadataInsert,
  TIdentityMetadataUpdate,
  TIdentityOidcAuths,
  TIdentityOidcAuthsInsert,
  TIdentityOidcAuthsUpdate,
@@ -170,9 +167,6 @@ import {
  TOrgRoles,
  TOrgRolesInsert,
  TOrgRolesUpdate,
  TPartitionedAuditLogs,
  TPartitionedAuditLogsInsert,
  TPartitionedAuditLogsUpdate,
  TPkiAlerts,
  TPkiAlertsInsert,
  TPkiAlertsUpdate,
@@ -199,9 +193,6 @@ import {
  TProjectRolesUpdate,
  TProjects,
  TProjectsInsert,
  TProjectSlackConfigs,
  TProjectSlackConfigsInsert,
  TProjectSlackConfigsUpdate,
  TProjectsUpdate,
  TProjectUserAdditionalPrivilege,
  TProjectUserAdditionalPrivilegeInsert,
@@ -308,9 +299,6 @@ import {
  TServiceTokens,
  TServiceTokensInsert,
  TServiceTokensUpdate,
  TSlackIntegrations,
  TSlackIntegrationsInsert,
  TSlackIntegrationsUpdate,
  TSuperAdmin,
  TSuperAdminInsert,
  TSuperAdminUpdate,
@@ -334,10 +322,7 @@ import {
  TUsersUpdate,
  TWebhooks,
  TWebhooksInsert,
  TWebhooksUpdate,
  TWorkflowIntegrations,
  TWorkflowIntegrationsInsert,
  TWorkflowIntegrationsUpdate
  TWebhooksUpdate
} from "@app/db/schemas";
import {
  TSecretV2TagJunction,
@@ -552,11 +537,6 @@ declare module "knex/types/tables" {
      TIdentityUniversalAuthsInsert,
      TIdentityUniversalAuthsUpdate
    >;
    [TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
      TIdentityMetadata,
      TIdentityMetadataInsert,
      TIdentityMetadataUpdate
    >;
    [TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
      TIdentityKubernetesAuths,
      TIdentityKubernetesAuthsInsert,
@@ -718,11 +698,6 @@ declare module "knex/types/tables" {
      TAuditLogStreamsInsert,
      TAuditLogStreamsUpdate
    >;
    [TableName.PartitionedAuditLog]: KnexOriginal.CompositeTableType<
      TPartitionedAuditLogs,
      TPartitionedAuditLogsInsert,
      TPartitionedAuditLogsUpdate
    >;
    [TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
      TGitAppInstallSessions,
      TGitAppInstallSessionsInsert,
@@ -801,20 +776,5 @@ declare module "knex/types/tables" {
      TKmsKeyVersionsInsert,
      TKmsKeyVersionsUpdate
    >;
    [TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
      TSlackIntegrations,
      TSlackIntegrationsInsert,
      TSlackIntegrationsUpdate
    >;
    [TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
      TProjectSlackConfigs,
      TProjectSlackConfigsInsert,
      TProjectSlackConfigsUpdate
    >;
    [TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
      TWorkflowIntegrations,
      TWorkflowIntegrationsInsert,
      TWorkflowIntegrationsUpdate
    >;
  }
}

backend/src/@types/ldif.d.ts (vendored, 4 lines changed)

@@ -1,4 +0,0 @@
declare module "ldif" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function parse(input: string, ...args: any[]): any;
}

@@ -1,75 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";

import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";

// Update with your config settings.
dotenv.config({
  path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
  path: path.join(__dirname, "../../../.env")
});

if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) {
  console.info("Dedicated audit log database not found. No further migrations necessary");
  process.exit(0);
}

console.info("Executing migration on audit log database...");

export default {
  development: {
    client: "postgres",
    connection: {
      connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
      host: process.env.AUDIT_LOGS_DB_HOST,
      port: process.env.AUDIT_LOGS_DB_PORT,
      user: process.env.AUDIT_LOGS_DB_USER,
      database: process.env.AUDIT_LOGS_DB_NAME,
      password: process.env.AUDIT_LOGS_DB_PASSWORD,
      ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
    pool: {
      min: 2,
      max: 10
    },
    seeds: {
      directory: "./seeds"
    },
    migrations: {
      tableName: "infisical_migrations"
    }
  },
  production: {
    client: "postgres",
    connection: {
      connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
      host: process.env.AUDIT_LOGS_DB_HOST,
      port: process.env.AUDIT_LOGS_DB_PORT,
      user: process.env.AUDIT_LOGS_DB_USER,
      database: process.env.AUDIT_LOGS_DB_NAME,
      password: process.env.AUDIT_LOGS_DB_PASSWORD,
      ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
          }
        : false
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: "infisical_migrations"
    }
  }
} as Knex.Config;
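The knexfile above is normally consumed by the knex CLI via the auditlog-migration:* scripts in package.json. A hedged sketch of using it programmatically instead (the environment selection shown here is an assumption, not something this diff defines):

import knex from "knex";
import auditLogKnexConfig from "./auditlog-knexfile";

// Pick the development or production block before handing it to knex().
const environment = process.env.NODE_ENV === "production" ? "production" : "development";
const auditLogDb = knex((auditLogKnexConfig as Record<string, object>)[environment]);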
@@ -1,2 +1,2 @@
export type { TDbClient } from "./instance";
export { initAuditLogDbConnection, initDbConnection } from "./instance";
export { initDbConnection } from "./instance";

@@ -70,45 +70,3 @@ export const initDbConnection = ({

  return db;
};

export const initAuditLogDbConnection = ({
  dbConnectionUri,
  dbRootCert
}: {
  dbConnectionUri: string;
  dbRootCert?: string;
}) => {
  // akhilmhdh: the default Knex is knex.Knex<any, any[]>. but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
  // this was causing issue with files like `snapshot-dal` `findRecursivelySnapshots` this i am explicitly putting the any and unknown[]
  // eslint-disable-next-line
  const db: Knex<any, unknown[]> = knex({
    client: "pg",
    connection: {
      connectionString: dbConnectionUri,
      host: process.env.AUDIT_LOGS_DB_HOST,
      // @ts-expect-error I have no clue why only for the port there is a type error
      // eslint-disable-next-line
      port: process.env.AUDIT_LOGS_DB_PORT,
      user: process.env.AUDIT_LOGS_DB_USER,
      database: process.env.AUDIT_LOGS_DB_NAME,
      password: process.env.AUDIT_LOGS_DB_PASSWORD,
      ssl: dbRootCert
        ? {
            rejectUnauthorized: true,
            ca: Buffer.from(dbRootCert, "base64").toString("ascii")
          }
        : false
    }
  });

  // we add these overrides so that auditLogDb and the primary DB are interchangeable
  db.primaryNode = () => {
    return db;
  };

  db.replicaNode = () => {
    return db;
  };

  return db;
};
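A hedged sketch of how a caller might choose between the two connections exported from @app/db, mirroring the AUDIT_LOGS_DB_* convention used above. The fallback logic and the exact initDbConnection options are assumptions for illustration, not taken from this diff.

import { initAuditLogDbConnection, initDbConnection } from "@app/db";

const db = initDbConnection({
  dbConnectionUri: process.env.DB_CONNECTION_URI as string,
  dbRootCert: process.env.DB_ROOT_CERT
});
// Fall back to the primary connection when no dedicated audit log database is configured.
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
  ? initAuditLogDbConnection({
      dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
      dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
    })
  : db;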
@@ -115,14 +115,7 @@ export async function down(knex: Knex): Promise<void> {
      // eslint-disable-next-line
      // @ts-ignore because generate schema happens after this
      approverId: knex(TableName.ProjectMembership)
        .join(
          TableName.SecretApprovalPolicy,
          `${TableName.SecretApprovalPolicy}.id`,
          `${TableName.SecretApprovalPolicyApprover}.policyId`
        )
        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
        .select("id")
        .where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
    });
  await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
@@ -154,27 +147,13 @@ export async function down(knex: Knex): Promise<void> {
      // eslint-disable-next-line
      // @ts-ignore because generate schema happens after this
      committerId: knex(TableName.ProjectMembership)
        .join(
          TableName.SecretApprovalPolicy,
          `${TableName.SecretApprovalPolicy}.id`,
          `${TableName.SecretApprovalRequest}.policyId`
        )
        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
        .select(knex.ref("id").withSchema(TableName.ProjectMembership)),
        .select("id")
        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
      // eslint-disable-next-line
      // @ts-ignore because generate schema happens after this
      statusChangeBy: knex(TableName.ProjectMembership)
        .join(
          TableName.SecretApprovalPolicy,
          `${TableName.SecretApprovalPolicy}.id`,
          `${TableName.SecretApprovalRequest}.policyId`
        )
        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
        .select("id")
        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
    });

  await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@@ -198,20 +177,8 @@ export async function down(knex: Knex): Promise<void> {
      // eslint-disable-next-line
      // @ts-ignore because generate schema happens after this
      member: knex(TableName.ProjectMembership)
        .join(
          TableName.SecretApprovalRequest,
          `${TableName.SecretApprovalRequest}.id`,
          `${TableName.SecretApprovalRequestReviewer}.requestId`
        )
        .join(
          TableName.SecretApprovalPolicy,
          `${TableName.SecretApprovalPolicy}.id`,
          `${TableName.SecretApprovalRequest}.policyId`
        )
        .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
        .where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
        .select("id")
        .where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
        .select(knex.ref("id").withSchema(TableName.ProjectMembership))
    });
  await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
    tb.uuid("member").notNullable().alter();
@@ -1,25 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (!doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.string("password").nullable();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
    if (doesPasswordExist) {
      await knex.schema.alterTable(TableName.SecretSharing, (t) => {
        t.dropColumn("password");
      });
    }
  }
}

@@ -1,96 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
    await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("integration").notNullable();
      tb.string("slug").notNullable();
      tb.uuid("orgId").notNullable();
      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      tb.string("description");
      tb.unique(["orgId", "slug"]);
      tb.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
  }

  if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
    await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
      tb.uuid("id", { primaryKey: true }).notNullable();
      tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
      tb.string("teamId").notNullable();
      tb.string("teamName").notNullable();
      tb.string("slackUserId").notNullable();
      tb.string("slackAppId").notNullable();
      tb.binary("encryptedBotAccessToken").notNullable();
      tb.string("slackBotId").notNullable();
      tb.string("slackBotUserId").notNullable();
      tb.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
  }

  if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
    await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("projectId").notNullable().unique();
      tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      tb.uuid("slackIntegrationId").notNullable();
      tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
      tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
      tb.string("accessRequestChannels").notNullable().defaultTo("");
      tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
      tb.string("secretRequestChannels").notNullable().defaultTo("");
      tb.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
  }

  const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
  const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedSlackClientSecret"
  );

  await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
    if (!doesSuperAdminHaveSlackClientId) {
      tb.binary("encryptedSlackClientId");
    }
    if (!doesSuperAdminHaveSlackClientSecret) {
      tb.binary("encryptedSlackClientSecret");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
  await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);

  await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
  await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);

  await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
  await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);

  const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
  const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
    TableName.SuperAdmin,
    "encryptedSlackClientSecret"
  );

  await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
    if (doesSuperAdminHaveSlackClientId) {
      tb.dropColumn("encryptedSlackClientId");
    }
    if (doesSuperAdminHaveSlackClientSecret) {
      tb.dropColumn("encryptedSlackClientSecret");
    }
  });
}

@@ -1,25 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
    const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
      TableName.CertificateAuthority,
      "requireTemplateForIssuance"
    );
    if (!hasRequireTemplateForIssuanceColumn) {
      await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
        t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
    await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
      t.dropColumn("requireTemplateForIssuance");
    });
  }
}
@@ -1,85 +0,0 @@
import { Knex } from "knex";

import { CertKeyUsage } from "@app/services/certificate/certificate-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  // Certificate template
  const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
  const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");

  await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
    if (!hasKeyUsagesCol) {
      tb.specificType("keyUsages", "text[]");
    }

    if (!hasExtendedKeyUsagesCol) {
      tb.specificType("extendedKeyUsages", "text[]");
    }
  });

  if (!hasKeyUsagesCol) {
    await knex(TableName.CertificateTemplate).update({
      keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
    });
  }

  if (!hasExtendedKeyUsagesCol) {
    await knex(TableName.CertificateTemplate).update({
      extendedKeyUsages: []
    });
  }

  // Certificate
  const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
  const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
  await knex.schema.alterTable(TableName.Certificate, (tb) => {
    if (!doesCertTableHaveKeyUsages) {
      tb.specificType("keyUsages", "text[]");
    }

    if (!doesCertTableHaveExtendedKeyUsages) {
      tb.specificType("extendedKeyUsages", "text[]");
    }
  });

  if (!doesCertTableHaveKeyUsages) {
    await knex(TableName.Certificate).update({
      keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
    });
  }

  if (!doesCertTableHaveExtendedKeyUsages) {
    await knex(TableName.Certificate).update({
      extendedKeyUsages: []
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  // Certificate Template
  const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
  const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");

  await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
    if (hasKeyUsagesCol) {
      t.dropColumn("keyUsages");
    }
    if (hasExtendedKeyUsagesCol) {
      t.dropColumn("extendedKeyUsages");
    }
  });

  // Certificate
  const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
  const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
  await knex.schema.alterTable(TableName.Certificate, (t) => {
    if (doesCertTableHaveKeyUsages) {
      t.dropColumn("keyUsages");
    }
    if (doesCertTableHaveExtendedKeyUsages) {
      t.dropColumn("extendedKeyUsages");
    }
  });
}

@@ -1,76 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasAccessApproverGroupId = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
  const hasSecretApproverGroupId = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
  if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
      // add column approverGroupId to AccessApprovalPolicyApprover
      if (!hasAccessApproverGroupId) {
        table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
      }

      // make approverUserId nullable
      if (hasAccessApproverUserId) {
        table.uuid("approverUserId").nullable().alter();
      }
    });
    await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
      // add column approverGroupId to SecretApprovalPolicyApprover
      if (!hasSecretApproverGroupId) {
        table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
      }

      // make approverUserId nullable
      if (hasSecretApproverUserId) {
        table.uuid("approverUserId").nullable().alter();
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasAccessApproverGroupId = await knex.schema.hasColumn(
    TableName.AccessApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
  const hasSecretApproverGroupId = await knex.schema.hasColumn(
    TableName.SecretApprovalPolicyApprover,
    "approverGroupId"
  );
  const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");

  if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
    await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
      if (hasAccessApproverGroupId) {
        table.dropColumn("approverGroupId");
      }
      // make approverUserId not nullable
      if (hasAccessApproverUserId) {
        table.uuid("approverUserId").notNullable().alter();
      }
    });

    // remove
    await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
      if (hasSecretApproverGroupId) {
        table.dropColumn("approverGroupId");
      }
      // make approverUserId not nullable
      if (hasSecretApproverUserId) {
        table.uuid("approverUserId").notNullable().alter();
      }
    });
  }
}
@@ -1,24 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.IdentityMetadata))) {
    await knex.schema.createTable(TableName.IdentityMetadata, (tb) => {
      tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      tb.string("key").notNullable();
      tb.string("value").notNullable();
      tb.uuid("orgId").notNullable();
      tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      tb.uuid("userId");
      tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      tb.uuid("identityId");
      tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
      tb.timestamps(true, true, true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.IdentityMetadata);
}

@@ -1,30 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      t.string("iv").nullable().alter();
      t.string("tag").nullable().alter();
      t.string("encryptedValue").nullable().alter();

      t.binary("encryptedSecret").nullable();
      t.string("hashedHex").nullable().alter();

      t.string("identifier", 64).nullable();
      t.unique("identifier");
      t.index("identifier");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSharing)) {
    await knex.schema.alterTable(TableName.SecretSharing, (t) => {
      t.dropColumn("encryptedSecret");

      t.dropColumn("identifier");
    });
  }
}

@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed"))) {
    await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
      tb.datetime("lastUsed");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed")) {
    await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
      tb.dropColumn("lastUsed");
    });
  }
}

@@ -1,46 +0,0 @@
import { Knex } from "knex";

import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
    const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    // drop constraint if exists (won't exist if rolled back, see below)
    await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);

    // projectId for CMEK functionality
    await knex.schema.alterTable(TableName.KmsKey, (table) => {
      table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");

      if (hasOrgId) {
        table.unique(["orgId", "projectId", "slug"]);
      }

      if (hasSlug) {
        table.renameColumn("slug", "name");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
    const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");

    // remove projectId for CMEK functionality
    await knex.schema.alterTable(TableName.KmsKey, (table) => {
      if (hasName) {
        table.renameColumn("name", "slug");
      }

      if (hasOrgId) {
        table.dropUnique(["orgId", "projectId", "slug"]);
      }
      table.dropColumn("projectId");
    });
  }
}

@@ -1,30 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (!hasSlug) {
      // add slug back temporarily and set value equal to name
      await knex.schema
        .alterTable(TableName.KmsKey, (table) => {
          table.string("slug", 32);
        })
        .then(() => knex(TableName.KmsKey).update("slug", knex.ref("name")));
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (hasSlug) {
      await knex.schema.alterTable(TableName.KmsKey, (table) => {
        table.dropColumn("slug");
      });
    }
  }
}
@ -1,48 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
|
||||
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesOrgIdExist) {
|
||||
t.dropForeign("orgId");
|
||||
}
|
||||
|
||||
if (doesProjectIdExist) {
|
||||
t.dropForeign("projectId");
|
||||
}
|
||||
|
||||
// add normalized field
|
||||
if (!doesProjectNameExist) {
|
||||
t.string("projectName");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
|
||||
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
|
||||
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
|
||||
|
||||
if (await knex.schema.hasTable(TableName.AuditLog)) {
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (doesOrgIdExist) {
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
}
|
||||
if (doesProjectIdExist) {
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
}
|
||||
|
||||
// remove normalized field
|
||||
if (doesProjectNameExist) {
|
||||
t.dropColumn("projectName");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -1,164 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
const formatPartitionDate = (date: Date) => {
|
||||
const year = date.getFullYear();
|
||||
const month = String(date.getMonth() + 1).padStart(2, "0");
|
||||
const day = String(date.getDate()).padStart(2, "0");
|
||||
|
||||
return `${year}-${month}-${day}`;
|
||||
};
|
||||
|
||||
const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
|
||||
const startDateStr = formatPartitionDate(startDate);
|
||||
const endDateStr = formatPartitionDate(endDate);
|
||||
|
||||
const partitionName = `${TableName.PartitionedAuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(
|
||||
/-/g,
|
||||
""
|
||||
)}`;
|
||||
|
||||
await knex.schema.raw(
|
||||
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.PartitionedAuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
|
||||
);
|
||||
};
|
||||
|
||||
const isUsingDedicatedAuditLogDb = Boolean(process.env.AUDIT_LOGS_DB_CONNECTION_URI);
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
  if (!isUsingDedicatedAuditLogDb && (await knex.schema.hasTable(TableName.AuditLog))) {
    console.info("Dropping primary key of Audit Log table...");
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      // remove existing keys
      t.dropPrimary();
    });
  }

  // create a new partitioned table for audit logs
  if (!(await knex.schema.hasTable(TableName.PartitionedAuditLog))) {
    const createTableSql = knex.schema
      .createTable(TableName.PartitionedAuditLog, (t) => {
        t.uuid("id").defaultTo(knex.fn.uuid());
        t.string("actor").notNullable();
        t.jsonb("actorMetadata").notNullable();
        t.string("ipAddress");
        t.string("eventType").notNullable();
        t.jsonb("eventMetadata");
        t.string("userAgent");
        t.string("userAgentType");
        t.datetime("expiresAt");
        t.timestamps(true, true, true);
        t.uuid("orgId");
        t.string("projectId");
        t.string("projectName");
        t.primary(["id", "createdAt"]);
      })
      .toString();

    console.info("Creating partition table...");
    await knex.schema.raw(`
      ${createTableSql} PARTITION BY RANGE ("createdAt");
    `);

    console.log("Adding indices...");
    await knex.schema.alterTable(TableName.PartitionedAuditLog, (t) => {
      t.index(["projectId", "createdAt"]);
      t.index(["orgId", "createdAt"]);
      t.index("expiresAt");
      t.index("orgId");
      t.index("projectId");
    });
    console.log("Adding GIN indices...");

    await knex.raw(
      `CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.PartitionedAuditLog} USING gin("actorMetadata" jsonb_path_ops)`
    );
    console.log("GIN index for actorMetadata done");

    await knex.raw(
      `CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.PartitionedAuditLog} USING gin("eventMetadata" jsonb_path_ops)`
    );
    console.log("GIN index for eventMetadata done");

    // create default partition
    console.log("Creating default partition...");
    await knex.schema.raw(
      `CREATE TABLE ${TableName.PartitionedAuditLog}_default PARTITION OF ${TableName.PartitionedAuditLog} DEFAULT`
    );

    const nextDate = new Date();
    nextDate.setDate(nextDate.getDate() + 1);
    const nextDateStr = formatPartitionDate(nextDate);

    // attach existing audit log table as a partition ONLY if using the same DB
    if (!isUsingDedicatedAuditLogDb) {
      console.log("Attaching existing audit log table as a partition...");
      await knex.schema.raw(`
        ALTER TABLE ${TableName.AuditLog} ADD CONSTRAINT audit_log_old
        CHECK ( "createdAt" < DATE '${nextDateStr}' );

        ALTER TABLE ${TableName.PartitionedAuditLog} ATTACH PARTITION ${TableName.AuditLog}
        FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
      `);
    }

    // create partition from now until end of month
    console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
    await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));

    // create partitions 4 years ahead
    const partitionMonths = 4 * 12;
    const partitionPromises: Promise<void>[] = [];
    for (let x = 1; x <= partitionMonths; x += 1) {
      partitionPromises.push(
        createAuditLogPartition(
          knex,
          new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
          new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
        )
      );
    }

    await Promise.all(partitionPromises);
    console.log("Partition migration complete");
  }
}
export async function down(knex: Knex): Promise<void> {
  const partitionSearchResult = await knex.raw(`
    SELECT inhrelid::regclass::text
    FROM pg_inherits
    WHERE inhparent::regclass::text = '${TableName.PartitionedAuditLog}'
    AND inhrelid::regclass::text = '${TableName.AuditLog}'
  `);

  const isAuditLogAPartition = partitionSearchResult.rows.length > 0;
  if (isAuditLogAPartition) {
    // detach audit log from partition
    console.log("Detaching original audit log table from new partition table...");
    await knex.schema.raw(`
      ALTER TABLE ${TableName.PartitionedAuditLog} DETACH PARTITION ${TableName.AuditLog};

      ALTER TABLE ${TableName.AuditLog} DROP CONSTRAINT audit_log_old;
    `);

    // revert audit log modifications
    console.log("Reverting changes made to the audit log table...");
    if (await knex.schema.hasTable(TableName.AuditLog)) {
      await knex.schema.alterTable(TableName.AuditLog, (t) => {
        // we drop this first because adding to the partition results in a new primary key
        t.dropPrimary();

        // add back the original keys of the audit logs table
        t.primary(["id"], {
          constraintName: "audit_logs_pkey"
        });
      });
    }
  }

  await knex.schema.dropTableIfExists(TableName.PartitionedAuditLog);
  console.log("Partition rollback complete");
}
@@ -1,6 +0,0 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export const dropConstraintIfExists = (tableName: TableName, constraintName: string, knex: Knex) =>
  knex.raw(`ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`);
@@ -54,7 +54,7 @@ export const getSecretManagerDataKey = async (knex: Knex, projectId: string) =>
  } else {
    const [kmsDoc] = await knex(TableName.KmsKey)
      .insert({
        name: slugify(alphaNumericNanoId(8).toLowerCase()),
        slug: slugify(alphaNumericNanoId(8).toLowerCase()),
        orgId: project.orgId,
        isReserved: false
      })
@@ -12,8 +12,7 @@ export const AccessApprovalPoliciesApproversSchema = z.object({
  policyId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  approverUserId: z.string().uuid().nullable().optional(),
  approverGroupId: z.string().uuid().nullable().optional()
  approverUserId: z.string().uuid()
});

export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;
@@ -28,8 +28,7 @@ export const CertificateAuthoritiesSchema = z.object({
  keyAlgorithm: z.string(),
  notBefore: z.date().nullable().optional(),
  notAfter: z.date().nullable().optional(),
  activeCaCertId: z.string().uuid().nullable().optional(),
  requireTemplateForIssuance: z.boolean().default(false)
  activeCaCertId: z.string().uuid().nullable().optional()
});

export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
@@ -16,9 +16,7 @@ export const CertificateTemplatesSchema = z.object({
  subjectAlternativeName: z.string(),
  ttl: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  keyUsages: z.string().array().nullable().optional(),
  extendedKeyUsages: z.string().array().nullable().optional()
  updatedAt: z.date()
});

export type TCertificateTemplates = z.infer<typeof CertificateTemplatesSchema>;
@@ -22,9 +22,7 @@ export const CertificatesSchema = z.object({
  revocationReason: z.number().nullable().optional(),
  altNames: z.string().default("").nullable().optional(),
  caCertId: z.string().uuid(),
  certificateTemplateId: z.string().uuid().nullable().optional(),
  keyUsages: z.string().array().nullable().optional(),
  extendedKeyUsages: z.string().array().nullable().optional()
  certificateTemplateId: z.string().uuid().nullable().optional()
});

export type TCertificates = z.infer<typeof CertificatesSchema>;
@@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityMetadataSchema = z.object({
  id: z.string().uuid(),
  key: z.string(),
  value: z.string(),
  orgId: z.string().uuid(),
  userId: z.string().uuid().nullable().optional(),
  identityId: z.string().uuid().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TIdentityMetadata = z.infer<typeof IdentityMetadataSchema>;
export type TIdentityMetadataInsert = Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>;
export type TIdentityMetadataUpdate = Partial<Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>>;
@@ -31,7 +31,6 @@ export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
export * from "./identity-oidc-auths";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
@@ -55,7 +54,6 @@ export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
export * from "./partitioned-audit-logs";
export * from "./pki-alerts";
export * from "./pki-collection-items";
export * from "./pki-collections";
@@ -64,7 +62,6 @@ export * from "./project-environments";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
@@ -104,7 +101,6 @@ export * from "./secret-versions-v2";
export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./super-admin";
export * from "./trusted-ips";
export * from "./user-actions";
@@ -113,4 +109,3 @@ export * from "./user-encryption-keys";
export * from "./user-group-membership";
export * from "./users";
export * from "./webhooks";
export * from "./workflow-integrations";
@@ -13,11 +13,9 @@ export const KmsKeysSchema = z.object({
  isDisabled: z.boolean().default(false).nullable().optional(),
  isReserved: z.boolean().default(true).nullable().optional(),
  orgId: z.string().uuid(),
  name: z.string(),
  slug: z.string(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string().nullable().optional(),
  slug: z.string().nullable().optional()
  updatedAt: z.date()
});

export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
@@ -70,8 +70,6 @@ export enum TableName {
  IdentityProjectMembership = "identity_project_memberships",
  IdentityProjectMembershipRole = "identity_project_membership_role",
  IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
  // used by both identity and users
  IdentityMetadata = "identity_metadata",
  ScimToken = "scim_tokens",
  AccessApprovalPolicy = "access_approval_policies",
  AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@@ -90,7 +88,6 @@ export enum TableName {
  OidcConfig = "oidc_configs",
  LdapGroupMap = "ldap_group_maps",
  AuditLog = "audit_logs",
  PartitionedAuditLog = "partitioned_audit_logs",
  AuditLogStream = "audit_log_streams",
  GitAppInstallSession = "git_app_install_sessions",
  GitAppOrg = "git_app_org",
@@ -117,10 +114,7 @@ export enum TableName {
  InternalKms = "internal_kms",
  InternalKmsKeyVersion = "internal_kms_key_version",
  // @depreciated
  KmsKeyVersion = "kms_key_versions",
  WorkflowIntegrations = "workflow_integrations",
  SlackIntegrations = "slack_integrations",
  ProjectSlackConfigs = "project_slack_configs"
  KmsKeyVersion = "kms_key_versions"
}

export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -26,8 +26,7 @@ export const OidcConfigsSchema = z.object({
  isActive: z.boolean(),
  createdAt: z.date(),
  updatedAt: z.date(),
  orgId: z.string().uuid(),
  lastUsed: z.date().nullable().optional()
  orgId: z.string().uuid()
});

export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
@@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const PartitionedAuditLogsSchema = z.object({
  id: z.string().uuid(),
  actor: z.string(),
  actorMetadata: z.unknown(),
  ipAddress: z.string().nullable().optional(),
  eventType: z.string(),
  eventMetadata: z.unknown().nullable().optional(),
  userAgent: z.string().nullable().optional(),
  userAgentType: z.string().nullable().optional(),
  expiresAt: z.date().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  orgId: z.string().uuid().nullable().optional(),
  projectId: z.string().nullable().optional(),
  projectName: z.string().nullable().optional()
});

export type TPartitionedAuditLogs = z.infer<typeof PartitionedAuditLogsSchema>;
export type TPartitionedAuditLogsInsert = Omit<z.input<typeof PartitionedAuditLogsSchema>, TImmutableDBKeys>;
export type TPartitionedAuditLogsUpdate = Partial<Omit<z.input<typeof PartitionedAuditLogsSchema>, TImmutableDBKeys>>;
@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const ProjectSlackConfigsSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(),
  slackIntegrationId: z.string().uuid(),
  isAccessRequestNotificationEnabled: z.boolean().default(false),
  accessRequestChannels: z.string().default(""),
  isSecretRequestNotificationEnabled: z.boolean().default(false),
  secretRequestChannels: z.string().default(""),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TProjectSlackConfigs = z.infer<typeof ProjectSlackConfigsSchema>;
export type TProjectSlackConfigsInsert = Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>;
export type TProjectSlackConfigsUpdate = Partial<Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>>;
@@ -12,8 +12,7 @@ export const SecretApprovalPoliciesApproversSchema = z.object({
  policyId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  approverUserId: z.string().uuid().nullable().optional(),
  approverGroupId: z.string().uuid().nullable().optional()
  approverUserId: z.string().uuid()
});

export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;
@@ -5,16 +5,14 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SecretSharingSchema = z.object({
  id: z.string().uuid(),
  encryptedValue: z.string().nullable().optional(),
  iv: z.string().nullable().optional(),
  tag: z.string().nullable().optional(),
  hashedHex: z.string().nullable().optional(),
  encryptedValue: z.string(),
  iv: z.string(),
  tag: z.string(),
  hashedHex: z.string(),
  expiresAt: z.date(),
  userId: z.string().uuid().nullable().optional(),
  orgId: z.string().uuid().nullable().optional(),
@@ -23,10 +21,7 @@ export const SecretSharingSchema = z.object({
  expiresAfterViews: z.number().nullable().optional(),
  accessType: z.string().default("anyone"),
  name: z.string().nullable().optional(),
  lastViewedAt: z.date().nullable().optional(),
  password: z.string().nullable().optional(),
  encryptedSecret: zodBuffer.nullable().optional(),
  identifier: z.string().nullable().optional()
  lastViewedAt: z.date().nullable().optional()
});

export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
@@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SlackIntegrationsSchema = z.object({
  id: z.string().uuid(),
  teamId: z.string(),
  teamName: z.string(),
  slackUserId: z.string(),
  slackAppId: z.string(),
  encryptedBotAccessToken: zodBuffer,
  slackBotId: z.string(),
  slackBotUserId: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TSlackIntegrations = z.infer<typeof SlackIntegrationsSchema>;
export type TSlackIntegrationsInsert = Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>;
export type TSlackIntegrationsUpdate = Partial<Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>>;
@@ -5,8 +5,6 @@

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const SuperAdminSchema = z.object({
@@ -21,9 +19,7 @@ export const SuperAdminSchema = z.object({
  trustLdapEmails: z.boolean().default(false).nullable().optional(),
  trustOidcEmails: z.boolean().default(false).nullable().optional(),
  defaultAuthOrgId: z.string().uuid().nullable().optional(),
  enabledLoginMethods: z.string().array().nullable().optional(),
  encryptedSlackClientId: zodBuffer.nullable().optional(),
  encryptedSlackClientSecret: zodBuffer.nullable().optional()
  enabledLoginMethods: z.string().array().nullable().optional()
});

export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const WorkflowIntegrationsSchema = z.object({
  id: z.string().uuid(),
  integration: z.string(),
  slug: z.string(),
  orgId: z.string().uuid(),
  description: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TWorkflowIntegrations = z.infer<typeof WorkflowIntegrationsSchema>;
export type TWorkflowIntegrationsInsert = Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>;
export type TWorkflowIntegrationsUpdate = Partial<Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>>;
@ -1,9 +1,7 @@
|
||||
import { nanoid } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
|
||||
import { EnforcementLevel } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -12,32 +10,28 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "POST",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
projectSlug: z.string().trim(),
|
||||
name: z.string().optional(),
|
||||
secretPath: z.string().trim().default("/"),
|
||||
environment: z.string(),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
projectSlug: z.string().trim(),
|
||||
name: z.string().optional(),
|
||||
secretPath: z.string().trim().default("/"),
|
||||
environment: z.string(),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.accessApprovalPolicy.createAccessApprovalPolicy({
|
||||
actor: req.permission.type,
|
||||
@ -56,9 +50,6 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
projectSlug: z.string().trim()
|
||||
@ -67,15 +58,14 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
200: z.object({
|
||||
approvals: sapPubSchema
|
||||
.extend({
|
||||
approvers: z
|
||||
.object({ type: z.nativeEnum(ApproverType), id: z.string().nullable().optional() })
|
||||
.array()
|
||||
.nullable()
|
||||
.optional()
|
||||
userApprovers: z
|
||||
.object({
|
||||
userId: z.string()
|
||||
})
|
||||
.array(),
|
||||
secretPath: z.string().optional().nullable()
|
||||
})
|
||||
.array()
|
||||
.nullable()
|
||||
.optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
@ -125,37 +115,33 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/:policyId",
|
||||
method: "PATCH",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
policyId: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().optional(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
approvers: z
|
||||
.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
|
||||
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
|
||||
])
|
||||
.array()
|
||||
.min(1, { message: "At least one approver should be provided" }),
|
||||
approvals: z.number().min(1).optional(),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
name: z.string().optional(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.transform((val) => (val === "" ? "/" : val)),
|
||||
approverUserIds: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approverUserIds.length, {
|
||||
path: ["approvals"],
|
||||
message: "The number of approvals should be lower than the number of approvers."
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
await server.services.accessApprovalPolicy.updateAccessApprovalPolicy({
|
||||
policyId: req.params.policyId,
|
||||
@ -171,9 +157,6 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
server.route({
|
||||
url: "/:policyId",
|
||||
method: "DELETE",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
policyId: z.string()
|
||||
@ -184,7 +167,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.accessApprovalPolicy.deleteAccessApprovalPolicy({
|
||||
actor: req.permission.type,
|
||||
@ -196,44 +179,4 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
return { approval };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:policyId",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
policyId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approval: sapPubSchema.extend({
|
||||
approvers: z
|
||||
.object({
|
||||
type: z.nativeEnum(ApproverType),
|
||||
id: z.string().nullable().optional(),
|
||||
name: z.string().nullable().optional()
|
||||
})
|
||||
.array()
|
||||
.nullable()
|
||||
.optional()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const approval = await server.services.accessApprovalPolicy.getAccessApprovalPolicyById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.params
|
||||
});
|
||||
|
||||
return { approval };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -11,30 +11,6 @@ export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get CRL in DER format (deprecated)",
|
||||
params: z.object({
|
||||
crlId: z.string().trim().describe(CA_CRLS.GET.crlId)
|
||||
}),
|
||||
response: {
|
||||
200: z.instanceof(Buffer)
|
||||
}
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId);
|
||||
|
||||
res.header("Content-Type", "application/pkix-crl");
|
||||
|
||||
return Buffer.from(crl);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:crlId/der",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
description: "Get CRL in DER format",
|
||||
params: z.object({
|
||||
|
@ -2,7 +2,7 @@ import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
|
||||
import { DEFAULT_REQUEST_SCHEMA, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@ -18,6 +18,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
body: z.object({
|
||||
dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
|
||||
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
|
||||
@ -65,6 +66,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.DELETE.leaseId)
|
||||
}),
|
||||
@ -107,6 +109,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.leaseId)
|
||||
}),
|
||||
@ -160,6 +163,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
leaseId: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.GET_BY_LEASEID.leaseId)
|
||||
}),
|
||||
|
@ -4,7 +4,7 @@ import { z } from "zod";
|
||||
|
||||
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
|
||||
import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/providers/models";
|
||||
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { DEFAULT_REQUEST_SCHEMA, DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
@ -20,6 +20,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
body: z.object({
|
||||
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.CREATE.projectSlug),
|
||||
provider: DynamicSecretProviderSchema.describe(DYNAMIC_SECRETS.CREATE.provider),
|
||||
@ -77,39 +78,6 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/entra-id/users",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
|
||||
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
|
||||
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
|
||||
}),
|
||||
response: {
|
||||
200: z
|
||||
.object({
|
||||
name: z.string().min(1).describe("The name of the user"),
|
||||
id: z.string().min(1).describe("The ID of the user"),
|
||||
email: z.string().min(1).describe("The email of the user")
|
||||
})
|
||||
.array()
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
|
||||
tenantId: req.body.tenantId,
|
||||
applicationId: req.body.applicationId,
|
||||
clientSecret: req.body.clientSecret
|
||||
});
|
||||
return data;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:name",
|
||||
@ -117,6 +85,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().toLowerCase().describe(DYNAMIC_SECRETS.UPDATE.name)
|
||||
}),
|
||||
@ -184,6 +153,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().toLowerCase().describe(DYNAMIC_SECRETS.DELETE.name)
|
||||
}),
|
||||
@ -220,6 +190,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().min(1).describe(DYNAMIC_SECRETS.GET_BY_NAME.name)
|
||||
}),
|
||||
@ -257,6 +228,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
querystring: z.object({
|
||||
projectSlug: z.string().min(1).describe(DYNAMIC_SECRETS.LIST.projectSlug),
|
||||
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRETS.LIST.path),
|
||||
@ -270,7 +242,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const dynamicSecretCfgs = await server.services.dynamicSecret.listDynamicSecretsByEnv({
|
||||
const dynamicSecretCfgs = await server.services.dynamicSecret.list({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
@ -288,6 +260,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
...DEFAULT_REQUEST_SCHEMA,
|
||||
params: z.object({
|
||||
name: z.string().min(1).describe(DYNAMIC_SECRETS.LIST_LEAES_BY_NAME.name)
|
||||
}),
|
||||
|
@ -26,7 +26,7 @@ const sanitizedExternalSchemaForGetAll = KmsKeysSchema.pick({
|
||||
isDisabled: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
name: true
|
||||
slug: true
|
||||
})
|
||||
.extend({
|
||||
externalKms: ExternalKmsSchema.pick({
|
||||
@ -57,7 +57,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
name: z.string().min(1).trim().toLowerCase(),
|
||||
slug: z.string().min(1).trim().toLowerCase(),
|
||||
description: z.string().trim().optional(),
|
||||
provider: ExternalKmsInputSchema
|
||||
}),
|
||||
@ -74,7 +74,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
provider: req.body.provider,
|
||||
description: req.body.description
|
||||
});
|
||||
@ -87,7 +87,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
provider: req.body.provider.type,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
description: req.body.description
|
||||
}
|
||||
}
|
||||
@ -108,7 +108,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
id: z.string().trim().min(1)
|
||||
}),
|
||||
body: z.object({
|
||||
name: z.string().min(1).trim().toLowerCase().optional(),
|
||||
slug: z.string().min(1).trim().toLowerCase().optional(),
|
||||
description: z.string().trim().optional(),
|
||||
provider: ExternalKmsInputUpdateSchema
|
||||
}),
|
||||
@ -125,7 +125,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
provider: req.body.provider,
|
||||
description: req.body.description,
|
||||
id: req.params.id
|
||||
@ -139,7 +139,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
provider: req.body.provider.type,
|
||||
name: req.body.name,
|
||||
slug: req.body.slug,
|
||||
description: req.body.description
|
||||
}
|
||||
}
|
||||
@ -182,7 +182,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
type: EventType.DELETE_KMS,
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
name: externalKms.name
|
||||
slug: externalKms.slug
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -224,7 +224,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
type: EventType.GET_KMS,
|
||||
metadata: {
|
||||
kmsId: externalKms.id,
|
||||
name: externalKms.name
|
||||
slug: externalKms.slug
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -260,13 +260,13 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/name/:name",
|
||||
url: "/slug/:slug",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
name: z.string().trim().min(1)
|
||||
slug: z.string().trim().min(1)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -276,12 +276,12 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const externalKms = await server.services.externalKms.findByName({
|
||||
const externalKms = await server.services.externalKms.findBySlug({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
name: req.params.name
|
||||
slug: req.params.slug
|
||||
});
|
||||
return { externalKms };
|
||||
}
|
||||
|
@ -10,7 +10,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "POST",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z.object({
|
||||
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
|
||||
@ -43,59 +43,12 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:id",
|
||||
method: "GET",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.GET_BY_ID.id)
|
||||
}),
|
||||
response: {
|
||||
200: GroupsSchema
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const group = await server.services.group.getGroupById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.params.id
|
||||
});
|
||||
|
||||
return group;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/",
|
||||
method: "GET",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
response: {
|
||||
200: GroupsSchema.array()
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const groups = await server.services.org.getOrgGroups({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
orgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
return groups;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:id",
|
||||
url: "/:currentSlug",
|
||||
method: "PATCH",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.UPDATE.id)
|
||||
currentSlug: z.string().trim().describe(GROUPS.UPDATE.currentSlug)
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
@ -117,7 +70,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const group = await server.services.group.updateGroup({
|
||||
id: req.params.id,
|
||||
currentSlug: req.params.currentSlug,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
@ -130,12 +83,12 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/:id",
|
||||
url: "/:slug",
|
||||
method: "DELETE",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.DELETE.id)
|
||||
slug: z.string().trim().describe(GROUPS.DELETE.slug)
|
||||
}),
|
||||
response: {
|
||||
200: GroupsSchema
|
||||
@ -143,7 +96,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const group = await server.services.group.deleteGroup({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
@ -156,11 +109,11 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id/users",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
url: "/:slug/users",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.LIST_USERS.id)
|
||||
slug: z.string().trim().describe(GROUPS.LIST_USERS.slug)
|
||||
}),
|
||||
querystring: z.object({
|
||||
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
|
||||
@ -188,25 +141,24 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { users, totalCount } = await server.services.group.listGroupUsers({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.query
|
||||
});
|
||||
|
||||
return { users, totalCount };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/:id/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
url: "/:slug/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.ADD_USER.id),
|
||||
slug: z.string().trim().describe(GROUPS.ADD_USER.slug),
|
||||
username: z.string().trim().describe(GROUPS.ADD_USER.username)
|
||||
}),
|
||||
response: {
|
||||
@ -221,7 +173,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const user = await server.services.group.addUserToGroup({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
username: req.params.username,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
@ -235,11 +187,11 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
url: "/:slug/users/:username",
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string().trim().describe(GROUPS.DELETE_USER.id),
|
||||
slug: z.string().trim().describe(GROUPS.DELETE_USER.slug),
|
||||
username: z.string().trim().describe(GROUPS.DELETE_USER.username)
|
||||
}),
|
||||
response: {
|
||||
@ -254,7 +206,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
handler: async (req) => {
|
||||
const user = await server.services.group.removeUserFromGroup({
|
||||
id: req.params.id,
|
||||
groupSlug: req.params.slug,
|
||||
username: req.params.username,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
|
@ -5,7 +5,7 @@ import { z } from "zod";
|
||||
|
||||
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { UnauthorizedError } from "@app/lib/errors";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
@ -61,7 +61,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
handler: async (req) => {
|
||||
const { permissions, privilegePermission } = req.body;
|
||||
if (!permissions && !privilegePermission) {
|
||||
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
|
||||
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
|
||||
}
|
||||
|
||||
const permission = privilegePermission
|
||||
@ -140,7 +140,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
handler: async (req) => {
|
||||
const { permissions, privilegePermission } = req.body;
|
||||
if (!permissions && !privilegePermission) {
|
||||
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
|
||||
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
|
||||
}
|
||||
|
||||
const permission = privilegePermission
|
||||
@ -224,7 +224,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
handler: async (req) => {
|
||||
const { permissions, privilegePermission, ...updatedInfo } = req.body.privilegeDetails;
|
||||
if (!permissions && !privilegePermission) {
|
||||
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
|
||||
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
|
||||
}
|
||||
|
||||
const permission = privilegePermission
|
||||
|
@ -1,6 +1,6 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
// TODO(akhilmhdh): Fix this when license service gets it type
|
||||
// TODO(akhilmhdh): Fix this when licence service gets it type
|
||||
import { z } from "zod";
|
||||
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
|
@ -3,11 +3,10 @@ import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
|
||||
import { ProjectPermissionSchema } from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { ProjectPermissionSchema, SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
@ -102,7 +101,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
message: "Slug must be a valid"
|
||||
}),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
}),
|
||||
response: {
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { PartitionedAuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas";
|
||||
import { AuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas";
|
||||
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { AUDIT_LOGS, PROJECTS } from "@app/lib/api-docs";
|
||||
import { getLastMidnightDateISO, removeTrailingSlash } from "@app/lib/fn";
|
||||
@ -87,12 +87,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
/*
|
||||
* Daniel: This endpoint is no longer is use.
|
||||
* We are keeping it for now because it has been exposed in our public api docs for a while, so by removing it we are likely to break users workflows.
|
||||
*
|
||||
* Please refer to the new endpoint, GET /api/v1/organization/audit-logs, for the same (and more) functionality.
|
||||
*/
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:workspaceId/audit-logs",
|
||||
@ -107,7 +101,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
],
|
||||
params: z.object({
|
||||
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.projectId)
|
||||
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.workspaceId)
|
||||
}),
|
||||
querystring: z.object({
|
||||
eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType),
|
||||
@ -120,7 +114,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
auditLogs: PartitionedAuditLogsSchema.omit({
|
||||
auditLogs: AuditLogsSchema.omit({
|
||||
eventMetadata: true,
|
||||
eventType: true,
|
||||
actor: true,
|
||||
@ -128,12 +122,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
project: z
|
||||
.object({
|
||||
name: z.string(),
|
||||
slug: z.string()
|
||||
})
|
||||
.optional(),
|
||||
event: z.object({
|
||||
type: z.string(),
|
||||
metadata: z.any()
|
||||
@ -150,20 +138,16 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const auditLogs = await server.services.auditLog.listAuditLogs({
|
||||
const auditLogs = await server.services.auditLog.listProjectAuditLogs({
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actor: req.permission.type,
|
||||
|
||||
filter: {
|
||||
...req.query,
|
||||
projectId: req.params.workspaceId,
|
||||
endDate: req.query.endDate,
|
||||
startDate: req.query.startDate || getLastMidnightDateISO(),
|
||||
auditLogActorId: req.query.actor,
|
||||
eventType: req.query.eventType ? [req.query.eventType] : undefined
|
||||
}
|
||||
projectId: req.params.workspaceId,
|
||||
...req.query,
|
||||
endDate: req.query.endDate,
|
||||
startDate: req.query.startDate || getLastMidnightDateISO(),
|
||||
auditLogActor: req.query.actor,
|
||||
actor: req.permission.type
|
||||
});
|
||||
return { auditLogs };
|
||||
}
|
||||
@ -203,7 +187,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({
|
||||
secretManagerKmsKey: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
isExternal: z.boolean()
|
||||
})
|
||||
})
|
||||
@ -243,7 +227,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({
|
||||
secretManagerKmsKey: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
isExternal: z.boolean()
|
||||
})
|
||||
})
|
||||
@ -268,7 +252,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
metadata: {
|
||||
secretManagerKmsKey: {
|
||||
id: secretManagerKmsKey.id,
|
||||
name: secretManagerKmsKey.name
|
||||
slug: secretManagerKmsKey.slug
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -336,7 +320,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
|
||||
200: z.object({
|
||||
secretManagerKmsKey: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
isExternal: z.boolean()
|
||||
})
|
||||
})
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { RateLimitSchema } from "@app/db/schemas";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
@ -29,7 +29,7 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
|
||||
handler: async () => {
|
||||
const rateLimit = await server.services.rateLimit.getRateLimits();
|
||||
if (!rateLimit) {
|
||||
throw new NotFoundError({
|
||||
throw new BadRequestError({
|
||||
name: "Get Rate Limit Error",
|
||||
message: "Rate limit configuration does not exist."
|
||||
});
|
||||
|
@ -61,7 +61,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
id: samlConfigId
|
||||
};
|
||||
} else {
|
||||
throw new BadRequestError({ message: "Missing sso identifier or org slug" });
|
||||
throw new BadRequestError({ message: "Missing sso identitier or org slug" });
|
||||
}
|
||||
|
||||
const ssoConfig = await server.services.saml.getSaml(ssoLookupDetails);
|
||||
@ -100,52 +100,25 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
|
||||
async (req, profile, cb) => {
|
||||
try {
|
||||
if (!profile) throw new BadRequestError({ message: "Missing profile" });
|
||||
const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved
|
||||
|
||||
const email =
|
||||
profile?.email ??
|
||||
// entra sends data in this format
|
||||
(profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ??
|
||||
(profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved\
|
||||
|
||||
const firstName = (profile.firstName ??
|
||||
// entra sends data in this format
|
||||
profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/firstName"]) as string;
|
||||
|
||||
const lastName =
|
||||
profile.lastName ?? profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/lastName"];
|
||||
|
||||
if (!email || !firstName) {
|
||||
logger.info(
|
||||
{
|
||||
err: new Error("Invalid saml request. Missing email or first name"),
|
||||
profile
|
||||
},
|
||||
`email: ${email} firstName: ${profile.firstName as string}`
|
||||
);
|
||||
if (!email || !profile.firstName) {
|
||||
throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
|
||||
}
|
||||
|
||||
const userMetadata = Object.keys(profile.attributes || {})
|
||||
.map((key) => {
|
||||
// for the ones like in format: http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email
|
||||
const formatedKey = key.startsWith("http") ? key.split("/").at(-1) || "" : key;
|
||||
return { key: formatedKey, value: String((profile.attributes as Record<string, string>)[key]) };
|
||||
})
|
||||
.filter((el) => el.key && !["email", "firstName", "lastName"].includes(el.key));
|
||||
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
|
||||
externalId: profile.nameID,
|
||||
email,
|
||||
firstName,
|
||||
lastName: lastName as string,
|
||||
firstName: profile.firstName as string,
|
||||
lastName: profile.lastName as string,
|
||||
relayState: (req.body as { RelayState?: string }).RelayState,
|
||||
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
|
||||
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string,
|
||||
metadata: userMetadata
|
||||
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string
|
||||
});
|
||||
cb(null, { isUserCompleted, providerAuthToken });
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
cb(error as Error);
|
||||
cb(null, {});
|
||||
}
|
||||
},
|
||||
() => {}
|
||||
|
@@ -5,47 +5,22 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const ScimUserSchema = z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z
.object({
familyName: z.string().trim().optional(),
givenName: z.string().trim().optional()
})
.optional(),
emails: z
.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
)
.optional(),
displayName: z.string().trim(),
active: z.boolean()
});
const ScimGroupSchema = z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z
.array(
z.object({
value: z.string(),
display: z.string().optional()
})
)
.optional(),
meta: z.object({
resourceType: z.string().trim()
})
});
export const registerScimRouter = async (server: FastifyZodProvider) => {
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
try {
const strBody = body instanceof Buffer ? body.toString() : body;
if (!strBody) {
done(null, undefined);
return;
}
const json: unknown = JSON.parse(strBody);
done(null, json);
} catch (err) {
const error = err as Error;
done(error, undefined);
}
});
server.route({
url: "/scim-tokens",
method: "POST",
@@ -152,7 +127,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
Resources: z.array(ScimUserSchema),
Resources: z.array(
z.object({
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean()
})
),
itemsPerPage: z.number(),
schemas: z.array(z.string()),
startIndex: z.number(),
@@ -180,7 +173,30 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
orgMembershipId: z.string().trim()
}),
response: {
200: ScimUserSchema
201: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean(),
groups: z.array(
z.object({
value: z.string().trim(),
display: z.string().trim()
})
)
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -200,12 +216,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
body: z.object({
schemas: z.array(z.string()),
userName: z.string().trim(),
name: z
.object({
familyName: z.string().trim().optional(),
givenName: z.string().trim().optional()
})
.optional(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z
.array(
z.object({
@@ -215,10 +229,28 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
})
)
.optional(),
active: z.boolean().default(true)
// displayName: z.string().trim(),
active: z.boolean()
}),
response: {
200: ScimUserSchema
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -228,8 +260,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const user = await req.server.services.scim.createScimUser({
externalId: req.body.userName,
email: primaryEmail,
firstName: req.body?.name?.givenName,
lastName: req.body?.name?.familyName,
firstName: req.body.name.givenName,
lastName: req.body.name.familyName,
orgId: req.permission.orgId
});
@@ -259,116 +291,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
url: "/Users/:orgMembershipId",
method: "PUT",
schema: {
params: z.object({
orgMembershipId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z
.object({
familyName: z.string().trim().optional(),
givenName: z.string().trim().optional()
})
.optional(),
displayName: z.string().trim(),
emails: z
.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
)
.optional(),
active: z.boolean()
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;
const user = await req.server.services.scim.replaceScimUser({
orgMembershipId: req.params.orgMembershipId,
orgId: req.permission.orgId,
firstName: req.body?.name?.givenName,
lastName: req.body?.name?.familyName,
active: req.body?.active,
email: primaryEmail,
externalId: req.body.userName
});
return user;
}
});
server.route({
url: "/Users/:orgMembershipId",
method: "PATCH",
schema: {
params: z.object({
orgMembershipId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
Operations: z.array(
z.union([
z.object({
op: z.union([z.literal("remove"), z.literal("Remove")]),
path: z.string().trim(),
value: z
.object({
value: z.string()
})
.array()
.optional()
}),
z.object({
op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
path: z.string().trim().optional(),
value: z.any().optional()
})
])
)
}),
response: {
200: ScimUserSchema
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.updateScimUser({
orgMembershipId: req.params.orgMembershipId,
orgId: req.permission.orgId,
operations: req.body.Operations
});
return user;
}
});
server.route({
url: "/Groups",
method: "POST",
@@ -383,10 +305,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
display: z.string()
})
)
.optional()
.optional() // okta-specific
}),
response: {
200: ScimGroupSchema
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z
.array(
z.object({
value: z.string(),
display: z.string()
})
)
.optional(),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -407,12 +344,26 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
querystring: z.object({
startIndex: z.coerce.number().default(1),
count: z.coerce.number().default(20),
filter: z.string().trim().optional(),
excludedAttributes: z.string().trim().optional()
filter: z.string().trim().optional()
}),
response: {
200: z.object({
Resources: z.array(ScimGroupSchema),
Resources: z.array(
z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
),
itemsPerPage: z.number(),
schemas: z.array(z.string()),
startIndex: z.number(),
@@ -426,8 +377,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
orgId: req.permission.orgId,
startIndex: req.query.startIndex,
filter: req.query.filter,
limit: req.query.count,
isMembersExcluded: req.query.excludedAttributes === "members"
limit: req.query.count
});
return groups;
@@ -442,7 +392,20 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
groupId: z.string().trim()
}),
response: {
200: ScimGroupSchema
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
@@ -451,7 +414,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
groupId: req.params.groupId,
orgId: req.permission.orgId
});
return group;
}
});
@@ -475,12 +437,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
)
}),
response: {
200: ScimGroupSchema
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.replaceScimGroup({
const group = await req.server.services.scim.updateScimGroupNamePut({
groupId: req.params.groupId,
orgId: req.permission.orgId,
...req.body
@@ -502,34 +477,54 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
Operations: z.array(
z.union([
z.object({
op: z.union([z.literal("remove"), z.literal("Remove")]),
path: z.string().trim(),
value: z
.object({
value: z.string()
})
.array()
.optional()
op: z.union([z.literal("replace"), z.literal("Replace")]),
value: z.object({
id: z.string().trim(),
displayName: z.string().trim()
})
}),
z.object({
op: z.union([z.literal("add"), z.literal("Add"), z.literal("replace"), z.literal("Replace")]),
path: z.string().trim().optional(),
value: z.any()
op: z.union([z.literal("remove"), z.literal("Remove")]),
path: z.string().trim()
}),
z.object({
op: z.union([z.literal("add"), z.literal("Add")]),
path: z.string().trim(),
value: z.array(
z.object({
value: z.string().trim(),
display: z.string().trim().optional()
})
)
})
])
)
}),
response: {
200: ScimGroupSchema
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.updateScimGroup({
const group = await req.server.services.scim.updateScimGroupNamePatch({
groupId: req.params.groupId,
orgId: req.permission.orgId,
operations: req.body.Operations
});
return group;
}
});
@@ -555,4 +550,60 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
return group;
}
});
server.route({
url: "/Users/:orgMembershipId",
method: "PUT",
schema: {
params: z.object({
orgMembershipId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
displayName: z.string().trim(),
active: z.boolean()
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean(),
groups: z.array(
z.object({
value: z.string().trim(),
display: z.string().trim()
})
)
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.replaceScimUser({
orgMembershipId: req.params.orgMembershipId,
orgId: req.permission.orgId,
active: req.body.active
});
return user;
}
});
};
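Editor's note, a minimal illustrative sketch (not part of the diff): the SCIM router changes above pull the repeated inline response shapes into shared Zod schemas (`ScimUserSchema`, `ScimGroupSchema`) that individual routes then reference. Assuming the field set shown in the diff, the pattern looks roughly like this:

```typescript
import { z } from "zod";

// Shared response shape, reused by every SCIM user route.
const ScimUserSchema = z.object({
  schemas: z.array(z.string()),
  id: z.string().trim(),
  userName: z.string().trim(),
  displayName: z.string().trim(),
  active: z.boolean()
});

// A single route can reference it directly:
//   response: { 200: ScimUserSchema }
// and list endpoints wrap it instead of redeclaring the object inline:
const ScimUserListSchema = z.object({
  Resources: z.array(ScimUserSchema),
  itemsPerPage: z.number(),
  schemas: z.array(z.string()),
  startIndex: z.number()
});
```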
@@ -1,7 +1,6 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -17,33 +16,32 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
rateLimit: writeLimit
},
schema: {
body: z.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.optional()
.nullable()
.default("/")
.transform((val) => (val ? removeTrailingSlash(val) : val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
body: z
.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.optional()
.nullable()
.default("/")
.transform((val) => (val ? removeTrailingSlash(val) : val)),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
response: {
200: z.object({
approval: sapPubSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const approval = await server.services.secretApprovalPolicy.createSecretApprovalPolicy({
actor: req.permission.type,
@@ -69,31 +67,30 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
params: z.object({
sapId: z.string()
}),
body: z.object({
name: z.string().optional(),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
}),
body: z
.object({
name: z.string().optional(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
message: "The number of approvals should be lower than the number of approvers."
}),
response: {
200: z.object({
approval: sapPubSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const approval = await server.services.secretApprovalPolicy.updateSecretApprovalPolicy({
actor: req.permission.type,
@@ -123,7 +120,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const approval = await server.services.secretApprovalPolicy.deleteSecretApprovalPolicy({
actor: req.permission.type,
@@ -150,10 +147,9 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
200: z.object({
approvals: sapPubSchema
.extend({
approvers: z
userApprovers: z
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(ApproverType)
userId: z.string()
})
.array()
})
@@ -174,44 +170,6 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
}
});
server.route({
url: "/:sapId",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
sapId: z.string()
}),
response: {
200: z.object({
approval: sapPubSchema.extend({
approvers: z
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(ApproverType),
name: z.string().nullable().optional()
})
.array()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const approval = await server.services.secretApprovalPolicy.getSecretApprovalPolicyById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
return { approval };
}
});
server.route({
url: "/board",
method: "GET",
@@ -228,7 +186,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
200: z.object({
policy: sapPubSchema
.extend({
userApprovers: z.object({ userId: z.string().nullable().optional() }).array()
userApprovers: z.object({ userId: z.string() }).array()
})
.optional()
})
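Editor's note, an illustrative sketch only: with the discriminated-union `approvers` schema introduced above, a create-policy request body can mix user and group approvers. All field names come from the diff; the concrete values below are placeholders.

```typescript
// Hypothetical request body matching the new approvers schema.
const exampleCreatePolicyBody = {
  workspaceId: "<project-id>",            // placeholder
  environment: "production",              // placeholder
  secretPath: "/",
  approvals: 2,
  enforcementLevel: "hard",
  approvers: [
    { type: "user", id: "<user-id>" },         // user approver referenced by id
    { type: "user", name: "jane@example.com" }, // user approver referenced by username
    { type: "group", id: "<group-id>" }         // group approver
  ]
};
```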
@@ -13,7 +13,7 @@ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
const approvalRequestUser = z.object({ userId: z.string().nullable().optional() }).merge(
const approvalRequestUser = z.object({ userId: z.string() }).merge(
UsersSchema.pick({
email: true,
firstName: true,
@@ -46,11 +46,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z
.object({
userId: z.string().nullable().optional()
})
.array(),
approvers: z.string().array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string()
}),
@@ -58,11 +54,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ userId: z.string(), status: z.string() }).array(),
approvers: z
.object({
userId: z.string().nullable().optional()
})
.array()
approvers: z.string().array()
}).array()
})
}
@@ -5,38 +5,22 @@ import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
import { ApproverType } from "./access-approval-policy-types";
export type TAccessApprovalPolicyDALFactory = ReturnType<typeof accessApprovalPolicyDALFactory>;
export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const accessApprovalPolicyOrm = ormify(db, TableName.AccessApprovalPolicy);
const accessApprovalPolicyFindQuery = async (
tx: Knex,
filter: TFindFilter<TAccessApprovalPolicies>,
customFilter?: {
policyId?: string;
}
) => {
const accessApprovalPolicyFindQuery = async (tx: Knex, filter: TFindFilter<TAccessApprovalPolicies>) => {
const result = await tx(TableName.AccessApprovalPolicy)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.where((qb) => {
if (customFilter?.policyId) {
void qb.where(`${TableName.AccessApprovalPolicy}.id`, "=", customFilter.policyId);
}
})
.join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin(TableName.Users, `${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.Users}.id`)
.select(tx.ref("username").withSchema(TableName.Users).as("approverUsername"))
.select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("approverGroupId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
@@ -46,10 +30,10 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
return result;
};
const findById = async (policyId: string, tx?: Knex) => {
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.AccessApprovalPolicy}.id` as "id"]: policyId
[`${TableName.AccessApprovalPolicy}.id` as "id"]: id
});
const formattedDoc = sqlNestRelationships({
data: doc,
@@ -66,18 +50,9 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
childrenMapper: [
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId: id }) => ({
id,
type: "user"
})
},
{
key: "approverGroupId",
label: "approvers" as const,
mapper: ({ approverGroupId: id }) => ({
id,
type: "group"
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
@@ -89,15 +64,9 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
}
};
const find = async (
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
customFilter?: {
policyId?: string;
},
tx?: Knex
) => {
const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
try {
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter, customFilter);
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
const formattedDocs = sqlNestRelationships({
data: docs,
@@ -115,19 +84,9 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
childrenMapper: [
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId: id, approverUsername }) => ({
id,
type: ApproverType.User,
name: approverUsername
})
},
{
key: "approverGroupId",
label: "approvers" as const,
mapper: ({ approverGroupId: id }) => ({
id,
type: ApproverType.Group
label: "userApprovers" as const,
mapper: ({ approverUserId }) => ({
userId: approverUserId
})
}
]
@@ -1,11 +1,12 @@
import { ForbiddenError, subject } from "@casl/ability";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TIsApproversValid } from "./access-approval-policy-types";
import { TVerifyApprovers } from "./access-approval-policy-types";
export const isApproversValid = async ({
export const verifyApprovers = async ({
userIds,
projectId,
orgId,
@@ -13,9 +14,9 @@ export const isApproversValid = async ({
actorAuthMethod,
secretPath,
permissionService
}: TIsApproversValid) => {
try {
for await (const userId of userIds) {
}: TVerifyApprovers) => {
for await (const userId of userIds) {
try {
const { permission: approverPermission } = await permissionService.getProjectPermission(
ActorType.USER,
userId,
@@ -28,9 +29,8 @@ export const isApproversValid = async ({
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment: envSlug, secretPath })
);
} catch (err) {
throw new BadRequestError({ message: "One or more approvers doesn't have access to be specified secret path" });
}
} catch {
return false;
}
return true;
};
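Editor's note, a minimal sketch of the calling convention implied by the diff above: `verifyApprovers` threw on the first failing approver, whereas `isApproversValid` swallows the permission error and returns a boolean, so callers now raise the error themselves (as the service diff further down does).

```typescript
// Sketch of a call site, assuming the surrounding values are already in scope.
const approversValid = await isApproversValid({
  projectId,
  orgId,
  envSlug,
  secretPath,
  actorAuthMethod,
  permissionService,
  userIds
});

if (!approversValid) {
  // The caller decides which error to surface instead of the helper throwing.
  throw new BadRequestError({
    message: "One or more approvers doesn't have access to be specified secret path"
  });
}
```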
@@ -2,21 +2,17 @@ import { ForbiddenError } from "@casl/ability";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TGroupDALFactory } from "../group/group-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import { isApproversValid } from "./access-approval-policy-fns";
import { verifyApprovers } from "./access-approval-policy-fns";
import {
ApproverType,
TCreateAccessApprovalPolicy,
TDeleteAccessApprovalPolicy,
TGetAccessApprovalPolicyByIdDTO,
TGetAccessPolicyCountByEnvironmentDTO,
TListAccessApprovalPoliciesDTO,
TUpdateAccessApprovalPolicy
@@ -29,8 +25,6 @@ type TSecretApprovalPolicyServiceFactoryDep = {
projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findOne">;
accessApprovalPolicyApproverDAL: TAccessApprovalPolicyApproverDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
groupDAL: TGroupDALFactory;
userDAL: Pick<TUserDALFactory, "find">;
};
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
@@ -38,11 +32,9 @@ export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprov
export const accessApprovalPolicyServiceFactory = ({
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
groupDAL,
permissionService,
projectEnvDAL,
projectDAL,
userDAL
projectDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createAccessApprovalPolicy = async ({
name,
@@ -52,29 +44,15 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath,
actorAuthMethod,
approvals,
approvers,
approverUserIds,
projectSlug,
environment,
enforcementLevel
}: TCreateAccessApprovalPolicy) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });
// If there is a group approver people might be added to the group later to meet the approvers quota
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
.map((approver) => approver.id) as string[];
const userApprovers = approvers
.filter((approver) => approver.type === ApproverType.User)
.map((approver) => approver.id)
.filter(Boolean) as string[];
const userApproverNames = approvers
.map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
.filter(Boolean) as string[];
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
if (approvals > approverUserIds.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission } = await permissionService.getProjectPermission(
@@ -89,65 +67,18 @@ export const accessApprovalPolicyServiceFactory = ({
ProjectPermissionSub.SecretApproval
);
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new NotFoundError({ message: "Environment not found" });
if (!env) throw new BadRequestError({ message: "Environment not found" });
let approverUserIds = userApprovers;
if (userApproverNames.length) {
const approverUsers = await userDAL.find({
$in: {
username: userApproverNames
}
});
const approverNamesFromDb = approverUsers.map((user) => user.username);
const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
if (invalidUsernames.length) {
throw new BadRequestError({
message: `Invalid approver user: ${invalidUsernames.join(", ")}`
});
}
approverUserIds = approverUserIds.concat(approverUsers.map((user) => user.id));
}
const usersPromises: Promise<
{
id: string;
email: string | null | undefined;
username: string;
firstName: string | null | undefined;
lastName: string | null | undefined;
isPartOfGroup: boolean;
}[]
>[] = [];
const verifyAllApprovers = [...approverUserIds];
for (const groupId of groupApprovers) {
usersPromises.push(groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }));
}
const verifyGroupApprovers = (await Promise.all(usersPromises))
.flat()
.filter((user) => user.isPartOfGroup)
.map((user) => user.id);
verifyAllApprovers.push(...verifyGroupApprovers);
const approversValid = await isApproversValid({
await verifyApprovers({
projectId: project.id,
orgId: actorOrgId,
envSlug: environment,
secretPath,
actorAuthMethod,
permissionService,
userIds: verifyAllApprovers
userIds: approverUserIds
});
if (!approversValid) {
throw new BadRequestError({
message: "One or more approvers doesn't have access to be specified secret path"
});
}
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.create(
{
@@ -159,26 +90,13 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
if (approverUserIds.length) {
await accessApprovalPolicyApproverDAL.insertMany(
approverUserIds.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),
tx
);
}
if (groupApprovers) {
await accessApprovalPolicyApproverDAL.insertMany(
groupApprovers.map((groupId) => ({
approverGroupId: groupId,
policyId: doc.id
})),
tx
);
}
await accessApprovalPolicyApproverDAL.insertMany(
approverUserIds.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),
tx
);
return doc;
});
return { ...accessApproval, environment: env, projectId: project.id };
@@ -192,7 +110,7 @@ export const accessApprovalPolicyServiceFactory = ({
projectSlug
}: TListAccessApprovalPoliciesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });
// Anyone in the project should be able to get the policies.
/* const { permission } = */ await permissionService.getProjectPermission(
@@ -210,7 +128,7 @@ export const accessApprovalPolicyServiceFactory = ({
const updateAccessApprovalPolicy = async ({
policyId,
approvers,
approverUserIds,
secretPath,
name,
actorId,
@@ -220,30 +138,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
enforcementLevel
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
.map((approver) => approver.id) as string[];
const userApprovers = approvers
.filter((approver) => approver.type === ApproverType.User)
.map((approver) => approver.id)
.filter(Boolean) as string[];
const userApproverNames = approvers
.map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
.filter(Boolean) as string[];
const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId);
const currentAppovals = approvals || accessApprovalPolicy.approvals;
if (
groupApprovers?.length === 0 &&
userApprovers &&
currentAppovals > userApprovers.length + userApproverNames.length
) {
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
if (!accessApprovalPolicy) throw new NotFoundError({ message: "Secret approval policy not found" });
if (!accessApprovalPolicy) throw new BadRequestError({ message: "Secret approval policy not found" });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
@@ -265,100 +161,26 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
if (userApprovers.length || userApproverNames.length) {
let userApproverIds = userApprovers;
if (userApproverNames.length) {
const approverUsers = await userDAL.find({
$in: {
username: userApproverNames
}
});
const approverNamesFromDb = approverUsers.map((user) => user.username);
const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
if (invalidUsernames.length) {
throw new BadRequestError({
message: `Invalid approver user: ${invalidUsernames.join(", ")}`
});
}
userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id));
}
const approversValid = await isApproversValid({
if (approverUserIds) {
await verifyApprovers({
projectId: accessApprovalPolicy.projectId,
orgId: actorOrgId,
envSlug: accessApprovalPolicy.environment.slug,
secretPath: doc.secretPath!,
actorAuthMethod,
permissionService,
userIds: userApproverIds
userIds: approverUserIds
});
if (!approversValid) {
throw new BadRequestError({
message: "One or more approvers doesn't have access to be specified secret path"
});
}
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
await accessApprovalPolicyApproverDAL.insertMany(
userApproverIds.map((userId) => ({
approverUserIds.map((userId) => ({
approverUserId: userId,
policyId: doc.id
})),
tx
);
}
if (groupApprovers) {
const usersPromises: Promise<
{
id: string;
email: string | null | undefined;
username: string;
firstName: string | null | undefined;
lastName: string | null | undefined;
isPartOfGroup: boolean;
}[]
>[] = [];
for (const groupId of groupApprovers) {
usersPromises.push(groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }));
}
const verifyGroupApprovers = (await Promise.all(usersPromises))
.flat()
.filter((user) => user.isPartOfGroup)
.map((user) => user.id);
const approversValid = await isApproversValid({
projectId: accessApprovalPolicy.projectId,
orgId: actorOrgId,
envSlug: accessApprovalPolicy.environment.slug,
secretPath: doc.secretPath!,
actorAuthMethod,
permissionService,
userIds: verifyGroupApprovers
});
if (!approversValid) {
throw new BadRequestError({
message: "One or more approvers doesn't have access to be specified secret path"
});
}
await accessApprovalPolicyApproverDAL.insertMany(
groupApprovers.map((groupId) => ({
approverGroupId: groupId,
policyId: doc.id
})),
tx
);
}
return doc;
});
return {
@@ -376,7 +198,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorOrgId
}: TDeleteAccessApprovalPolicy) => {
const policy = await accessApprovalPolicyDAL.findById(policyId);
if (!policy) throw new NotFoundError({ message: "Secret approval policy not found" });
if (!policy) throw new BadRequestError({ message: "Secret approval policy not found" });
const { permission } = await permissionService.getProjectPermission(
actor,
@@ -404,7 +226,7 @@ export const accessApprovalPolicyServiceFactory = ({
}: TGetAccessPolicyCountByEnvironmentDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new BadRequestError({ message: "Project not found" });
const { membership } = await permissionService.getProjectPermission(
actor,
@@ -413,53 +235,22 @@ export const accessApprovalPolicyServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
if (!membership) throw new BadRequestError({ message: "User not found in project" });
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: "Environment not found" });
if (!environment) throw new BadRequestError({ message: "Environment not found" });
const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
if (!policies) throw new NotFoundError({ message: "No policies found" });
if (!policies) throw new BadRequestError({ message: "No policies found" });
return { count: policies.length };
};
const getAccessApprovalPolicyById = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
policyId
}: TGetAccessApprovalPolicyByIdDTO) => {
const [policy] = await accessApprovalPolicyDAL.find({}, { policyId });
if (!policy) {
throw new NotFoundError({
message: "Cannot find access approval policy"
});
}
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
policy.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
return policy;
};
return {
getAccessPolicyCountByEnvSlug,
createAccessApprovalPolicy,
deleteAccessApprovalPolicy,
updateAccessApprovalPolicy,
getAccessApprovalPolicyByProjectSlug,
getAccessApprovalPolicyById
getAccessApprovalPolicyByProjectSlug
};
};
@@ -3,7 +3,7 @@ import { ActorAuthMethod } from "@app/services/auth/auth-type";
import { TPermissionServiceFactory } from "../permission/permission-service";
export type TIsApproversValid = {
export type TVerifyApprovers = {
userIds: string[];
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
envSlug: string;
@@ -13,16 +13,11 @@ export type TIsApproversValid = {
orgId: string;
};
export enum ApproverType {
Group = "group",
User = "user"
}
export type TCreateAccessApprovalPolicy = {
approvals: number;
secretPath: string;
environment: string;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
approverUserIds: string[];
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
@@ -31,7 +26,7 @@ export type TCreateAccessApprovalPolicy = {
export type TUpdateAccessApprovalPolicy = {
policyId: string;
approvals?: number;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
approverUserIds?: string[];
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
@@ -46,10 +41,6 @@ export type TGetAccessPolicyCountByEnvironmentDTO = {
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TGetAccessApprovalPolicyByIdDTO = {
policyId: string;
} & Omit<TProjectPermission, "projectId">;
export type TListAccessApprovalPoliciesDTO = {
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
@@ -39,12 +39,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.join<TUsers>(
db(TableName.Users).as("requestedByUser"),
@@ -65,7 +59,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"))
.select(
db.ref("projectId").withSchema(TableName.Environment),
@@ -149,12 +142,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
label: "reviewers" as const,
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
},
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId },
{
key: "approverGroupUserId",
label: "approvers" as const,
mapper: ({ approverGroupUserId }) => approverGroupUserId
}
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId }
]
});
@@ -184,28 +172,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
`requestedByUser.id`
)
.leftJoin(
.join(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin<TUsers>(
.join<TUsers>(
db(TableName.Users).as("accessApprovalPolicyApproverUser"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
"accessApprovalPolicyApproverUser.id"
)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyGroupApproverUser"),
`${TableName.UserGroupMembership}.userId`,
"accessApprovalPolicyGroupApproverUser.id"
)
.leftJoin(
TableName.AccessApprovalRequestReviewer,
@@ -223,15 +200,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
tx.ref("userId").withSchema(TableName.UserGroupMembership),
tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("email").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
tx.ref("username").withSchema("accessApprovalPolicyApproverUser").as("approverUsername"),
tx.ref("username").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupUsername"),
tx.ref("firstName").withSchema("accessApprovalPolicyApproverUser").as("approverFirstName"),
tx.ref("firstName").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupFirstName"),
tx.ref("lastName").withSchema("accessApprovalPolicyApproverUser").as("approverLastName"),
tx.ref("lastName").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupLastName"),
tx.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
tx.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
@@ -310,23 +282,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
lastName,
username
})
},
{
key: "userId",
label: "approvers" as const,
mapper: ({
userId,
approverGroupEmail: email,
approverGroupUsername: username,
approverGroupLastName: lastName,
approverFirstName: firstName
}) => ({
userId,
email,
firstName,
lastName,
username
})
}
]
});
@@ -1,6 +1,6 @@
import { PackRule, unpackRules } from "@casl/ability/extra";
import { BadRequestError } from "@app/lib/errors";
import { UnauthorizedError } from "@app/lib/errors";
import { TVerifyPermission } from "./access-approval-request-types";
@@ -19,7 +19,7 @@ export const verifyRequestedPermissions = ({ permissions }: TVerifyPermission) =
);
if (!permission || !permission.length) {
throw new BadRequestError({ message: "No permission provided" });
throw new UnauthorizedError({ message: "No permission provided" });
}
const requestedPermissions: string[] = [];
@@ -39,10 +39,10 @@ export const verifyRequestedPermissions = ({ permissions }: TVerifyPermission) =
const permissionEnv = firstPermission.conditions?.environment;
if (!permissionEnv || typeof permissionEnv !== "string") {
throw new BadRequestError({ message: "Permission environment is not a string" });
throw new UnauthorizedError({ message: "Permission environment is not a string" });
}
if (!permissionSecretPath || typeof permissionSecretPath !== "string") {
throw new BadRequestError({ message: "Permission path is not a string" });
throw new UnauthorizedError({ message: "Permission path is not a string" });
}
return {
@@ -3,22 +3,17 @@ import ms from "ms";
import { ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack-config-dal";
import { triggerSlackNotification } from "@app/services/slack/slack-fns";
import { SlackTriggerFeature } from "@app/services/slack/slack-types";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal";
import { isApproversValid } from "../access-approval-policy/access-approval-policy-fns";
import { TGroupDALFactory } from "../group/group-dal";
import { verifyApprovers } from "../access-approval-policy/access-approval-policy-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "../project-user-additional-privilege/project-user-additional-privilege-types";
@@ -38,10 +33,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
accessApprovalPolicyApproverDAL: Pick<TAccessApprovalPolicyApproverDALFactory, "find">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectDAL: Pick<
TProjectDALFactory,
"checkProjectUpgradeStatus" | "findProjectBySlug" | "findProjectWithOrg" | "findById"
>;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus" | "findProjectBySlug">;
accessApprovalRequestDAL: Pick<
TAccessApprovalRequestDALFactory,
| "create"
@@ -58,21 +50,17 @@ type TSecretApprovalRequestServiceFactoryDep = {
TAccessApprovalRequestReviewerDALFactory,
"create" | "find" | "findOne" | "transaction"
>;
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleMembers">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
smtpService: Pick<TSmtpService, "sendMail">;
userDAL: Pick<
TUserDALFactory,
"findUserByProjectMembershipId" | "findUsersByProjectMembershipIds" | "find" | "findById"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectSlackConfigDAL: Pick<TProjectSlackConfigDALFactory, "getIntegrationDetailsByProject">;
};
export type TAccessApprovalRequestServiceFactory = ReturnType<typeof accessApprovalRequestServiceFactory>;
export const accessApprovalRequestServiceFactory = ({
groupDAL,
projectDAL,
projectEnvDAL,
permissionService,
@@ -83,9 +71,7 @@ export const accessApprovalRequestServiceFactory = ({
accessApprovalPolicyApproverDAL,
additionalPrivilegeDAL,
smtpService,
userDAL,
kmsService,
projectSlackConfigDAL
userDAL
}: TSecretApprovalRequestServiceFactoryDep) => {
const createAccessApprovalRequest = async ({
isTemporary,
@@ -99,7 +85,7 @@ export const accessApprovalRequestServiceFactory = ({
}: TCreateAccessApprovalRequestDTO) => {
const cfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new UnauthorizedError({ message: "Project not found" });
// Anyone can create an access approval request.
const { membership } = await permissionService.getProjectPermission(
@@ -109,56 +95,31 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod,
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });
const requestedByUser = await userDAL.findById(actorId);
if (!requestedByUser) throw new ForbiddenRequestError({ message: "User not found" });
if (!requestedByUser) throw new UnauthorizedError({ message: "User not found" });
await projectDAL.checkProjectUpgradeStatus(project.id);
const { envSlug, secretPath, accessTypes } = verifyRequestedPermissions({ permissions: requestedPermissions });
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: "Environment not found" });
if (!environment) throw new UnauthorizedError({ message: "Environment not found" });
const policy = await accessApprovalPolicyDAL.findOne({
envId: environment.id,
secretPath
});
if (!policy) throw new NotFoundError({ message: "No policy matching criteria was found." });
const approverIds: string[] = [];
const approverGroupIds: string[] = [];
if (!policy) throw new UnauthorizedError({ message: "No policy matching criteria was found." });
const approvers = await accessApprovalPolicyApproverDAL.find({
policyId: policy.id
});
approvers.forEach((approver) => {
if (approver.approverUserId) {
approverIds.push(approver.approverUserId);
} else if (approver.approverGroupId) {
approverGroupIds.push(approver.approverGroupId);
}
});
const groupUsers = (
await Promise.all(
approverGroupIds.map((groupApproverId) =>
groupDAL.findAllGroupPossibleMembers({
orgId: actorOrgId,
groupId: groupApproverId
})
)
)
).flat();
approverIds.push(...groupUsers.filter((user) => user.isPartOfGroup).map((user) => user.id));
const approverUsers = await userDAL.find({
$in: {
id: [...new Set(approverIds)]
id: approvers.map((approver) => approver.approverUserId)
}
});
@@ -205,36 +166,13 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
tx
|
||||
);
|
||||
|
||||
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
|
||||
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;
|
||||
|
||||
await triggerSlackNotification({
|
||||
projectId: project.id,
|
||||
projectSlackConfigDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
notification: {
|
||||
type: SlackTriggerFeature.ACCESS_REQUEST,
|
||||
payload: {
|
||||
projectName: project.name,
|
||||
requesterFullName,
|
||||
isTemporary,
|
||||
requesterEmail: requestedByUser.email as string,
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
permissions: accessTypes,
|
||||
approvalUrl
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
recipients: approverUsers.filter((approver) => approver.email).map((approver) => approver.email!),
|
||||
subjectLine: "Access Approval Request",
|
||||
|
||||
substitutions: {
|
||||
projectName: project.name,
|
||||
requesterFullName,
|
||||
requesterFullName: `${requestedByUser.firstName} ${requestedByUser.lastName}`,
|
||||
requesterEmail: requestedByUser.email,
|
||||
isTemporary,
|
||||
...(isTemporary && {
|
||||
@ -243,7 +181,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
secretPath,
|
||||
environment: envSlug,
|
||||
permissions: accessTypes,
|
||||
approvalUrl
|
||||
approvalUrl: `${cfg.SITE_URL}/project/${project.id}/approval`
|
||||
},
|
||||
template: SmtpTemplates.AccessApprovalRequest
|
||||
});
|
||||
@ -264,7 +202,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
actorAuthMethod
|
||||
}: TListApprovalRequestsDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new UnauthorizedError({ message: "Project not found" });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@ -273,9 +211,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });
|
||||
|
||||
const policies = await accessApprovalPolicyDAL.find({ projectId: project.id });
|
||||
let requests = await accessApprovalRequestDAL.findRequestsWithPrivilegeByPolicyIds(policies.map((p) => p.id));
|
||||
@ -300,7 +236,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
actorOrgId
|
||||
}: TReviewAccessRequestDTO) => {
|
||||
const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
|
||||
if (!accessApprovalRequest) throw new NotFoundError({ message: "Secret approval request not found" });
|
||||
if (!accessApprovalRequest) throw new BadRequestError({ message: "Secret approval request not found" });
|
||||
|
||||
const { policy } = accessApprovalRequest;
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission(
|
||||
@ -311,21 +247,19 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
if (!membership) throw new UnauthorizedError({ message: "You are not a member of this project" });
|
||||
|
||||
if (
|
||||
!hasRole(ProjectMembershipRole.Admin) &&
|
||||
accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user
|
||||
!policy.approvers.find((approver) => approver.userId === actorId) // The request isn't performed by an assigned approver
|
||||
) {
|
||||
throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" });
|
||||
throw new UnauthorizedError({ message: "You are not authorized to approve this request" });
|
||||
}
|
||||
|
||||
const reviewerProjectMembership = await projectMembershipDAL.findById(membership.id);
|
||||
|
||||
const approversValid = await isApproversValid({
|
||||
await verifyApprovers({
|
||||
projectId: accessApprovalRequest.projectId,
|
||||
orgId: actorOrgId,
|
||||
envSlug: accessApprovalRequest.environment,
|
||||
@ -335,10 +269,6 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
userIds: [reviewerProjectMembership.userId]
|
||||
});
|
||||
|
||||
if (!approversValid) {
|
||||
throw new ForbiddenRequestError({ message: "You don't have access to approve this request" });
|
||||
}
|
||||
|
||||
const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id });
|
||||
if (existingReviews.some((review) => review.status === ApprovalStatus.REJECTED)) {
|
||||
throw new BadRequestError({ message: "The request has already been rejected by another reviewer" });
|
||||
@ -421,7 +351,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
|
||||
const getCount = async ({ projectSlug, actor, actorAuthMethod, actorId, actorOrgId }: TGetAccessRequestCountDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new UnauthorizedError({ message: "Project not found" });
|
||||
|
||||
const { membership } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
@ -430,9 +360,7 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
if (!membership) {
|
||||
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
|
||||
}
|
||||
if (!membership) throw new BadRequestError({ message: "User not found in project" });
|
||||
|
||||
const count = await accessApprovalRequestDAL.getCount({ projectId: project.id });
|
||||
|
||||
|
@@ -2,11 +2,10 @@ import { ForbiddenError } from "@casl/ability";
import { RawAxiosRequestHeaders } from "axios";

import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { BadRequestError } from "@app/lib/errors";
import { validateLocalIps } from "@app/lib/validator";

import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
import { TLicenseServiceFactory } from "../license/license-service";
@@ -43,15 +42,13 @@ export const auditLogStreamServiceFactory = ({
actorOrgId,
actorAuthMethod
}: TCreateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

const appCfg = getConfig();
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams) {
if (!plan.auditLogStreams)
throw new BadRequestError({
message: "Failed to create audit log streams due to plan restriction. Upgrade plan to create group."
});
}

const { permission } = await permissionService.getOrgPermission(
actor,
@@ -62,9 +59,7 @@ export const auditLogStreamServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);

if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}
validateLocalIps(url);

const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@@ -121,7 +116,7 @@ export const auditLogStreamServiceFactory = ({
actorOrgId,
actorAuthMethod
}: TUpdateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams)
@@ -130,14 +125,13 @@ export const auditLogStreamServiceFactory = ({
});

const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

const appCfg = getConfig();
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
if (url) validateLocalIps(url);

// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
@@ -179,10 +173,10 @@ export const auditLogStreamServiceFactory = ({
};

const deleteById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TDeleteAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@@ -194,7 +188,7 @@ export const auditLogStreamServiceFactory = ({

const getById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TGetDetailsAuditLogStreamDTO) => {
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
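The stream-URL check above moves from `validateLocalIps` to `blockLocalAndPrivateIpAddresses`, applied only when `appCfg.isCloud` is set, so self-hosted instances can still stream to internal collectors while the cloud service refuses local and private destinations. A rough sketch of that kind of guard (the hostname patterns are illustrative, not the library's actual implementation):

```ts
// Reject obviously local/private hostnames before making outbound requests (SSRF guard).
const isPrivateHostname = (hostname: string): boolean =>
  hostname === "localhost" ||
  hostname === "0.0.0.0" ||
  /^127\./.test(hostname) ||
  /^10\./.test(hostname) ||
  /^192\.168\./.test(hostname) ||
  /^172\.(1[6-9]|2\d|3[01])\./.test(hostname);

const assertPublicUrl = (rawUrl: string): void => {
  const { hostname } = new URL(rawUrl); // throws on malformed URLs
  if (isPrivateHostname(hostname)) {
    throw new Error(`Refusing to stream audit logs to a local or private address: ${hostname}`);
  }
};

// Usage mirroring the service: only enforced for cloud deployments.
// if (appCfg.isCloud) assertPublicUrl(url);
```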
@@ -1,15 +1,10 @@
// weird commonjs-related error in the CI requires us to do the import like this
import knex from "knex";
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError, GatewayTimeoutError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { DatabaseError } from "@app/lib/errors";
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";

import { EventType } from "./audit-log-types";

export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;

@@ -26,122 +21,56 @@ type TFindQuery = {
};

export const auditLogDALFactory = (db: TDbClient) => {
const auditLogOrm = ormify(db, TableName.PartitionedAuditLog);
const auditLogOrm = ormify(db, TableName.AuditLog);

const find = async (
{
orgId,
projectId,
userAgentType,
startDate,
endDate,
limit = 20,
offset = 0,
actorId,
actorType,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
tx?: knex.Knex
{ orgId, projectId, userAgentType, startDate, endDate, limit = 20, offset = 0, actor, eventType }: TFindQuery,
tx?: Knex
) => {
if (!orgId && !projectId) {
throw new Error("Either orgId or projectId must be provided");
}

try {
// Find statements
const sqlQuery = (tx || db.replicaNode())(TableName.PartitionedAuditLog)
// eslint-disable-next-line func-names
.where(function () {
if (orgId) {
void this.where(`${TableName.PartitionedAuditLog}.orgId`, orgId);
} else if (projectId) {
void this.where(`${TableName.PartitionedAuditLog}.projectId`, projectId);
}
});

if (userAgentType) {
void sqlQuery.where("userAgentType", userAgentType);
}

// Select statements
void sqlQuery
.select(selectAllTableCols(TableName.PartitionedAuditLog))
const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
.where(
stripUndefinedInWhere({
projectId,
orgId,
eventType,
actor,
userAgentType
})
)
.limit(limit)
.offset(offset)
.orderBy(`${TableName.PartitionedAuditLog}.createdAt`, "desc");

// Special case: Filter by actor ID
if (actorId) {
void sqlQuery.whereRaw(`"actorMetadata" @> jsonb_build_object('userId', ?::text)`, [actorId]);
}

// Special case: Filter by key/value pairs in eventMetadata field
if (eventMetadata && Object.keys(eventMetadata).length) {
Object.entries(eventMetadata).forEach(([key, value]) => {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object(?::text, ?::text)`, [key, value]);
});
}

// Filter by actor type
if (actorType) {
void sqlQuery.where("actor", actorType);
}

// Filter by event types
if (eventType?.length) {
void sqlQuery.whereIn("eventType", eventType);
}

// Filter by date range
.orderBy("createdAt", "desc");
if (startDate) {
void sqlQuery.where(`${TableName.PartitionedAuditLog}.createdAt`, ">=", startDate);
void sqlQuery.where("createdAt", ">=", startDate);
}
if (endDate) {
void sqlQuery.where(`${TableName.PartitionedAuditLog}.createdAt`, "<=", endDate);
void sqlQuery.where("createdAt", "<=", endDate);
}

// we timeout long running queries to prevent DB resource issues (2 minutes)
const docs = await sqlQuery.timeout(1000 * 120);

const docs = await sqlQuery;
return docs;
} catch (error) {
if (error instanceof knex.KnexTimeoutError) {
throw new GatewayTimeoutError({
error,
message: "Failed to fetch audit logs due to timeout. Add more search filters."
});
}

throw new DatabaseError({ error });
}
};

// delete all audit log that have expired
const pruneAuditLog = async (tx?: knex.Knex) => {
const pruneAuditLog = async (tx?: Knex) => {
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;

const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;

logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.PartitionedAuditLog)
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
.where("expiresAt", "<", today)
.select("id")
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);

// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await (tx || db)(TableName.PartitionedAuditLog)
deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
.del()
.returning("id");
@@ -155,9 +84,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
} while (deletedAuditLogIds.length > 0 || numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
};

return { ...auditLogOrm, pruneAuditLog, find };
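The partitioned audit-log `find` above filters the `actorMetadata` and `eventMetadata` JSONB columns with Postgres containment (`@>`) and caps query time at two minutes, mapping Knex timeouts to a gateway-timeout error. A minimal standalone sketch of that query shape (table and column names assumed from the diff, not an exact copy of the DAL):

```ts
import knex from "knex";

const findByActor = async (db: knex.Knex, actorId: string, eventMetadata: Record<string, string>) => {
  const query = db("partitioned_audit_logs")
    .whereRaw(`"actorMetadata" @> jsonb_build_object('userId', ?::text)`, [actorId])
    .orderBy("createdAt", "desc")
    .limit(20);

  // Each metadata pair becomes an additional JSONB containment predicate.
  Object.entries(eventMetadata).forEach(([key, value]) => {
    void query.whereRaw(`"eventMetadata" @> jsonb_build_object(?::text, ?::text)`, [key, value]);
  });

  try {
    return await query.timeout(1000 * 120); // cancel queries running longer than 2 minutes
  } catch (error) {
    if (error instanceof knex.KnexTimeoutError) {
      throw new Error("Audit log query timed out; narrow the search filters.");
    }
    throw error;
  }
};
```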
@@ -74,7 +74,6 @@ export const auditLogQueueServiceFactory = ({
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
@@ -3,7 +3,6 @@ import { ForbiddenError } from "@casl/ability";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TAuditLogDALFactory } from "./audit-log-dal";
@@ -12,7 +11,7 @@ import { EventType, TCreateAuditLogDTO, TListProjectAuditLogDTO } from "./audit-

type TAuditLogServiceFactoryDep = {
auditLogDAL: TAuditLogDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
auditLogQueue: TAuditLogQueueServiceFactory;
};

@@ -23,48 +22,38 @@ export const auditLogServiceFactory = ({
auditLogQueue,
permissionService
}: TAuditLogServiceFactoryDep) => {
const listAuditLogs = async ({ actorAuthMethod, actorId, actorOrgId, actor, filter }: TListProjectAuditLogDTO) => {
// Filter logs for specific project
if (filter.projectId) {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
filter.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
} else {
// Organization-wide logs
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);

/**
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
* to the organization level ✅
*/
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
}

// If project ID is not provided, then we need to return all the audit logs for the organization itself.
const listProjectAuditLogs = async ({
userAgentType,
eventType,
offset,
limit,
endDate,
startDate,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
auditLogActor
}: TListProjectAuditLogDTO) => {
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
const auditLogs = await auditLogDAL.find({
startDate: filter.startDate,
endDate: filter.endDate,
limit: filter.limit,
offset: filter.offset,
eventType: filter.eventType,
userAgentType: filter.userAgentType,
actorId: filter.auditLogActorId,
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
startDate,
endDate,
limit,
offset,
eventType,
userAgentType,
actor: auditLogActor,
projectId
});

return auditLogs.map(({ eventType: logEventType, actor: eActor, actorMetadata, eventMetadata, ...el }) => ({
...el,
event: { type: logEventType, metadata: eventMetadata },
@@ -87,6 +76,6 @@ export const auditLogServiceFactory = ({

return {
createAuditLog,
listAuditLogs
listProjectAuditLogs
};
};
@ -1,4 +1,3 @@
|
||||
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
|
||||
import { TProjectPermission } from "@app/lib/types";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
|
||||
@ -6,23 +5,19 @@ import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
|
||||
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
filter: {
|
||||
userAgentType?: UserAgentType;
|
||||
eventType?: EventType[];
|
||||
offset?: number;
|
||||
limit: number;
|
||||
endDate?: string;
|
||||
startDate?: string;
|
||||
projectId?: string;
|
||||
auditLogActorId?: string;
|
||||
actorType?: ActorType;
|
||||
eventMetadata?: Record<string, string>;
|
||||
};
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
auditLogActor?: string;
|
||||
projectId: string;
|
||||
eventType?: string;
|
||||
startDate?: string;
|
||||
endDate?: string;
|
||||
userAgentType?: string;
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TCreateAuditLogDTO = {
|
||||
event: Event;
|
||||
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor;
|
||||
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor;
|
||||
orgId?: string;
|
||||
projectId?: string;
|
||||
} & BaseAuthData;
|
||||
@ -123,7 +118,6 @@ export enum EventType {
|
||||
UPDATE_WEBHOOK_STATUS = "update-webhook-status",
|
||||
DELETE_WEBHOOK = "delete-webhook",
|
||||
GET_SECRET_IMPORTS = "get-secret-imports",
|
||||
GET_SECRET_IMPORT = "get-secret-import",
|
||||
CREATE_SECRET_IMPORT = "create-secret-import",
|
||||
UPDATE_SECRET_IMPORT = "update-secret-import",
|
||||
DELETE_SECRET_IMPORT = "delete-secret-import",
|
||||
@ -146,7 +140,6 @@ export enum EventType {
|
||||
GET_CA_CRLS = "get-certificate-authority-crls",
|
||||
ISSUE_CERT = "issue-cert",
|
||||
SIGN_CERT = "sign-cert",
|
||||
GET_CA_CERTIFICATE_TEMPLATES = "get-ca-certificate-templates",
|
||||
GET_CERT = "get-cert",
|
||||
DELETE_CERT = "delete-cert",
|
||||
REVOKE_CERT = "revoke-cert",
|
||||
@ -176,21 +169,7 @@ export enum EventType {
|
||||
GET_CERTIFICATE_TEMPLATE = "get-certificate-template",
|
||||
CREATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "create-certificate-template-est-config",
|
||||
UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG = "update-certificate-template-est-config",
|
||||
GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config",
|
||||
ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration",
|
||||
ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration",
|
||||
GET_SLACK_INTEGRATION = "get-slack-integration",
|
||||
UPDATE_SLACK_INTEGRATION = "update-slack-integration",
|
||||
DELETE_SLACK_INTEGRATION = "delete-slack-integration",
|
||||
GET_PROJECT_SLACK_CONFIG = "get-project-slack-config",
|
||||
UPDATE_PROJECT_SLACK_CONFIG = "update-project-slack-config",
|
||||
INTEGRATION_SYNCED = "integration-synced",
|
||||
CREATE_CMEK = "create-cmek",
|
||||
UPDATE_CMEK = "update-cmek",
|
||||
DELETE_CMEK = "delete-cmek",
|
||||
GET_CMEKS = "get-cmeks",
|
||||
CMEK_ENCRYPT = "cmek-encrypt",
|
||||
CMEK_DECRYPT = "cmek-decrypt"
|
||||
GET_CERTIFICATE_TEMPLATE_EST_CONFIG = "get-certificate-template-est-config"
|
||||
}
|
||||
|
||||
interface UserActorMetadata {
|
||||
@ -211,8 +190,6 @@ interface IdentityActorMetadata {
|
||||
|
||||
interface ScimClientActorMetadata {}
|
||||
|
||||
interface PlatformActorMetadata {}
|
||||
|
||||
export interface UserActor {
|
||||
type: ActorType.USER;
|
||||
metadata: UserActorMetadata;
|
||||
@ -223,11 +200,6 @@ export interface ServiceActor {
|
||||
metadata: ServiceActorMetadata;
|
||||
}
|
||||
|
||||
export interface PlatformActor {
|
||||
type: ActorType.PLATFORM;
|
||||
metadata: PlatformActorMetadata;
|
||||
}
|
||||
|
||||
export interface IdentityActor {
|
||||
type: ActorType.IDENTITY;
|
||||
metadata: IdentityActorMetadata;
|
||||
@ -238,7 +210,7 @@ export interface ScimClientActor {
|
||||
metadata: ScimClientActorMetadata;
|
||||
}
|
||||
|
||||
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor;
|
||||
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor;
|
||||
|
||||
interface GetSecretsEvent {
|
||||
type: EventType.GET_SECRETS;
|
||||
@ -1012,14 +984,6 @@ interface GetSecretImportsEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSecretImportEvent {
|
||||
type: EventType.GET_SECRET_IMPORT;
|
||||
metadata: {
|
||||
secretImportId: string;
|
||||
folderId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateSecretImportEvent {
|
||||
type: EventType.CREATE_SECRET_IMPORT;
|
||||
metadata: {
|
||||
@ -1228,14 +1192,6 @@ interface SignCert {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCertificateTemplates {
|
||||
type: EventType.GET_CA_CERTIFICATE_TEMPLATES;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCert {
|
||||
type: EventType.GET_CERT;
|
||||
metadata: {
|
||||
@ -1366,7 +1322,7 @@ interface CreateKmsEvent {
|
||||
metadata: {
|
||||
kmsId: string;
|
||||
provider: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
@ -1375,7 +1331,7 @@ interface DeleteKmsEvent {
|
||||
type: EventType.DELETE_KMS;
|
||||
metadata: {
|
||||
kmsId: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
}
|
||||
|
||||
@ -1384,7 +1340,7 @@ interface UpdateKmsEvent {
|
||||
metadata: {
|
||||
kmsId: string;
|
||||
provider: string;
|
||||
name?: string;
|
||||
slug?: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
@ -1393,7 +1349,7 @@ interface GetKmsEvent {
|
||||
type: EventType.GET_KMS;
|
||||
metadata: {
|
||||
kmsId: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
}
|
||||
|
||||
@ -1402,7 +1358,7 @@ interface UpdateProjectKmsEvent {
|
||||
metadata: {
|
||||
secretManagerKmsKey: {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
@ -1490,120 +1446,6 @@ interface GetCertificateTemplateEstConfig {
|
||||
};
|
||||
}
|
||||
|
||||
interface AttemptCreateSlackIntegration {
|
||||
type: EventType.ATTEMPT_CREATE_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
slug: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AttemptReinstallSlackIntegration {
|
||||
type: EventType.ATTEMPT_REINSTALL_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateSlackIntegration {
|
||||
type: EventType.UPDATE_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
slug: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteSlackIntegration {
|
||||
type: EventType.DELETE_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetSlackIntegration {
|
||||
type: EventType.GET_SLACK_INTEGRATION;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateProjectSlackConfig {
|
||||
type: EventType.UPDATE_PROJECT_SLACK_CONFIG;
|
||||
metadata: {
|
||||
id: string;
|
||||
slackIntegrationId: string;
|
||||
isAccessRequestNotificationEnabled: boolean;
|
||||
accessRequestChannels: string;
|
||||
isSecretRequestNotificationEnabled: boolean;
|
||||
secretRequestChannels: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetProjectSlackConfig {
|
||||
type: EventType.GET_PROJECT_SLACK_CONFIG;
|
||||
metadata: {
|
||||
id: string;
|
||||
};
|
||||
}
|
||||
interface IntegrationSyncedEvent {
|
||||
type: EventType.INTEGRATION_SYNCED;
|
||||
metadata: {
|
||||
integrationId: string;
|
||||
lastSyncJobId: string;
|
||||
lastUsed: Date;
|
||||
syncMessage: string;
|
||||
isSynced: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateCmekEvent {
|
||||
type: EventType.CREATE_CMEK;
|
||||
metadata: {
|
||||
keyId: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
encryptionAlgorithm: SymmetricEncryption;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteCmekEvent {
|
||||
type: EventType.DELETE_CMEK;
|
||||
metadata: {
|
||||
keyId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateCmekEvent {
|
||||
type: EventType.UPDATE_CMEK;
|
||||
metadata: {
|
||||
keyId: string;
|
||||
name?: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCmeksEvent {
|
||||
type: EventType.GET_CMEKS;
|
||||
metadata: {
|
||||
keyIds: string[];
|
||||
};
|
||||
}
|
||||
|
||||
interface CmekEncryptEvent {
|
||||
type: EventType.CMEK_ENCRYPT;
|
||||
metadata: {
|
||||
keyId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CmekDecryptEvent {
|
||||
type: EventType.CMEK_DECRYPT;
|
||||
metadata: {
|
||||
keyId: string;
|
||||
};
|
||||
}
|
||||
|
||||
export type Event =
|
||||
| GetSecretsEvent
|
||||
| GetSecretEvent
|
||||
@ -1683,7 +1525,6 @@ export type Event =
|
||||
| UpdateWebhookStatusEvent
|
||||
| DeleteWebhookEvent
|
||||
| GetSecretImportsEvent
|
||||
| GetSecretImportEvent
|
||||
| CreateSecretImportEvent
|
||||
| UpdateSecretImportEvent
|
||||
| DeleteSecretImportEvent
|
||||
@ -1706,7 +1547,6 @@ export type Event =
|
||||
| GetCaCrls
|
||||
| IssueCert
|
||||
| SignCert
|
||||
| GetCaCertificateTemplates
|
||||
| GetCert
|
||||
| DeleteCert
|
||||
| RevokeCert
|
||||
@ -1736,18 +1576,4 @@ export type Event =
|
||||
| DeleteCertificateTemplate
|
||||
| CreateCertificateTemplateEstConfig
|
||||
| UpdateCertificateTemplateEstConfig
|
||||
| GetCertificateTemplateEstConfig
|
||||
| AttemptCreateSlackIntegration
|
||||
| AttemptReinstallSlackIntegration
|
||||
| UpdateSlackIntegration
|
||||
| DeleteSlackIntegration
|
||||
| GetSlackIntegration
|
||||
| UpdateProjectSlackConfig
|
||||
| GetProjectSlackConfig
|
||||
| IntegrationSyncedEvent
|
||||
| CreateCmekEvent
|
||||
| UpdateCmekEvent
|
||||
| DeleteCmekEvent
|
||||
| GetCmeksEvent
|
||||
| CmekEncryptEvent
|
||||
| CmekDecryptEvent;
|
||||
| GetCertificateTemplateEstConfig;
|
||||
|
@@ -2,9 +2,10 @@ import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";

import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
// import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -18,6 +19,7 @@ type TCertificateAuthorityCrlServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "generateKmsKey">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
// licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TCertificateAuthorityCrlServiceFactory = ReturnType<typeof certificateAuthorityCrlServiceFactory>;
@@ -64,7 +66,7 @@ export const certificateAuthorityCrlServiceFactory = ({
*/
const getCaCrls = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCrlsDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new NotFoundError({ message: "CA not found" });
if (!ca) throw new BadRequestError({ message: "CA not found" });

const { permission } = await permissionService.getProjectPermission(
actor,
@@ -79,6 +81,13 @@ export const certificateAuthorityCrlServiceFactory = ({
ProjectPermissionSub.CertificateAuthorities
);

// const plan = await licenseService.getPlan(actorOrgId);
// if (!plan.caCrl)
//   throw new BadRequestError({
//     message:
//       "Failed to get CA certificate revocation lists (CRLs) due to plan restriction. Upgrade plan to get the CA CRL."
//   });

const caCrls = await certificateAuthorityCrlDAL.find({ caId: ca.id }, { sort: [["createdAt", "desc"]] });

const keyId = await getProjectKmsCertificateKeyId({
@ -7,7 +7,7 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
@ -61,7 +61,7 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
}: TCreateDynamicSecretLeaseDTO) => {
|
||||
const appCfg = getConfig();
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -84,10 +84,10 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
|
||||
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
|
||||
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
|
||||
|
||||
const totalLeasesTaken = await dynamicSecretLeaseDAL.countLeasesForDynamicSecret(dynamicSecretCfg.id);
|
||||
if (totalLeasesTaken >= appCfg.MAX_LEASE_LIMIT)
|
||||
@ -134,7 +134,7 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
leaseId
|
||||
}: TRenewDynamicSecretLeaseDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -157,10 +157,10 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
|
||||
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
|
||||
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });
|
||||
|
||||
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
|
||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||
@ -208,7 +208,7 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
isForced
|
||||
}: TDeleteDynamicSecretLeaseDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -224,10 +224,10 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
|
||||
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
|
||||
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });
|
||||
|
||||
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
|
||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||
@ -273,7 +273,7 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
actorAuthMethod
|
||||
}: TListDynamicSecretLeasesDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -289,10 +289,10 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
|
||||
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
|
||||
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
|
||||
|
||||
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id });
|
||||
return dynamicSecretLeases;
|
||||
@ -309,7 +309,7 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
actorAuthMethod
|
||||
}: TDetailsDynamicSecretLeaseDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -325,10 +325,10 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
|
||||
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
|
||||
if (!dynamicSecretLease) throw new BadRequestError({ message: "Dynamic secret lease not found" });
|
||||
|
||||
return dynamicSecretLease;
|
||||
};
|
||||
|
@@ -1,70 +1,10 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";
import { SecretsOrderBy } from "@app/services/secret/secret-types";
import { ormify } from "@app/lib/knex";

export type TDynamicSecretDALFactory = ReturnType<typeof dynamicSecretDALFactory>;

export const dynamicSecretDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.DynamicSecret);

// find dynamic secrets for multiple environments (folder IDs are cross env, thus need to rank for pagination)
const listDynamicSecretsByFolderIds = async (
{
folderIds,
search,
limit,
offset = 0,
orderBy = SecretsOrderBy.Name,
orderDirection = OrderByDirection.ASC
}: {
folderIds: string[];
search?: string;
limit?: number;
offset?: number;
orderBy?: SecretsOrderBy;
orderDirection?: OrderByDirection;
},
tx?: Knex
) => {
try {
const query = (tx || db.replicaNode())(TableName.DynamicSecret)
.whereIn("folderId", folderIds)
.where((bd) => {
if (search) {
void bd.whereILike(`${TableName.DynamicSecret}.name`, `%${search}%`);
}
})
.leftJoin(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
.leftJoin(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.select(
selectAllTableCols(TableName.DynamicSecret),
db.ref("slug").withSchema(TableName.Environment).as("environment"),
db.raw(`DENSE_RANK() OVER (ORDER BY ${TableName.DynamicSecret}."name" ${orderDirection}) as rank`)
)
.orderBy(`${TableName.DynamicSecret}.${orderBy}`, orderDirection);

if (limit) {
const rankOffset = offset + 1;
return await (tx || db)
.with("w", query)
.select("*")
.from<Awaited<typeof query>[number]>("w")
.where("w.rank", ">=", rankOffset)
.andWhere("w.rank", "<", rankOffset + limit);
}

const dynamicSecrets = await query;

return dynamicSecrets;
} catch (error) {
throw new DatabaseError({ error, name: "List dynamic secret multi env" });
}
};

return { ...orm, listDynamicSecretsByFolderIds };
return orm;
};
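The new `listDynamicSecretsByFolderIds` above paginates across environments by attaching a `DENSE_RANK()` window over the secret name and slicing the ranked result by rank rather than by row offset, so the same name appearing in several environments stays on one page. A condensed sketch of that rank-then-slice pattern (table and column names assumed):

```ts
import knex from "knex";

const pageByNameRank = async (db: knex.Knex, folderIds: string[], offset: number, limit: number) => {
  // Rank rows by name; duplicates of a name (one per environment) share a rank.
  const ranked = db("dynamic_secrets")
    .whereIn("folderId", folderIds)
    .select("*", db.raw(`DENSE_RANK() OVER (ORDER BY "name" ASC) as rank`));

  const start = offset + 1; // DENSE_RANK is 1-based
  return db
    .with("w", ranked)
    .select("*")
    .from("w")
    .where("w.rank", ">=", start)
    .andWhere("w.rank", "<", start + limit);
};
```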
@ -5,8 +5,7 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
|
||||
@ -18,12 +17,9 @@ import {
|
||||
TCreateDynamicSecretDTO,
|
||||
TDeleteDynamicSecretDTO,
|
||||
TDetailsDynamicSecretDTO,
|
||||
TGetDynamicSecretsCountDTO,
|
||||
TListDynamicSecretsDTO,
|
||||
TListDynamicSecretsMultiEnvDTO,
|
||||
TUpdateDynamicSecretDTO
|
||||
} from "./dynamic-secret-types";
|
||||
import { AzureEntraIDProvider } from "./providers/azure-entra-id";
|
||||
import { DynamicSecretProviders, TDynamicProviderFns } from "./providers/models";
|
||||
|
||||
type TDynamicSecretServiceFactoryDep = {
|
||||
@ -35,7 +31,7 @@ type TDynamicSecretServiceFactoryDep = {
|
||||
"pruneDynamicSecret" | "unsetLeaseRevocation"
|
||||
>;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
};
|
||||
@ -66,7 +62,7 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorAuthMethod
|
||||
}: TCreateDynamicSecretDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -89,7 +85,7 @@ export const dynamicSecretServiceFactory = ({
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
|
||||
if (existingDynamicSecret)
|
||||
@ -134,7 +130,7 @@ export const dynamicSecretServiceFactory = ({
|
||||
actorAuthMethod
|
||||
}: TUpdateDynamicSecretDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
|
||||
@ -158,10 +154,10 @@ export const dynamicSecretServiceFactory = ({
|
||||
}
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
|
||||
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
|
||||
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
|
||||
|
||||
if (newName) {
|
||||
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name: newName, folderId: folder.id });
|
||||
@ -213,7 +209,7 @@ export const dynamicSecretServiceFactory = ({
|
||||
isForced
|
||||
}: TDeleteDynamicSecretDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
|
||||
@ -230,7 +226,7 @@ export const dynamicSecretServiceFactory = ({
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
if (!folder) throw new BadRequestError({ message: "Folder not found" });
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
|
||||
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
|
||||
@ -271,7 +267,7 @@ export const dynamicSecretServiceFactory = ({
|
||||
actor
|
||||
}: TDetailsDynamicSecretDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) throw new NotFoundError({ message: "Project not found" });
|
||||
if (!project) throw new BadRequestError({ message: "Project not found" });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
@ -287,10 +283,10 @@ export const dynamicSecretServiceFactory = ({
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
|
||||
if (!folder) throw new NotFoundError({ message: "Folder not found" });
|
||||
      if (!folder) throw new BadRequestError({ message: "Folder not found" });

      const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
      if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
      if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
      const decryptedStoredInput = JSON.parse(
        infisicalSymmetricDecrypt({
          keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
@@ -304,104 +300,19 @@ export const dynamicSecretServiceFactory = ({
    return { ...dynamicSecretCfg, inputs: providerInputs };
  };

  // get unique dynamic secret count across multiple envs
  const getCountMultiEnv = async ({
    actorAuthMethod,
    actorOrgId,
    actorId,
    actor,
    projectId,
    path,
    environmentSlugs,
    search,
    isInternal
  }: TListDynamicSecretsMultiEnvDTO) => {
    if (!isInternal) {
      const { permission } = await permissionService.getProjectPermission(
        actor,
        actorId,
        projectId,
        actorAuthMethod,
        actorOrgId
      );

      // verify user has access to each env in request
      environmentSlugs.forEach((environmentSlug) =>
        ForbiddenError.from(permission).throwUnlessCan(
          ProjectPermissionActions.Read,
          subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
        )
      );
    }

    const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path);
    if (!folders.length) throw new NotFoundError({ message: "Folders not found" });

    const dynamicSecretCfg = await dynamicSecretDAL.find(
      { $in: { folderId: folders.map((folder) => folder.id) }, $search: search ? { name: `%${search}%` } : undefined },
      { countDistinct: "name" }
    );

    return Number(dynamicSecretCfg[0]?.count ?? 0);
  };

  // get dynamic secret count for a single env
  const getDynamicSecretCount = async ({
    actorAuthMethod,
    actorOrgId,
    actorId,
    actor,
    path,
    environmentSlug,
    search,
    projectId
  }: TGetDynamicSecretsCountDTO) => {
    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(
      ProjectPermissionActions.Read,
      subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
    );

    const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
    if (!folder) throw new NotFoundError({ message: "Folder not found" });

    const dynamicSecretCfg = await dynamicSecretDAL.find(
      { folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined },
      { count: true }
    );
    return Number(dynamicSecretCfg[0]?.count ?? 0);
  };

  const listDynamicSecretsByEnv = async ({
  const list = async ({
    actorAuthMethod,
    actorOrgId,
    actorId,
    actor,
    projectSlug,
    path,
    environmentSlug,
    limit,
    offset,
    orderBy,
    orderDirection = OrderByDirection.ASC,
    search,
    ...params
    environmentSlug
  }: TListDynamicSecretsDTO) => {
    let { projectId } = params;

    if (!projectId) {
      if (!projectSlug) throw new BadRequestError({ message: "Project ID or slug required" });
      const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
      if (!project) throw new NotFoundError({ message: "Project not found" });
      projectId = project.id;
    }
    const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
    if (!project) throw new BadRequestError({ message: "Project not found" });

    const projectId = project.id;
    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
@@ -415,86 +326,17 @@ export const dynamicSecretServiceFactory = ({
    );

    const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
    if (!folder) throw new NotFoundError({ message: "Folder not found" });
    if (!folder) throw new BadRequestError({ message: "Folder not found" });

    const dynamicSecretCfg = await dynamicSecretDAL.find(
      { folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined },
      {
        limit,
        offset,
        sort: orderBy ? [[orderBy, orderDirection]] : undefined
      }
    );
    const dynamicSecretCfg = await dynamicSecretDAL.find({ folderId: folder.id });
    return dynamicSecretCfg;
  };

  // get dynamic secrets for multiple envs
  const listDynamicSecretsByFolderIds = async ({
    actorAuthMethod,
    actorOrgId,
    actorId,
    actor,
    path,
    environmentSlugs,
    projectId,
    isInternal,
    ...params
  }: TListDynamicSecretsMultiEnvDTO) => {
    if (!isInternal) {
      const { permission } = await permissionService.getProjectPermission(
        actor,
        actorId,
        projectId,
        actorAuthMethod,
        actorOrgId
      );

      // verify user has access to each env in request
      environmentSlugs.forEach((environmentSlug) =>
        ForbiddenError.from(permission).throwUnlessCan(
          ProjectPermissionActions.Read,
          subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
        )
      );
    }

    const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path);
    if (!folders.length) throw new NotFoundError({ message: "Folders not found" });

    const dynamicSecretCfg = await dynamicSecretDAL.listDynamicSecretsByFolderIds({
      folderIds: folders.map((folder) => folder.id),
      ...params
    });

    return dynamicSecretCfg;
  };

  const fetchAzureEntraIdUsers = async ({
    tenantId,
    applicationId,
    clientSecret
  }: {
    tenantId: string;
    applicationId: string;
    clientSecret: string;
  }) => {
    const azureEntraIdUsers = await AzureEntraIDProvider().fetchAzureEntraIdUsers(
      tenantId,
      applicationId,
      clientSecret
    );
    return azureEntraIdUsers;
  };

  return {
    create,
    updateByName,
    deleteByName,
    getDetails,
    listDynamicSecretsByEnv,
    listDynamicSecretsByFolderIds,
    getDynamicSecretCount,
    getCountMultiEnv,
    fetchAzureEntraIdUsers
    list
  };
};
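The reworked `list` above accepts either a projectId or a projectSlug and resolves the slug to an id before any permission check runs. A minimal standalone sketch of that resolution pattern, with an illustrative lookup callback standing in for projectDAL.findProjectBySlug (the type names and error messages here are simplified placeholders, not repository code):

```ts
// Standalone sketch (not from the diff): accept either an id or a slug and
// normalize to an id, throwing when neither resolves.
type ProjectRef = { projectId?: string; projectSlug?: string };
type ProjectLookup = (slug: string) => Promise<{ id: string } | null>;

async function resolveProjectId(ref: ProjectRef, findProjectBySlug: ProjectLookup): Promise<string> {
  if (ref.projectId) return ref.projectId;
  if (!ref.projectSlug) throw new Error("Project ID or slug required");

  const project = await findProjectBySlug(ref.projectSlug);
  if (!project) throw new Error("Project not found");
  return project.id;
}
```

Keeping this as the first step means the later permission and folder lookups can assume a concrete projectId, which is how the list, count, and multi-env helpers above are written.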
@@ -1,7 +1,6 @@
import { z } from "zod";

import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { SecretsOrderBy } from "@app/services/secret/secret-types";
import { TProjectPermission } from "@app/lib/types";

import { DynamicSecretProviderSchema } from "./providers/models";

@@ -51,20 +50,5 @@ export type TDetailsDynamicSecretDTO = {
export type TListDynamicSecretsDTO = {
  path: string;
  environmentSlug: string;
  projectSlug?: string;
  projectId?: string;
  offset?: number;
  limit?: number;
  orderBy?: SecretsOrderBy;
  orderDirection?: OrderByDirection;
  search?: string;
  projectSlug: string;
} & Omit<TProjectPermission, "projectId">;

export type TListDynamicSecretsMultiEnvDTO = Omit<
  TListDynamicSecretsDTO,
  "projectId" | "environmentSlug" | "projectSlug"
> & { projectId: string; environmentSlugs: string[]; isInternal?: boolean };

export type TGetDynamicSecretsCountDTO = Omit<TListDynamicSecretsDTO, "projectSlug" | "projectId"> & {
  projectId: string;
};
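The multi-env and count DTOs above are derived from TListDynamicSecretsDTO with Omit rather than redeclared. A self-contained, type-level sketch of the same pattern (the type names below are illustrative, not the repository's):

```ts
// Start from a base query type, drop the single-env/project-reference fields,
// then re-add the stricter multi-env fields.
type BaseListQuery = {
  path: string;
  environmentSlug: string;
  projectId?: string;
  projectSlug?: string;
  search?: string;
  limit?: number;
  offset?: number;
};

type MultiEnvListQuery = Omit<BaseListQuery, "projectId" | "environmentSlug" | "projectSlug"> & {
  projectId: string;
  environmentSlugs: string[];
  isInternal?: boolean;
};

// MultiEnvListQuery now requires projectId and environmentSlugs while keeping
// path, search, limit and offset from the base type.
```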
@ -1,138 +0,0 @@
|
||||
import axios from "axios";
|
||||
import { customAlphabet } from "nanoid";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
|
||||
import { AzureEntraIDSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
|
||||
const MSFT_LOGIN_URL = "https://login.microsoftonline.com";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
type User = { name: string; id: string; email: string };
|
||||
|
||||
export const AzureEntraIDProvider = (): TDynamicProviderFns & {
|
||||
fetchAzureEntraIdUsers: (tenantId: string, applicationId: string, clientSecret: string) => Promise<User[]>;
|
||||
} => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await AzureEntraIDSchema.parseAsync(inputs);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getToken = async (
|
||||
tenantId: string,
|
||||
applicationId: string,
|
||||
clientSecret: string
|
||||
): Promise<{ token?: string; success: boolean }> => {
|
||||
const response = await axios.post<{ access_token: string }>(
|
||||
`${MSFT_LOGIN_URL}/${tenantId}/oauth2/v2.0/token`,
|
||||
{
|
||||
grant_type: "client_credentials",
|
||||
client_id: applicationId,
|
||||
client_secret: clientSecret,
|
||||
scope: "https://graph.microsoft.com/.default"
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded"
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
if (response.status === 200) {
|
||||
return { token: response.data.access_token, success: true };
|
||||
}
|
||||
return { success: false };
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
return data.success;
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
if (!data.success) {
|
||||
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
|
||||
}
|
||||
|
||||
const password = generatePassword();
|
||||
|
||||
const response = await axios.patch(
|
||||
`${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`,
|
||||
{
|
||||
passwordProfile: {
|
||||
forceChangePasswordNextSignIn: false,
|
||||
password
|
||||
}
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${data.token}`
|
||||
}
|
||||
}
|
||||
);
|
||||
if (response.status !== 204) {
|
||||
throw new BadRequestError({ message: "Failed to update password" });
|
||||
}
|
||||
|
||||
return { entityId: providerInputs.userId, data: { email: providerInputs.email, password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
// Creates a new password
|
||||
await create(inputs);
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
|
||||
const data = await getToken(tenantId, applicationId, clientSecret);
|
||||
if (!data.success) {
|
||||
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
|
||||
}
|
||||
|
||||
const response = await axios.get<{ value: [{ id: string; displayName: string; userPrincipalName: string }] }>(
|
||||
`${MSFT_GRAPH_API_URL}/users`,
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
Authorization: `Bearer ${data.token}`
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
if (response.status !== 200) {
|
||||
throw new BadRequestError({ message: "Failed to fetch users" });
|
||||
}
|
||||
|
||||
const users = response.data.value.map((user) => {
|
||||
return {
|
||||
name: user.displayName,
|
||||
id: user.id,
|
||||
email: user.userPrincipalName
|
||||
};
|
||||
});
|
||||
return users;
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew,
|
||||
fetchAzureEntraIdUsers
|
||||
};
|
||||
};
|
@ -1,126 +0,0 @@
|
||||
import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
|
||||
|
||||
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
|
||||
if (
|
||||
isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
) {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
|
||||
const connection = new ElasticSearchClient({
|
||||
node: {
|
||||
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
|
||||
...(providerInputs.ca && {
|
||||
ssl: {
|
||||
rejectUnauthorized: false,
|
||||
ca: providerInputs.ca
|
||||
}
|
||||
})
|
||||
},
|
||||
auth: {
|
||||
...(providerInputs.auth.type === ElasticSearchAuthTypes.ApiKey
|
||||
? {
|
||||
apiKey: {
|
||||
api_key: providerInputs.auth.apiKey,
|
||||
id: providerInputs.auth.apiKeyId
|
||||
}
|
||||
}
|
||||
: {
|
||||
username: providerInputs.auth.username,
|
||||
password: providerInputs.auth.password
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
return connection;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection
|
||||
.info()
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
|
||||
return infoResponse;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
await connection.security.putUser({
|
||||
username,
|
||||
password,
|
||||
full_name: "Managed by Infisical.com",
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
await connection.security.deleteUser({
|
||||
username: entityId
|
||||
});
|
||||
|
||||
await connection.close();
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@@ -1,13 +1,7 @@
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SqlDatabaseProvider } from "./sql-database";

@@ -16,11 +10,5 @@ export const buildDynamicSecretProviders = () => ({
  [DynamicSecretProviders.Cassandra]: CassandraProvider(),
  [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
  [DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider(),
  [DynamicSecretProviders.MongoAtlas]: MongoAtlasProvider(),
  [DynamicSecretProviders.MongoDB]: MongoDBProvider(),
  [DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
  [DynamicSecretProviders.RabbitMq]: RabbitMqProvider(),
  [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
  [DynamicSecretProviders.Ldap]: LdapProvider()
  [DynamicSecretProviders.AwsElastiCache]: AwsElastiCacheDatabaseProvider()
});
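The registry built by buildDynamicSecretProviders maps each DynamicSecretProviders value to an object implementing the shared TDynamicProviderFns surface, so callers can dispatch on the configured provider type. A rough sketch of such a dispatcher follows; the function itself is hypothetical, and the create(inputs, expireAt) arity is assumed from the shared interface rather than taken from this diff:

```ts
// Hypothetical dispatcher over the provider registry above.
const providers = buildDynamicSecretProviders();

async function provisionLease(type: DynamicSecretProviders, inputs: unknown, expireAt: number) {
  const provider = providers[type];

  // Every provider validates its own connection details before provisioning.
  const reachable = await provider.validateConnection(inputs);
  if (!reachable) throw new Error(`Dynamic secret provider ${type} is unreachable`);

  // create returns an entityId (used later for renew/revoke) plus the generated credentials.
  return provider.create(inputs, expireAt);
}
```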
|
||||
|
@ -1,235 +0,0 @@
|
||||
import handlebars from "handlebars";
|
||||
import ldapjs from "ldapjs";
|
||||
import ldif from "ldif";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { LdapSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
const encodePassword = (password?: string) => {
|
||||
const quotedPassword = `"${password}"`;
|
||||
const utf16lePassword = Buffer.from(quotedPassword, "utf16le");
|
||||
const base64Password = utf16lePassword.toString("base64");
|
||||
return base64Password;
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(20);
|
||||
};
|
||||
|
||||
const generateLDIF = ({
|
||||
username,
|
||||
password,
|
||||
ldifTemplate
|
||||
}: {
|
||||
username: string;
|
||||
password?: string;
|
||||
ldifTemplate: string;
|
||||
}): string => {
|
||||
const data = {
|
||||
Username: username,
|
||||
Password: password,
|
||||
EncodedPassword: encodePassword(password)
|
||||
};
|
||||
|
||||
const renderTemplate = handlebars.compile(ldifTemplate);
|
||||
const renderedLdif = renderTemplate(data);
|
||||
|
||||
return renderedLdif;
|
||||
};
|
||||
|
||||
export const LdapProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await LdapSchema.parseAsync(inputs);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const client = ldapjs.createClient({
|
||||
url: providerInputs.url,
|
||||
tlsOptions: {
|
||||
ca: providerInputs.ca ? providerInputs.ca : null,
|
||||
rejectUnauthorized: !!providerInputs.ca
|
||||
},
|
||||
reconnect: true,
|
||||
bindDN: providerInputs.binddn,
|
||||
bindCredentials: providerInputs.bindpass
|
||||
});
|
||||
|
||||
client.on("error", (err: Error) => {
|
||||
client.unbind();
|
||||
reject(new BadRequestError({ message: err.message }));
|
||||
});
|
||||
|
||||
client.bind(providerInputs.binddn, providerInputs.bindpass, (err) => {
|
||||
if (err) {
|
||||
client.unbind();
|
||||
reject(new BadRequestError({ message: err.message }));
|
||||
} else {
|
||||
resolve(client);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
return client.connected;
|
||||
};
|
||||
|
||||
const executeLdif = async (client: ldapjs.Client, ldif_file: string) => {
|
||||
type TEntry = {
|
||||
dn: string;
|
||||
type: string;
|
||||
|
||||
changes: {
|
||||
operation?: string;
|
||||
attribute: {
|
||||
attribute: string;
|
||||
};
|
||||
value: {
|
||||
value: string;
|
||||
};
|
||||
values: {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, can be any for ldapjs.Change.modification.values
|
||||
value: any;
|
||||
}[];
|
||||
}[];
|
||||
};
|
||||
|
||||
let parsedEntries: TEntry[];
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
parsedEntries = ldif.parse(ldif_file).entries as TEntry[];
|
||||
} catch (err) {
|
||||
throw new BadRequestError({
|
||||
message: "Invalid LDIF format, refer to the documentation at Dynamic secrets > LDAP > LDIF Entries."
|
||||
});
|
||||
}
|
||||
|
||||
const dnArray: string[] = [];
|
||||
|
||||
for await (const entry of parsedEntries) {
|
||||
const { dn } = entry;
|
||||
let responseDn: string;
|
||||
|
||||
if (entry.type === "add") {
|
||||
const attributes: Record<string, string | string[]> = {};
|
||||
|
||||
entry.changes.forEach((change) => {
|
||||
const attrName = change.attribute.attribute;
|
||||
const attrValue = change.value.value;
|
||||
|
||||
attributes[attrName] = Array.isArray(attrValue) ? attrValue : [attrValue];
|
||||
});
|
||||
|
||||
responseDn = await new Promise((resolve, reject) => {
|
||||
client.add(dn, attributes, (err) => {
|
||||
if (err) {
|
||||
reject(new BadRequestError({ message: err.message }));
|
||||
} else {
|
||||
resolve(dn);
|
||||
}
|
||||
});
|
||||
});
|
||||
} else if (entry.type === "modify") {
|
||||
const changes: ldapjs.Change[] = [];
|
||||
|
||||
entry.changes.forEach((change) => {
|
||||
changes.push(
|
||||
new ldapjs.Change({
|
||||
operation: change.operation || "replace",
|
||||
modification: {
|
||||
type: change.attribute.attribute,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
|
||||
values: change.values.map((value) => value.value)
|
||||
}
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
responseDn = await new Promise((resolve, reject) => {
|
||||
client.modify(dn, changes, (err) => {
|
||||
if (err) {
|
||||
reject(new BadRequestError({ message: err.message }));
|
||||
} else {
|
||||
resolve(dn);
|
||||
}
|
||||
});
|
||||
});
|
||||
} else if (entry.type === "delete") {
|
||||
responseDn = await new Promise((resolve, reject) => {
|
||||
client.del(dn, (err) => {
|
||||
if (err) {
|
||||
reject(new BadRequestError({ message: err.message }));
|
||||
} else {
|
||||
resolve(dn);
|
||||
}
|
||||
});
|
||||
});
|
||||
} else {
|
||||
client.unbind();
|
||||
throw new BadRequestError({ message: `Unsupported operation type ${entry.type}` });
|
||||
}
|
||||
|
||||
dnArray.push(responseDn);
|
||||
}
|
||||
client.unbind();
|
||||
return dnArray;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });
|
||||
|
||||
try {
|
||||
const dnArray = await executeLdif(client, generatedLdif);
|
||||
|
||||
return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
|
||||
} catch (err) {
|
||||
if (providerInputs.rollbackLdif) {
|
||||
const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
|
||||
await executeLdif(client, rollbackLdif);
|
||||
}
|
||||
throw new BadRequestError({ message: (err as Error).message });
|
||||
}
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const revocationLdif = generateLDIF({ username: entityId, ldifTemplate: providerInputs.revocationLdif });
|
||||
|
||||
await executeLdif(connection, revocationLdif);
|
||||
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@ -7,16 +7,12 @@ export enum SqlProviders {
|
||||
MsSQL = "mssql"
|
||||
}
|
||||
|
||||
export enum ElasticSearchAuthTypes {
|
||||
User = "user",
|
||||
ApiKey = "api-key"
|
||||
}
|
||||
|
||||
export const DynamicSecretRedisDBSchema = z.object({
|
||||
host: z.string().trim().toLowerCase(),
|
||||
port: z.number(),
|
||||
username: z.string().trim(), // this is often "default".
|
||||
password: z.string().trim().optional(),
|
||||
|
||||
creationStatement: z.string().trim(),
|
||||
revocationStatement: z.string().trim(),
|
||||
renewStatement: z.string().trim().optional(),
|
||||
@ -34,48 +30,6 @@ export const DynamicSecretAwsElastiCacheSchema = z.object({
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretElasticSearchSchema = z.object({
|
||||
host: z.string().trim().min(1),
|
||||
port: z.number(),
|
||||
roles: z.array(z.string().trim().min(1)).min(1),
|
||||
|
||||
// two auth types "user, apikey"
|
||||
auth: z.discriminatedUnion("type", [
|
||||
z.object({
|
||||
type: z.literal(ElasticSearchAuthTypes.User),
|
||||
username: z.string().trim(),
|
||||
password: z.string().trim()
|
||||
}),
|
||||
z.object({
|
||||
type: z.literal(ElasticSearchAuthTypes.ApiKey),
|
||||
apiKey: z.string().trim(),
|
||||
apiKeyId: z.string().trim()
|
||||
})
|
||||
]),
|
||||
|
||||
ca: z.string().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretRabbitMqSchema = z.object({
|
||||
host: z.string().trim().min(1),
|
||||
port: z.number(),
|
||||
tags: z.array(z.string().trim()).default([]),
|
||||
|
||||
username: z.string().trim().min(1),
|
||||
password: z.string().trim().min(1),
|
||||
|
||||
ca: z.string().optional(),
|
||||
|
||||
virtualHost: z.object({
|
||||
name: z.string().trim().min(1),
|
||||
permissions: z.object({
|
||||
read: z.string().trim().min(1),
|
||||
write: z.string().trim().min(1),
|
||||
configure: z.string().trim().min(1)
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
export const DynamicSecretSqlDBSchema = z.object({
|
||||
client: z.nativeEnum(SqlProviders),
|
||||
host: z.string().trim().toLowerCase(),
|
||||
@ -113,90 +67,12 @@ export const DynamicSecretAwsIamSchema = z.object({
|
||||
policyArns: z.string().trim().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretMongoAtlasSchema = z.object({
|
||||
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
|
||||
adminPrivateKey: z.string().trim().min(1).describe("Admin user private api key"),
|
||||
groupId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.describe("Unique 24-hexadecimal digit string that identifies your project. This is same as project id"),
|
||||
roles: z
|
||||
.object({
|
||||
collectionName: z.string().optional().describe("Collection on which this role applies."),
|
||||
databaseName: z.string().min(1).describe("Database to which the user is granted access privileges."),
|
||||
roleName: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe(
|
||||
' Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
|
||||
)
|
||||
})
|
||||
.array()
|
||||
.min(1),
|
||||
scopes: z
|
||||
.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe(
|
||||
"Human-readable label that identifies the cluster or MongoDB Atlas Data Lake that this database user can access."
|
||||
),
|
||||
type: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe("Category of resource that this database user can access. Enum: CLUSTER, DATA_LAKE, STREAM")
|
||||
})
|
||||
.array()
|
||||
});
|
||||
|
||||
export const DynamicSecretMongoDBSchema = z.object({
|
||||
host: z.string().min(1).trim().toLowerCase(),
|
||||
port: z.number().optional(),
|
||||
username: z.string().min(1).trim(),
|
||||
password: z.string().min(1).trim(),
|
||||
database: z.string().min(1).trim(),
|
||||
ca: z.string().min(1).optional(),
|
||||
roles: z
|
||||
.string()
|
||||
.array()
|
||||
.min(1)
|
||||
.describe(
|
||||
'Enum: "atlasAdmin" "backup" "clusterMonitor" "dbAdmin" "dbAdminAnyDatabase" "enableSharding" "read" "readAnyDatabase" "readWrite" "readWriteAnyDatabase" "<a custom role name>".Human-readable label that identifies a group of privileges assigned to a database user. This value can either be a built-in role or a custom role.'
|
||||
)
|
||||
});
|
||||
|
||||
export const AzureEntraIDSchema = z.object({
|
||||
tenantId: z.string().trim().min(1),
|
||||
userId: z.string().trim().min(1),
|
||||
email: z.string().trim().min(1),
|
||||
applicationId: z.string().trim().min(1),
|
||||
clientSecret: z.string().trim().min(1)
|
||||
});
|
||||
|
||||
export const LdapSchema = z.object({
|
||||
url: z.string().trim().min(1),
|
||||
binddn: z.string().trim().min(1),
|
||||
bindpass: z.string().trim().min(1),
|
||||
ca: z.string().optional(),
|
||||
|
||||
creationLdif: z.string().min(1),
|
||||
revocationLdif: z.string().min(1),
|
||||
rollbackLdif: z.string().optional()
|
||||
});
|
||||
|
||||
export enum DynamicSecretProviders {
|
||||
SqlDatabase = "sql-database",
|
||||
Cassandra = "cassandra",
|
||||
AwsIam = "aws-iam",
|
||||
Redis = "redis",
|
||||
AwsElastiCache = "aws-elasticache",
|
||||
MongoAtlas = "mongo-db-atlas",
|
||||
ElasticSearch = "elastic-search",
|
||||
MongoDB = "mongo-db",
|
||||
RabbitMq = "rabbit-mq",
|
||||
AzureEntraID = "azure-entra-id",
|
||||
Ldap = "ldap"
|
||||
AwsElastiCache = "aws-elasticache"
|
||||
}
|
||||
|
||||
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
|
||||
@ -204,13 +80,7 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema })
|
||||
z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema })
|
||||
]);
|
||||
|
||||
export type TDynamicProviderFns = {
|
||||
|
@ -1,146 +0,0 @@
|
||||
import axios, { AxiosError } from "axios";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretMongoAtlasSchema.parseAsync(inputs);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
|
||||
const client = axios.create({
|
||||
baseURL: "https://cloud.mongodb.com/api/atlas",
|
||||
headers: {
|
||||
Accept: "application/vnd.atlas.2023-02-01+json",
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
});
|
||||
const digestAuth = createDigestAuthRequestInterceptor(
|
||||
client,
|
||||
providerInputs.adminPublicKey,
|
||||
providerInputs.adminPrivateKey
|
||||
);
|
||||
return digestAuth;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client({
|
||||
method: "GET",
|
||||
url: `v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
params: { itemsPerPage: 1 }
|
||||
})
|
||||
.then(() => true)
|
||||
.catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
await client({
|
||||
method: "POST",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers`,
|
||||
data: {
|
||||
roles: providerInputs.roles,
|
||||
scopes: providerInputs.scopes,
|
||||
deleteAfterDate: expiration,
|
||||
username,
|
||||
password,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const isExisting = await client({
|
||||
method: "GET",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
}).catch((err) => {
|
||||
if ((err as AxiosError).response?.status === 404) return false;
|
||||
throw err;
|
||||
});
|
||||
if (isExisting) {
|
||||
await client({
|
||||
method: "DELETE",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
await client({
|
||||
method: "PATCH",
|
||||
url: `/v2/groups/${providerInputs.groupId}/databaseUsers/admin/${username}`,
|
||||
data: {
|
||||
deleteAfterDate: expiration,
|
||||
databaseName: "admin",
|
||||
groupId: providerInputs.groupId
|
||||
}
|
||||
}).catch((error) => {
|
||||
if ((error as AxiosError).response) {
|
||||
throw new Error(JSON.stringify((error as AxiosError).response?.data));
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@ -1,116 +0,0 @@
|
||||
import { MongoClient } from "mongodb";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
|
||||
if (
|
||||
appCfg.isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
)
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
|
||||
const isSrv = !providerInputs.port;
|
||||
const uri = isSrv
|
||||
? `mongodb+srv://${providerInputs.host}`
|
||||
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
|
||||
|
||||
const client = new MongoClient(uri, {
|
||||
auth: {
|
||||
username: providerInputs.username,
|
||||
password: providerInputs.password
|
||||
},
|
||||
directConnection: !isSrv,
|
||||
ca: providerInputs.ca
|
||||
});
|
||||
return client;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const isConnected = await client
|
||||
.db(providerInputs.database)
|
||||
.command({ ping: 1 })
|
||||
.then(() => true);
|
||||
|
||||
await client.close();
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
const db = client.db(providerInputs.database);
|
||||
|
||||
await db.command({
|
||||
createUser: username,
|
||||
pwd: password,
|
||||
roles: providerInputs.roles
|
||||
});
|
||||
await client.close();
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
|
||||
const db = client.db(providerInputs.database);
|
||||
await db.command({
|
||||
dropUser: username
|
||||
});
|
||||
await client.close();
|
||||
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@ -1,172 +0,0 @@
|
||||
import axios, { Axios } from "axios";
|
||||
import https from "https";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
type TCreateRabbitMQUser = {
|
||||
axiosInstance: Axios;
|
||||
createUser: {
|
||||
username: string;
|
||||
password: string;
|
||||
tags: string[];
|
||||
};
|
||||
virtualHost: {
|
||||
name: string;
|
||||
permissions: {
|
||||
read: string;
|
||||
write: string;
|
||||
configure: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
type TDeleteRabbitMqUser = {
|
||||
axiosInstance: Axios;
|
||||
usernameToDelete: string;
|
||||
};
|
||||
|
||||
async function createRabbitMqUser({ axiosInstance, createUser, virtualHost }: TCreateRabbitMQUser): Promise<void> {
|
||||
try {
|
||||
// Create user
|
||||
const userUrl = `/users/${createUser.username}`;
|
||||
const userData = {
|
||||
password: createUser.password,
|
||||
tags: createUser.tags.join(",")
|
||||
};
|
||||
|
||||
await axiosInstance.put(userUrl, userData);
|
||||
|
||||
// Set permissions for the virtual host
|
||||
if (virtualHost) {
|
||||
const permissionData = {
|
||||
configure: virtualHost.permissions.configure,
|
||||
write: virtualHost.permissions.write,
|
||||
read: virtualHost.permissions.read
|
||||
};
|
||||
|
||||
await axiosInstance.put(
|
||||
`/permissions/${encodeURIComponent(virtualHost.name)}/${createUser.username}`,
|
||||
permissionData
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(error, "Error creating RabbitMQ user");
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRabbitMqUser) {
|
||||
await axiosInstance.delete(`users/${usernameToDelete}`);
|
||||
return { username: usernameToDelete };
|
||||
}
|
||||
|
||||
export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const appCfg = getConfig();
|
||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
|
||||
|
||||
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
|
||||
if (
|
||||
isCloud &&
|
||||
// localhost
|
||||
// internal ips
|
||||
(providerInputs.host === "host.docker.internal" ||
|
||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
||||
) {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
||||
throw new BadRequestError({ message: "Invalid db host" });
|
||||
}
|
||||
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
|
||||
const axiosInstance = axios.create({
|
||||
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
|
||||
auth: {
|
||||
username: providerInputs.username,
|
||||
password: providerInputs.password
|
||||
},
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
|
||||
...(providerInputs.ca && {
|
||||
httpsAgent: new https.Agent({ ca: providerInputs.ca, rejectUnauthorized: false })
|
||||
})
|
||||
});
|
||||
|
||||
return axiosInstance;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection.get("/whoami").then(() => true);
|
||||
|
||||
return infoResponse;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
|
||||
await createRabbitMqUser({
|
||||
axiosInstance: connection,
|
||||
virtualHost: providerInputs.virtualHost,
|
||||
createUser: {
|
||||
password,
|
||||
username,
|
||||
tags: [...(providerInputs.tags ?? []), "infisical-user"]
|
||||
}
|
||||
});
|
||||
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
|
||||
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
|
||||
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
create,
|
||||
revoke,
|
||||
renew
|
||||
};
|
||||
};
|
@@ -1,3 +1,4 @@
/* eslint-disable no-console */
import handlebars from "handlebars";
import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";

@@ -30,7 +30,7 @@ export const externalKmsDALFactory = (db: TDbClient) => {
        isDisabled: el.isDisabled,
        isReserved: el.isReserved,
        orgId: el.orgId,
        name: el.name,
        slug: el.slug,
        createdAt: el.createdAt,
        updatedAt: el.updatedAt,
        externalKms: {

@@ -1,7 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";

import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
@@ -43,7 +43,7 @@ export const externalKmsServiceFactory = ({
    provider,
    description,
    actor,
    name,
    slug,
    actorId,
    actorOrgId,
    actorAuthMethod
@@ -64,7 +64,7 @@ export const externalKmsServiceFactory = ({
      });
    }

    const kmsName = name ? slugify(name) : slugify(alphaNumericNanoId(8).toLowerCase());
    const kmsSlug = slug ? slugify(slug) : slugify(alphaNumericNanoId(8).toLowerCase());

    let sanitizedProviderInput = "";
    switch (provider.type) {
@@ -96,7 +96,7 @@ export const externalKmsServiceFactory = ({
        {
          isReserved: false,
          description,
          name: kmsName,
          slug: kmsSlug,
          orgId: actorOrgId
        },
        tx
@@ -120,7 +120,7 @@ export const externalKmsServiceFactory = ({
    description,
    actor,
    id: kmsId,
    name,
    slug,
    actorId,
    actorOrgId,
    actorAuthMethod
@@ -142,10 +142,10 @@ export const externalKmsServiceFactory = ({
      });
    }

    const kmsName = name ? slugify(name) : undefined;
    const kmsSlug = slug ? slugify(slug) : undefined;

    const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
    if (!externalKmsDoc) throw new NotFoundError({ message: "External kms not found" });
    if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });

    let sanitizedProviderInput = "";
    const { encryptor: orgDataKeyEncryptor, decryptor: orgDataKeyDecryptor } =
@@ -188,7 +188,7 @@ export const externalKmsServiceFactory = ({
        kmsDoc.id,
        {
          description,
          name: kmsName
          slug: kmsSlug
        },
        tx
      );
@@ -220,7 +220,7 @@ export const externalKmsServiceFactory = ({
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);

    const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
    if (!externalKmsDoc) throw new NotFoundError({ message: "External kms not found" });
    if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });

    const externalKms = await externalKmsDAL.transaction(async (tx) => {
      const kms = await kmsDAL.deleteById(kmsDoc.id, tx);
@@ -258,7 +258,7 @@ export const externalKmsServiceFactory = ({
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);

    const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
    if (!externalKmsDoc) throw new NotFoundError({ message: "External kms not found" });
    if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });

    const { decryptor: orgDataKeyDecryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.Organization,
@@ -280,14 +280,14 @@ export const externalKmsServiceFactory = ({
    }
  };

  const findByName = async ({
  const findBySlug = async ({
    actor,
    actorId,
    actorOrgId,
    actorAuthMethod,
    name: kmsName
    slug: kmsSlug
  }: TGetExternalKmsBySlugDTO) => {
    const kmsDoc = await kmsDAL.findOne({ name: kmsName, orgId: actorOrgId });
    const kmsDoc = await kmsDAL.findOne({ slug: kmsSlug, orgId: actorOrgId });
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
@@ -298,7 +298,7 @@ export const externalKmsServiceFactory = ({
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);

    const externalKmsDoc = await externalKmsDAL.findOne({ kmsKeyId: kmsDoc.id });
    if (!externalKmsDoc) throw new NotFoundError({ message: "External kms not found" });
    if (!externalKmsDoc) throw new BadRequestError({ message: "External kms not found" });

    const { decryptor: orgDataKeyDecryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.Organization,
@@ -327,6 +327,6 @@ export const externalKmsServiceFactory = ({
    deleteById,
    list,
    findById,
    findByName
    findBySlug
  };
};
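In the create and update paths above, the KMS identifier (name on one side of this diff, slug on the other) falls back to a slugified random id when the caller does not supply one. A minimal sketch of that defaulting helper, using only the slugify and alphaNumericNanoId imports already shown in this file:

```ts
import slugify from "@sindresorhus/slugify";

import { alphaNumericNanoId } from "@app/lib/nanoid";

// Mirrors the kmsName/kmsSlug defaulting in externalKmsServiceFactory:
// use the caller-provided value when present, otherwise an 8-char random slug.
const resolveKmsIdentifier = (value?: string): string =>
  value ? slugify(value) : slugify(alphaNumericNanoId(8).toLowerCase());
```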