Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-20 22:32:28 +00:00)

Compare commits: docs-for-l... -> omar/eng-1... (343 commits)
Commit SHAs:
3c954ea257 7d5aba258a c648235390 3c588beebe 6614721d34 bbd8a049fb a91f64f742
1bc508b286 d3d30eba80 623a99be0e f80023f8f3 98289f56ae c40f195c1d fbfe694fc0
2098bd3be2 ef82c664a6 fcbedfaf1b 882f6b22f5 bcd778457d 0a1242db75 a078cb6059
095b26c8c9 fcdfcd0219 132de1479d d4a76b3621 331dcd4d79 025f64f068 05d7f94518
b58e32c754 4ace30aecd 8b2a866994 b4386af2e0 2b44e32ac1 ec5e6eb7b4 48cb5f6e9b
3c63312944 0842901d4f 32d6826ade a750f48922 67662686f3 11c96245a7 a63191e11d
7a13c155f5 fb6a085bf9 6c533f89d3 5ceb30f43f 7728a4793b d3523ed1d6 35a9b2a38d
16a9f8c194 9557639bfe 1049f95952 e618d5ca5f d659250ce8 87363eabfe d1b9c316d8
b9867c0d06 afa2f383c5 39f7354fec c46c0cb1e8 6905ffba4e 64fd423c61 da1a7466d1
d3f3f34129 c8fba7ce4c 82c3e943eb dc3903ff15 a9c01dcf1f ae51fbb8f2 62910e93ca
586b9d9a56 9e3c632a1f bb094f60c1 6d709fba62 27beca7099 28e7e4c52d cfc0ca1f03
b96593d0ab 2de5896ba4 3455ad3898 c7a32a3b05 1ebfed8c11 a18f3c2919 16d215b588
a852b15a1e cacd9041b0 cfeffebd46 1dceedcdb4 14f03c38c3 be9f096e75 49133a044f
b7fe3743db c5fded361c e676acbadf 9b31a7bbb1 345be85825 f82b11851a b466b3073b
46105fc315 3cf8fd2ff8 5277a50b3e dab8f0b261 4293665130 8afa65c272 4c739fd57f
bcc2840020 8b3af92d23 9ca58894f0 d131314de0 9c03144f19 5495ffd78e a200469c72
85c3074216 cfc55ff283 7179b7a540 6c4cb5e084 9cfb044178 105eb70fd9 18a2547b24
588b3c77f9 a04834c7c9 9df9f4a5da afdc704423 57261cf0c8 06f6004993 f3bfb9cc5a
48fb77be49 c3956c60e9 f55bcb93ba d3fb2a6a74 6a23b74481 602cf4b3c4 84ff71fef2
4c01bddf0e add5742b8c 68f3964206 90374971ae 3a1eadba8c 5305017ce2 cf5f49d14e
4f4b5be8ea ecea79f040 586b901318 ad8d247cdc 3b47d7698b aa9a86df71 33411335ed
ca55f19926 3794521c56 728f023263 229706f57f 6cf2488326 2c402fbbb6 92ce05283b
39d92ce6ff 44a026446e bbf52c9a48 539e5b1907 44b02d5324 71fb6f1d11 e64100fab1
e4b149a849 5bcf07b32b 3b0c48052b df50e3b0f9 bdf2ae40b6 b6c05a2f25 960efb9cf9
aa8d58abad cfb0cc4fea 7712df296c 7c38932121 69ad9845e1 7321c237d7 32430a6a16
3d6ea3251e f034adba76 463eb0014e be39e63832 21403f6fe5 2f9e542b31 089d6812fd
464a3ccd53 71c9c0fa1e 46ad1d47a9 2b977eeb33 a692148597 b762816e66 cf275979ba
64bfa4f334 e3eb14bfd9 24b50651c9 1cd459fda7 38917327d9 63fac39fff 7c62a776fb
d7b494c6f8 93208afb36 1a084d8fcf 269f851cbf 7a61995dd4 dd4f133c6c c41d27e1ae
1866ed8d23 7b3b232dde 9d618b4ae9 5330ab2171 662e588c22 90057d80ff 1eda7aaaac
00dcadbc08 7a7289ebd0 e5d4677fd6 bce3f3d676 300372fa98 47a4f8bae9 863719f296
7317dc1cf5 75df898e78 0de6add3f7 0c008b6393 0c3894496c 35fbd5d49d d03b453e3d
96e331b678 d4d468660d 75a4965928 660c09ded4 b5287d91c0 6a17763237 f2bd3daea2
7f70f96936 73e0a54518 0d295a2824 9a62efea4f 506c30bcdb 735ad4ff65 41e36dfcef
421d8578b7 6685f8aa0a d6c37c1065 54f3f94185 907537f7c0 61263b9384 d71c85e052
b6d8be2105 0693f81d0a 61d516ef35 31fc64fb4c 8bf7e4c4d1 2027d4b44e d401c9074e
afe35dbbb5 6ff1602fd5 6603364749 53bea22b85 7c84adc1c2 fa8d6735a1 a6137f267d
d521ee7b7e 827931e416 faa83344a7 3be3d807d2 9f7ea3c4e5 e67218f170 269c40c67c
089a7e880b 64ec741f1a c98233ddaf ae17981c41 6c49c7da3c 2de04b6fe5 5c9ec1e4be
ba89491d4c 483e596a7a 65f122bd41 682b552fdc d4cfd0b6ed ba1fd8a3f7 e8f09d2c7b
774371a218 c4b54de303 433971a72d ed7fc0e5cd 1ae6213387 4acf9413f0 f0549cab98
d75e49dce5 8819abd710 796f76da46 d6e1ed4d1e 1295b68d80 c79f84c064 d0c50960ef
85089a08e1 bf97294dad 4053078d95 4ba3899861 6bae3628c0 4cb935dae7 ccad684ab2
9aebd712d1 05f07b25ac 5b0dbf04b2 b050db84ab 8fef6911f1 44ba31a743 6bdbac4750
60fb195706 c8109b4e84 1f2b0443cc dd1cabf9f6 8b781b925a ddcf5b576b 7138b392f2
bfce1021fb 93c0313b28 8cfc217519 d272c6217a 2fe2ddd9fc e330ddd5ee 7aba9c1a50
4cd8e0fa67 ea3d164ead df468e4865 66e96018c4 3b02eedca6 a55fe2b788 5d7a267f1d
b16ab6f763 334a728259 4a3143e689 14810de054 8cfcbaa12c ada63b9e7d 3f6a0c77f1
9e4b66e215 8a14914bc3 fc3a409164 ffc58b0313 9a7e05369c 33b49f4466 60895537a7
Files changed:

.env.example
.github/workflows
.gitignore
.husky
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
README.md
backend/
  Dockerfile
  Dockerfile.dev
  main.ts
  e2e-test/
  package-lock.json
  package.json
  scripts/
  src/
    @types/
    db/
      migrations/
        20240802181855_ca-cert-version.ts
        20241014084900_identity-multiple-auth-methods.ts
        20241110032223_add-missing-oidc-org-cascade-reference.ts
        20241112082701_add-totp-support.ts
        20241119143026_add-project-descripton.ts
        20241121131344_make-identity-metadata-not-nullable-again.ts
      schemas/
      utils.ts
    ee/
      routes/
        v1/
          dynamic-secret-router.ts, external-kms-router.ts, group-router.ts, identity-project-additional-privilege-router.ts, oidc-router.ts, org-role-router.ts, project-role-router.ts, project-template-router.ts, saml-router.ts, user-additional-privilege-router.ts
        v2/
      services/
        audit-log/
        dynamic-secret-lease/
        dynamic-secret/providers/
          aws-elasticache.ts, aws-iam.ts, azure-entra-id.ts, cassandra.ts, elastic-search.ts, index.ts, ldap.ts, models.ts, mongo-atlas.ts, mongo-db.ts, rabbit-mq.ts, redis.ts, sap-ase.ts, sap-hana.ts, snowflake.ts, sql-database.ts, totp.ts
        external-kms/
        hsm/
        identity-project-additional-privilege-v2/
        identity-project-additional-privilege/
        ldap-config/
        license/
        oidc/
        permission/
          org-permission.ts, permission-dal.ts, permission-fns.ts, permission-service.ts, permission-types.ts, project-permission.ts
        rate-limit/
        scim/
        secret-scanning/secret-scanning-queue/
    lib/
      api-docs/, casl/, config/, errors/, logger/, telemetry/
    queue/
    server/
      app.ts, boot-strap-check.ts
      lib/
      plugins/
      routes/
    services/
      auth/
      identity-aws-auth/, identity-azure-auth/, identity-gcp-auth/, identity-kubernetes-auth/, identity-oidc-auth/, identity-project/, identity-token-auth/
      integration-auth/
        integration-app-list.ts, integration-auth-service.ts, integration-auth-types.ts, integration-list.ts, integration-sync-secret.ts
      integration/
      kms/
      org/
      project-membership/
      project/
      secret-v2-bridge/
      secret/
      smtp/
        smtp-service.ts
        templates/
          accessApprovalRequest.handlebars, accessSecretRequestBypassed.handlebars, emailMfa.handlebars, emailVerification.handlebars, externalImportFailed.handlebars, externalImportStarted.handlebars, externalImportSuccessful.handlebars, historicalSecretLeakIncident.handlebars, integrationSyncFailed.handlebars, newDevice.handlebars, organizationInvitation.handlebars, passwordReset.handlebars, pkiExpirationAlert.handlebars, scimUserProvisioned.handlebars, secretApprovalRequestNeedsReview.handlebars, secretLeakIncident.handlebars, secretReminder.handlebars, signupEmailVerification.handlebars, unlockAccount.handlebars, workspaceInvitation.handlebars
      totp/
      user/
      webhook/
cli/
company/
docker-compose.dev.yml
docker-compose.prod.yml
docs/
  api-reference/endpoints/secrets/
  cli/
  documentation/platform/
    dynamic-secrets/
      aws-elasticache.mdx, aws-iam.mdx, azure-entra-id.mdx, cassandra.mdx, elastic-search.mdx, ldap.mdx, mongo-atlas.mdx, mongo-db.mdx, mssql.mdx, mysql.mdx, oracle.mdx, postgresql.mdx, rabbit-mq.mdx, redis.mdx, sap-ase.mdx, sap-hana.mdx, snowflake.mdx, totp.mdx
    identities/oidc-auth/
    kms-configuration/
    mfa.mdx
    scim/
  images/
    integrations/octopus-deploy/
      integrations-octopus-deploy-add-role.png, integrations-octopus-deploy-add-to-team.png, integrations-octopus-deploy-authorize.png, integrations-octopus-deploy-copy-api-key.png, integrations-octopus-deploy-create-api-key.png, integrations-octopus-deploy-create-service-account.png, integrations-octopus-deploy-create-team.png, integrations-octopus-deploy-create.png, integrations-octopus-deploy-generate-api-key.png, integrations-octopus-deploy-integrations.png, integrations-octopus-deploy-save-team.png, integrations-octopus-deploy-sync.png, integrations-octopus-deploy-team-settings.png, integrations-octopus-deploy-user-settings.png
    mfa-authenticator.png, mfa-email.png
    platform/
  integrations/
  mint.json
  sdks/
  self-hosting/
frontend/
  next.config.js, package-lock.json, package.json, usePagination.tsx, reactQuery.tsx
  public/
  src/
    components/
      dashboard/, mfa/, navigation/, notifications/, tags/CreateTagModal/, utilities/
      v2/
        CopyButton/, CreatableSelect/, DatePicker/, FilterableSelect/, FormControl/, Pagination/, Select/components/, Tooltip/
      projects/
    context/ProjectPermissionContext/
    helpers/
    hooks/
      api/
        auditLogs/, auth/, dashboard/, dynamicSecret/, integrationAuth/, integrations/, kms/, organization/, secrets/, types.ts, users/, workspace/
    layouts/AppLayout/
    lib/schemas/
    pages/
      integrations/, login/, org/[id]/overview/, signupinvite.tsx
    styles/
    views/
      IntegrationsPage/
        IntegrationDetailsPage/components/
        IntegrationPage.utils.tsx, IntegrationsPage.tsx
        components/
          CloudIntegrationSection/, FrameworkIntegrationSection/, IntegrationsSection/
      Login/
      Org/
        AuditLogsPage/components/
        IdentityPage/components/
          IdentityClientSecretModal.tsx
          IdentityProjectsSection/
        MembersPage/
          MembersPage.tsx
          components/
            OrgGroupsTab/components/OrgGroupsSection/
            OrgIdentityTab/components/IdentitySection/
            OrgMembersTab/
        RolePage/components/
        Types/
        UserPage/
      Project/
        IdentityDetailsPage/
        KmsPage/components/
        MemberDetailsPage/components/
          MemberProjectAdditionalPrivilegeSection/
          MemberRoleDetailsSection/
        MembersPage/
          MembersPage.tsx
          components/
        RolePage/components/
        SecretApprovalPage/components/ApprovalPolicyList/
        SecretMainPage/
          SecretMainPage.tsx
          components/
            ActionBar/, CreateSecretForm/, DynamicSecretListView/, SecretDropzone/
        SecretOverviewPage/
      Settings/
        BillingSettingsPage/components/BillingDetailsTab/
        OrgSettingsPage/components/
          OrgAuthTab/, OrgEncryptionTab/, OrgWorkflowIntegrationTab/
          ProjectTemplatesTab/components/
            EditProjectTemplateSection/components/
            ProjectTemplateDetailsModal.tsx
        PersonalSettingsPage/SecuritySection/
        ProjectSettingsPage/components/
          EnvironmentSection/, ProjectGeneralTab/, ProjectNameChangeSection/, ProjectOverviewChangeSection/, SecretTagsSection/
        index.tsx
      Signup/components/UserInfoSSOStep/
helm-charts/secrets-operator/
k8-operator/
  api/v1alpha1/, config/, controllers/, packages/
npm/
otel-collector-config.yaml
prometheus.dev.yml

.env.example
@@ -74,6 +74,14 @@ CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=

OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=

OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
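These OTEL_* flags line up with the OpenTelemetry dependencies added to backend/package.json further down in this compare. Below is a minimal sketch of the prometheus export path they describe; the port, meter, and metric names are illustrative assumptions, not Infisical's actual wiring:

import { PrometheusExporter } from "@opentelemetry/exporter-metrics-otlp-proto" === undefined ? null : null; // (see note below)
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { MeterProvider } from "@opentelemetry/sdk-metrics";

// With OTEL_EXPORT_TYPE=prometheus, metrics are pulled: the exporter serves
// a /metrics endpoint (its default port is 9464) for a Prometheus scraper.
const exporter = new PrometheusExporter({ port: 9464 });
const meterProvider = new MeterProvider({ readers: [exporter] });

const meter = meterProvider.getMeter("infisical-backend");
const requestCounter = meter.createCounter("api_requests_total", {
  description: "Total API requests served" // metric name is a placeholder
});
requestCounter.add(1, { route: "/api/v1/secrets" });

The OTLP variables (OTEL_EXPORT_OTLP_ENDPOINT, OTEL_OTLP_PUSH_INTERVAL, and the collector basic-auth pair) cover the push-based alternative via @opentelemetry/exporter-metrics-otlp-proto instead.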
.github/workflows (CLI release workflow)
@@ -10,8 +10,7 @@ on:

permissions:
  contents: write
  # packages: write
  # issues: write

jobs:
  cli-integration-tests:
    name: Run tests before deployment

@@ -26,6 +25,63 @@ jobs:
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-20.04
    env:
      working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

      - name: Print version
        run: echo ${{ env.CLI_VERSION }}

      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts

      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack

      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-20.04
    needs: [cli-integration-tests]
.gitignore (vendored)
@@ -71,3 +71,5 @@ frontend-build
cli/infisical-merge
cli/test/infisical-merge
/backend/binary

/npm/bin
.husky pre-commit hook
@@ -1,6 +1,12 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
  echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before committing.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
  exit 1
fi

npx lint-staged

infisical scan git-changes --staged -v
Dockerfile.fips.standalone-infisical
@@ -69,13 +69,21 @@ RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

WORKDIR /app

-# Required for pkcs11js
+# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -91,13 +99,21 @@ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

-# Required for pkcs11js
+# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -108,13 +124,24 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production

-# Install necessary packages
+# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
    ca-certificates \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
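The FreeTDS entry written to /etc/odbcinst.ini is what the backend's new odbc dependency resolves by driver name at runtime (the SAP ASE dynamic secret provider needs it). A minimal sketch in TypeScript, with host, port, and credentials as placeholder values:

import odbc from "odbc";

// DSN-less connection string: "FreeTDS" is looked up in /etc/odbcinst.ini.
// Server, port, and credentials below are placeholders, not real endpoints.
const connection = await odbc.connect(
  "Driver={FreeTDS};Server=sap-ase.example.internal;Port=5000;UID=app;PWD=changeme;TDS_Version=5.0"
);
const result = await connection.query("SELECT 1 AS ok");
console.log(result);
await connection.close();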
Dockerfile.standalone-infisical
@@ -72,8 +72,16 @@ RUN addgroup --system --gid 1001 nodejs \

WORKDIR /app

-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
+# Install all required dependencies for build
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -88,8 +96,19 @@ FROM base AS backend-runner

WORKDIR /app

-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
+# Install all required dependencies for runtime
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+# Configure ODBC
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -100,11 +119,32 @@ RUN mkdir frontend-build

# Production stage
FROM base AS production

RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
  'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
  && apk add infisical=0.31.1 && apk add --no-cache git

WORKDIR /

+# Install all required runtime dependencies
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev \
+    bash \
+    curl \
+    git
+
+# Configure ODBC in production
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

@@ -127,7 +167,6 @@ ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

-WORKDIR /

COPY --from=backend-runner /app /backend

@@ -149,4 +188,4 @@ EXPOSE 443

USER non-root-user

-CMD ["./standalone-entrypoint.sh"]
+CMD ["./standalone-entrypoint.sh"]
Makefile
@@ -10,6 +10,9 @@ up-dev:

up-dev-ldap:
	docker compose -f docker-compose.dev.yml --profile ldap up --build

+up-dev-metrics:
+	docker compose -f docker-compose.dev.yml --profile metrics up --build

up-prod:
	docker-compose -f docker-compose.prod.yml up --build

@@ -27,4 +30,3 @@ reviewable-api:
	npm run type:check

reviewable: reviewable-ui reviewable-api
README.md
@@ -14,15 +14,6 @@
    <a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
</h4>

-<p align="center">
-  <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
-    <img src=".github/images/deploy-to-aws.png" width="137" />
-  </a>
-  <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
-     <img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
-  </a>
-</p>

<h4 align="center">
  <a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
    <img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />
backend/Dockerfile
@@ -9,6 +9,15 @@ RUN apk --update add \
    make \
    g++

+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev

COPY package*.json ./
RUN npm ci --only-production

@@ -28,6 +37,17 @@ RUN apk --update add \
    make \
    g++

+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .
backend/Dockerfile.dev
@@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

-# install build dependencies including python3
+# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
    alpine-sdk \
    autoconf \
@@ -19,7 +19,19 @@ RUN apk --update add \
    make \
    g++

+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# build and install SoftHSM2

RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
e2e-test queue mock (mockQueue)
@@ -10,12 +10,15 @@ export const mockQueue = (): TQueueServiceFactory => {
    queue: async (name, jobData) => {
      job[name] = jobData;
    },
    queuePg: async () => {},
    initialize: async () => {},
    shutdown: async () => undefined,
    stopRepeatableJob: async () => true,
    start: (name, jobFn) => {
      queues[name] = jobFn;
      workers[name] = jobFn;
    },
    startPg: async () => {},
    listen: (name, event) => {
      events[name] = event;
    },
e2e-test SMTP mock (mockSmtpServer)
@@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
  return {
    sendMail: async (data) => {
      storage.push(data);
    },
    verify: async () => {
      return true;
    }
  };
};
backend/e2e-test/routes/v3/secret-recursive.spec.ts (new file, 86 lines)
@@ -0,0 +1,86 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret Recursive Testing", async () => {
  const projectId = seedData1.projectV3.id;
  const folderAndSecretNames = [
    { name: "deep1", path: "/", expectedSecretCount: 4 },
    { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
    { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
    { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
  ];

  beforeAll(async () => {
    const rootFolderIds: string[] = [];
    for (const folder of folderAndSecretNames) {
      // eslint-disable-next-line no-await-in-loop
      const createdFolder = await createFolder({
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: folder.path,
        name: folder.name
      });

      if (folder.path === "/") {
        rootFolderIds.push(createdFolder.id);
      }
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2({
        secretPath: folder.path,
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        key: folder.name,
        value: folder.name
      });
    }

    return async () => {
      await Promise.all(
        rootFolderIds.map((id) =>
          deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id,
            workspaceId: projectId,
            environmentSlug: "prod"
          })
        )
      );

      await deleteSecretV2({
        authToken: jwtAuthToken,
        secretPath: "/",
        workspaceId: projectId,
        environmentSlug: "prod",
        key: folderAndSecretNames[0].name
      });
    };
  });

  test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
    const secrets = await getSecretsV2({
      authToken: jwtAuthToken,
      secretPath: path,
      workspaceId: projectId,
      environmentSlug: "prod",
      recursive: true
    });

    expect(secrets.secrets.length).toEqual(expectedSecretCount);
    expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
      folderAndSecretNames
        .filter((el) => el.path.startsWith(path))
        .sort((a, b) => a.name.localeCompare(b.name))
        .map((el) =>
          expect.objectContaining({
            secretKey: el.name,
            secretValue: el.name
          })
        )
    );
  });
});
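The expected counts follow directly from the seeded layout: a recursive fetch from "/" matches all four seeded paths, "/deep1" matches the two nested beneath it ("/deep1" itself and "/deep1/deep2"), and "/deep1/deep2" and "/deep2" each match only themselves, since the assertion filters folderAndSecretNames by el.path.startsWith(path).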
backend/e2e-test/testUtils/secrets.ts
@@ -97,6 +97,7 @@ export const getSecretsV2 = async (dto: {
  environmentSlug: string;
  secretPath: string;
  authToken: string;
+ recursive?: boolean;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",
@@ -109,7 +110,8 @@ export const getSecretsV2 = async (dto: {
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
-     include_imports: "true"
+     include_imports: "true",
+     recursive: String(dto.recursive || false)
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
backend/e2e-test environment setup
@@ -53,13 +53,13 @@ export default {
      extension: "ts"
    });
    const smtp = mockSmtpServer();
-   const queue = queueServiceFactory(cfg.REDIS_URL);
+   const queue = queueServiceFactory(cfg.REDIS_URL, cfg.DB_CONNECTION_URI);
    const keyStore = keyStoreFactory(cfg.REDIS_URL);

    const hsmModule = initializeHsmModule();
    hsmModule.initialize();

-   const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
+   const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });

    // @ts-expect-error type
    globalThis.testServer = server;
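queueServiceFactory now receives DB_CONNECTION_URI alongside REDIS_URL, which matches the queuePg/startPg stubs added to mockQueue above and the pg-boss dependency added to backend/package.json below. A rough sketch of the Postgres-backed queue pattern pg-boss provides; the queue name and payload here are illustrative:

import PgBoss from "pg-boss";

const boss = new PgBoss(process.env.DB_CONNECTION_URI as string);
await boss.start();

// pg-boss v10 requires queues to be created before use.
await boss.createQueue("example-jobs");

// Producer: persist a job in Postgres rather than Redis.
await boss.send("example-jobs", { secretId: "123" });

// Consumer: v10 handlers receive a batch (array) of jobs.
await boss.work("example-jobs", async ([job]) => {
  console.log("processing", job.data);
});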
backend/package-lock.json (generated, 1858 lines changed): diff suppressed because it is too large.
backend/package.json
@@ -50,6 +50,7 @@
    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
    "migration:new": "tsx ./scripts/create-migration.ts",
    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
@@ -58,6 +59,7 @@
    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
    "migrate:org": "tsx ./scripts/migrate-organization.ts",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@@ -130,14 +132,25 @@
    "@fastify/multipart": "8.3.0",
    "@fastify/passport": "^2.4.0",
    "@fastify/rate-limit": "^9.0.0",
    "@fastify/request-context": "^5.1.0",
    "@fastify/session": "^10.7.0",
    "@fastify/swagger": "^8.14.0",
    "@fastify/swagger-ui": "^2.1.0",
    "@google-cloud/kms": "^4.5.0",
    "@node-saml/passport-saml": "^4.0.4",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/plugin-retry": "^5.0.5",
    "@octokit/rest": "^20.0.2",
    "@octokit/webhooks-types": "^7.3.1",
    "@octopusdeploy/api-client": "^3.4.1",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
    "@opentelemetry/exporter-prometheus": "^0.55.0",
    "@opentelemetry/instrumentation": "^0.55.0",
    "@opentelemetry/resources": "^1.28.0",
    "@opentelemetry/sdk-metrics": "^1.28.0",
    "@opentelemetry/semantic-conventions": "^1.27.0",
    "@peculiar/asn1-schema": "^2.3.8",
    "@peculiar/x509": "^1.12.1",
    "@serdnam/pino-cloudwatch-transport": "^1.0.4",
@@ -178,14 +191,17 @@
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.4",
    "nodemailer": "^6.9.9",
    "odbc": "^2.4.9",
    "openid-client": "^5.6.5",
    "ora": "^7.0.1",
    "oracledb": "^6.4.0",
    "otplib": "^12.0.1",
    "passport-github": "^1.1.0",
    "passport-gitlab2": "^5.0.0",
    "passport-google-oauth20": "^2.0.0",
    "passport-ldapauth": "^3.0.1",
    "pg": "^8.11.3",
    "pg-boss": "^10.1.5",
    "pg-query-stream": "^4.5.3",
    "picomatch": "^3.0.1",
    "pino": "^8.16.2",
backend/scripts/migrate-organization.ts
@@ -8,61 +8,80 @@ const prompt = promptSync({
  sigint: true
});

+const sanitizeInputParam = (value: string) => {
+  // Escape double quotes and wrap the entire value in double quotes
+  if (value) {
+    return `"${value.replace(/"/g, '\\"')}"`;
+  }
+  return '""';
+};

const exportDb = () => {
-  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
-  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
-  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
-  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
-  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
+  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
+  const exportPort = sanitizeInputParam(
+    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
+  );
+  const exportUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
+  );
+  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
+  const exportDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
+  );

  // we do not include the audit_log and secret_sharing entries
  execSync(
-    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
      __dirname,
-      "../src/db/dump.sql"
+      "../src/db/backup.dump"
    )}`,
    { stdio: "inherit" }
  );
};

const importDbForOrg = () => {
-  const importHost = prompt("Enter your Postgres Host to migrate to: ");
-  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
-  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
-  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
-  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
-  const orgId = prompt("Enter the organization ID to migrate: ");
+  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
+  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
+  const importUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
+  );
+  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
+  const importDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
+  );
+  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));

-  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
+  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
    console.log("File not found, please export the database first.");
    return;
  }

  execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
      __dirname,
-      "../src/db/dump.sql"
-    )}`
+      "../src/db/backup.dump"
+    )}`,
+    { maxBuffer: 1024 * 1024 * 4096 }
  );

  execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
  );

  // delete global/instance-level resources not relevant to the organization to migrate
  // users
  execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
  );

  // identities
  execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
  );

  // reset slack configuration in superAdmin
  execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
  );

  console.log("Organization migrated successfully.");
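To make the new escaping concrete, a few illustrative inputs and the exact shell tokens sanitizeInputParam returns for them:

// Each value comes back wrapped in double quotes, with embedded quotes escaped,
// so it is interpolated into the shell command as a single quoted token:
sanitizeInputParam("db.internal"); // returns "db.internal" (quotes included)
sanitizeInputParam('pa"ss');       // returns "pa\"ss" (embedded quote escaped)
sanitizeInputParam("");            // returns "" (an empty quoted token, never a bare gap)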
backend/src/@types/fastify-request-context.d.ts (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}
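This module augmentation types a single reqId slot in @fastify/request-context's store. A minimal sketch of how that slot might be populated and read; the hook wiring here is illustrative, not necessarily how Infisical's server bootstrap does it:

import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";

const app = Fastify();
await app.register(fastifyRequestContext);

// Stash the request id once, early in the lifecycle...
app.addHook("onRequest", async (req) => {
  requestContext.set("reqId", req.id);
});

// ...then read it anywhere downstream (e.g. in log lines) without
// threading it through function arguments.
app.get("/health", async () => ({ reqId: requestContext.get("reqId") }));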
backend/src/@types/fastify-zod.d.ts (vendored)
@@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
-import { Logger } from "pino";

+import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";

declare global {
@@ -8,7 +8,7 @@ declare global {
    RawServerDefault,
    RawRequestDefaultExpression<RawServerDefault>,
    RawReplyDefaultExpression<RawServerDefault>,
-   Readonly<Logger>,
+   Readonly<CustomLogger>,
    ZodTypeProvider
  >;
backend/src/@types/fastify.d.ts (vendored)
@@ -1,5 +1,7 @@
import "fastify";

import { Redis } from "ioredis";

import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
@@ -79,6 +81,7 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
@@ -86,6 +89,10 @@ import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";

declare module "fastify" {
  interface Session {
    callbackPort: string;
  }

  interface FastifyRequest {
    realIp: string;
    // used for mfa session authentication
@@ -114,6 +121,7 @@ declare module "fastify" {
  }

  interface FastifyInstance {
    redis: Redis;
    services: {
      login: TAuthLoginFactory;
      password: TAuthPasswordFactory;
@@ -193,6 +201,7 @@ declare module "fastify" {
      migration: TExternalMigrationServiceFactory;
      externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
      projectTemplate: TProjectTemplateServiceFactory;
      totp: TTotpServiceFactory;
    };
    // this is exclusive use for middlewares in which we need to inject data
    // everywhere else access using service layer
backend/src/@types/knex.d.ts (vendored)
@@ -314,6 +314,9 @@ import {
  TSuperAdmin,
  TSuperAdminInsert,
  TSuperAdminUpdate,
  TTotpConfigs,
  TTotpConfigsInsert,
  TTotpConfigsUpdate,
  TTrustedIps,
  TTrustedIpsInsert,
  TTrustedIpsUpdate,
@@ -826,5 +829,6 @@ declare module "knex/types/tables" {
      TProjectTemplatesInsert,
      TProjectTemplatesUpdate
    >;
    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
  }
}
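Registering the composite table type is what makes knex calls against totp_configs typed end to end. An illustrative query; the helper name is hypothetical, not part of this compare:

import { Knex } from "knex";

import { TableName, TTotpConfigs } from "@app/db/schemas";

// Hypothetical helper: the registration above types the result as TTotpConfigs | undefined.
const getTotpConfigByUser = async (db: Knex, userId: string): Promise<TTotpConfigs | undefined> =>
  db(TableName.TotpConfig).where({ userId }).first();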
backend/src/db/migrations/20240802181855_ca-cert-version.ts
@@ -64,23 +64,25 @@ export async function up(knex: Knex): Promise<void> {
  }

  if (await knex.schema.hasTable(TableName.Certificate)) {
-   await knex.schema.alterTable(TableName.Certificate, (t) => {
-     t.uuid("caCertId").nullable();
-     t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
-   });
+   const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
+   if (!hasCaCertIdColumn) {
+     await knex.schema.alterTable(TableName.Certificate, (t) => {
+       t.uuid("caCertId").nullable();
+       t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
+     });

-   await knex.raw(`
+     await knex.raw(`
      UPDATE "${TableName.Certificate}" cert
      SET "caCertId" = (
        SELECT caCert.id
        FROM "${TableName.CertificateAuthorityCert}" caCert
        WHERE caCert."caId" = cert."caId"
-     )
-   `);
+     )`);

-   await knex.schema.alterTable(TableName.Certificate, (t) => {
-     t.uuid("caCertId").notNullable().alter();
-   });
+     await knex.schema.alterTable(TableName.Certificate, (t) => {
+       t.uuid("caCertId").notNullable().alter();
+     });
+   }
  }
}
backend/src/db/migrations/20241014084900_identity-multiple-auth-methods.ts
@@ -2,7 +2,7 @@ import { Knex } from "knex";

import { TableName } from "../schemas";

-const BATCH_SIZE = 30_000;
+const BATCH_SIZE = 10_000;

export async function up(knex: Knex): Promise<void> {
  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
@@ -12,7 +12,18 @@ export async function up(knex: Knex): Promise<void> {
      t.string("authMethod").nullable();
    });

-   let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+   // first we remove identities without auth method that is unused
+   // ! We delete all access tokens where the identity has no auth method set!
+   // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
+   await knex(TableName.IdentityAccessToken)
+     .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
+     .whereNull(`${TableName.Identity}.authMethod`)
+     .delete();
+
+   let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+     .whereNull("authMethod")
+     .limit(BATCH_SIZE)
+     .select("id");
    let totalUpdated = 0;

    do {
@@ -33,24 +44,15 @@ export async function up(knex: Knex): Promise<void> {
      });

      // eslint-disable-next-line no-await-in-loop
-     nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+     nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+       .whereNull("authMethod")
+       .limit(BATCH_SIZE)
+       .select("id");

      totalUpdated += batchIds.length;
      console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
    } while (nullableAccessTokens.length > 0);

-   // ! We delete all access tokens where the identity has no auth method set!
-   // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
-   await knex(TableName.IdentityAccessToken)
-     .whereNotExists((queryBuilder) => {
-       void queryBuilder
-         .select("id")
-         .from(TableName.Identity)
-         .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
-         .whereNotNull("authMethod");
-     })
-     .delete();

    // Finally we set the authMethod to notNullable after populating the column.
    // This will fail if the data is not populated correctly, so it's safe.
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
backend/src/db/migrations/20241110032223_add-missing-oidc-org-cascade-reference.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization);
    });
  }
}
backend/src/db/migrations/20241112082701_add-totp-support.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
    await knex.schema.createTable(TableName.TotpConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("userId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.boolean("isVerified").defaultTo(false).notNullable();
      t.binary("encryptedRecoveryCodes").notNullable();
      t.binary("encryptedSecret").notNullable();
      t.timestamps(true, true, true);
      t.unique("userId");
    });

    await createOnUpdateTrigger(knex, TableName.TotpConfig);
  }

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (!doesOrgMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (!doesUserSelectedMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.TotpConfig);
  await knex.schema.dropTableIfExists(TableName.TotpConfig);

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (doesOrgMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (doesUserSelectedMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });
}
backend/src/db/migrations/20241119143026_add-project-descripton.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("description");
    });
  }
}
backend/src/db/migrations/20241121131344_make-identity-metadata-not-nullable-again.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex(TableName.IdentityMetadata).whereNull("value").delete();
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).alter();
    });
  }
}
backend/src/db/schemas/index.ts
@@ -106,6 +106,7 @@ export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";
export * from "./user-actions";
export * from "./user-aliases";
backend/src/db/schemas/kms-root-config.ts
@@ -12,7 +12,7 @@ import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
  id: z.string().uuid(),
  encryptedRootKey: zodBuffer,
- encryptionStrategy: z.string(),
+ encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date()
});
backend/src/db/schemas/models.ts
@@ -117,6 +117,7 @@ export enum TableName {
  ExternalKms = "external_kms",
  InternalKms = "internal_kms",
  InternalKmsKeyVersion = "internal_kms_key_version",
  TotpConfig = "totp_configs",
  // @depreciated
  KmsKeyVersion = "kms_key_versions",
  WorkflowIntegrations = "workflow_integrations",
backend/src/db/schemas/organizations.ts
@@ -21,7 +21,8 @@ export const OrganizationsSchema = z.object({
  kmsDefaultKeyId: z.string().uuid().nullable().optional(),
  kmsEncryptedDataKey: zodBuffer.nullable().optional(),
  defaultMembershipRole: z.string().default("member"),
- enforceMfa: z.boolean().default(false)
+ enforceMfa: z.boolean().default(false),
+ selectedMfaMethod: z.string().nullable().optional()
});

export type TOrganizations = z.infer<typeof OrganizationsSchema>;
backend/src/db/schemas/projects.ts
@@ -23,7 +23,8 @@ export const ProjectsSchema = z.object({
  kmsCertificateKeyId: z.string().uuid().nullable().optional(),
  auditLogsRetentionDays: z.number().nullable().optional(),
  kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
- kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
+ kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
+ description: z.string().nullable().optional()
});

export type TProjects = z.infer<typeof ProjectsSchema>;
backend/src/db/schemas/totp-configs.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const TotpConfigsSchema = z.object({
  id: z.string().uuid(),
  userId: z.string().uuid(),
  isVerified: z.boolean().default(false),
  encryptedRecoveryCodes: zodBuffer,
  encryptedSecret: zodBuffer,
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;
backend/src/db/schemas/users.ts
@@ -26,7 +26,8 @@ export const UsersSchema = z.object({
  consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
  isLocked: z.boolean().default(false).nullable().optional(),
  temporaryLockDateEnd: z.date().nullable().optional(),
- consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
+ consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
+ selectedMfaMethod: z.string().nullable().optional()
});

export type TUsers = z.infer<typeof UsersSchema>;
backend/src/db/utils.ts
@@ -2,6 +2,9 @@ import { Knex } from "knex";

import { TableName } from "./schemas";

+interface PgTriggerResult {
+  rows: Array<{ exists: boolean }>;
+}
export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
  knex.schema.createTable(tableName, (table) => {
    table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
@@ -28,13 +31,26 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;

// we would be using this to apply updatedAt where ever we wanta
// remember to set `timestamps(true,true,true)` before this on schema
-export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
-  knex.raw(`
-    CREATE TRIGGER "${tableName}_updatedAt"
-    BEFORE UPDATE ON ${tableName}
-    FOR EACH ROW
-    EXECUTE PROCEDURE on_update_timestamp();
-  `);
+export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
+  const triggerExists = await knex.raw<PgTriggerResult>(`
+    SELECT EXISTS (
+      SELECT 1
+      FROM pg_trigger
+      WHERE tgname = '${tableName}_updatedAt'
+    );
+  `);
+
+  if (!triggerExists?.rows?.[0]?.exists) {
+    return knex.raw(`
+      CREATE TRIGGER "${tableName}_updatedAt"
+      BEFORE UPDATE ON ${tableName}
+      FOR EACH ROW
+      EXECUTE PROCEDURE on_update_timestamp();
+    `);
+  }
+
+  return null;
+};

export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
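Note that createOnUpdateTrigger is now async and returns null when the trigger already exists, so migrations must await it, as the new TOTP migration above does with await createOnUpdateTrigger(knex, TableName.TotpConfig).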
@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";

@@ -8,6 +7,7 @@ import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -48,15 +48,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
        .nullable(),
      path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
      environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
      name: z
        .string()
        .describe(DYNAMIC_SECRETS.CREATE.name)
        .min(1)
        .toLowerCase()
        .max(64)
        .refine((v) => slugify(v) === v, {
          message: "Slug must be a valid"
        })
      name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name)
    }),
    response: {
      200: z.object({
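The recurring theme of this changeset is replacing hand-rolled `z.string().refine(...)` slug validators with a shared `slugSchema` factory. Its implementation is not shown in this diff; a plausible reconstruction, inferred from the call sites (`min`, `max`, `field` options plus the lowercase and slugify checks they replace), would look roughly like this:

import slugify from "@sindresorhus/slugify";
import { z } from "zod";

// Hypothetical reconstruction of slugSchema from @app/server/lib/schemas —
// inferred from call sites in this diff, not copied from the repo.
export const slugSchema = ({ min = 1, max = 64, field = "Slug" } = {}) =>
  z
    .string()
    .trim()
    .min(min, { message: `${field} must be at least ${min} character(s)` })
    .max(max, { message: `${field} must be at most ${max} character(s)` })
    .refine((v) => slugify(v, { lowercase: true }) === v, {
      message: `${field} must be a valid slug`
    });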
@@ -4,9 +4,15 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
  ExternalKmsAwsSchema,
  ExternalKmsGcpCredentialSchema,
  ExternalKmsGcpSchema,
  ExternalKmsInputSchema,
  ExternalKmsInputUpdateSchema
  ExternalKmsInputUpdateSchema,
  KmsGcpKeyFetchAuthType,
  KmsProviders,
  TExternalKmsGcpCredentialSchema
} from "@app/ee/services/external-kms/providers/model";
import { NotFoundError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -44,7 +50,8 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
    statusDetails: true,
    provider: true
  }).extend({
    providerInput: ExternalKmsAwsSchema
    // for GCP, we don't return the credential object as it is sensitive data that should not be exposed
    providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
  })
});

@@ -286,4 +293,67 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
      return { externalKms };
    }
  });

  server.route({
    method: "POST",
    url: "/gcp/keys",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.discriminatedUnion("authMethod", [
        z.object({
          authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
          region: z.string().trim().min(1),
          credential: ExternalKmsGcpCredentialSchema
        }),
        z.object({
          authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
          region: z.string().trim().min(1),
          kmsId: z.string().trim().min(1)
        })
      ]),
      response: {
        200: z.object({
          keys: z.string().array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { region, authMethod } = req.body;
      let credentialJson: TExternalKmsGcpCredentialSchema | undefined;

      if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
        credentialJson = req.body.credential;
      } else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
        const externalKms = await server.services.externalKms.findById({
          actor: req.permission.type,
          actorId: req.permission.id,
          actorAuthMethod: req.permission.authMethod,
          actorOrgId: req.permission.orgId,
          id: req.body.kmsId
        });

        if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
          throw new NotFoundError({ message: "KMS not found or not of type GCP" });
        }

        credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
      }

      if (!credentialJson) {
        throw new NotFoundError({
          message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
        });
      }

      const results = await server.services.externalKms.fetchGcpKeys({
        credential: credentialJson,
        gcpRegion: region
      });

      return results;
    }
  });
};
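The new endpoint accepts either a raw GCP service-account credential or a reference to an already-configured KMS, discriminated on `authMethod`. A hedged sketch of a client call; the host, route prefix, token, and the `"kms"` string are assumptions (the enum values of `KmsGcpKeyFetchAuthType` are not shown in this diff):

// Hypothetical client call — host, route prefix, and token are placeholders.
const token = "<api-token>";
const existingKmsId = "<kms-id>";

const res = await fetch("https://app.infisical.com/api/v1/external-kms/gcp/keys", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
  body: JSON.stringify({
    authMethod: "kms", // assumed value of KmsGcpKeyFetchAuthType.Kms
    region: "us-central1",
    kmsId: existingKmsId
  })
});
const { keys } = (await res.json()) as { keys: string[] };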
@@ -1,8 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { GROUPS } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

@@ -14,15 +14,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
    schema: {
      body: z.object({
        name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
        slug: z
          .string()
          .min(5)
          .max(36)
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid slug"
          })
          .optional()
          .describe(GROUPS.CREATE.slug),
        slug: slugSchema({ min: 5, max: 36 }).optional().describe(GROUPS.CREATE.slug),
        role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
      }),
      response: {
@@ -100,14 +92,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
      body: z
        .object({
          name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
          slug: z
            .string()
            .min(5)
            .max(36)
            .refine((v) => slugify(v) === v, {
              message: "Slug must be a valid slug"
            })
            .describe(GROUPS.UPDATE.slug),
          slug: slugSchema({ min: 5, max: 36 }).describe(GROUPS.UPDATE.slug),
          role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
        })
        .partial(),
@@ -8,6 +8,7 @@ import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import {
  ProjectPermissionSchema,
@@ -33,17 +34,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
      body: z.object({
        identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
        projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
        slug: z
          .string()
          .min(1)
          .max(60)
          .trim()
          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid slug"
          })
          .optional()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
        slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
        permissions: ProjectPermissionSchema.array()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
          .optional(),
@@ -77,7 +68,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        ...req.body,
        slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
        slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
        isTemporary: false,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
@@ -103,17 +94,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
      body: z.object({
        identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
        projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
        slug: z
          .string()
          .min(1)
          .max(60)
          .trim()
          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid slug"
          })
          .optional()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
        slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
        permissions: ProjectPermissionSchema.array()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
          .optional(),
@@ -159,7 +140,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        ...req.body,
        slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
        slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
        isTemporary: true,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore-error this is valid ts
@@ -189,16 +170,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
        projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.projectSlug),
        privilegeDetails: z
          .object({
            slug: z
              .string()
              .min(1)
              .max(60)
              .trim()
              .refine((val) => val.toLowerCase() === val, "Must be lowercase")
              .refine((v) => slugify(v) === v, {
                message: "Slug must be a valid slug"
              })
              .describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
            slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
            permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
            privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
              IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.privilegePermission
@@ -9,7 +9,7 @@
import { Authenticator, Strategy } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Redis } from "ioredis";
import { z } from "zod";

import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
@@ -21,7 +20,6 @@ import { AuthMode } from "@app/services/auth/auth-type";

export const registerOidcRouter = async (server: FastifyZodProvider) => {
  const appCfg = getConfig();
  const redis = new Redis(appCfg.REDIS_URL);
  const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });

  /*
@@ -30,7 +28,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
  - Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
  */
  const redisStore = new RedisStore({
    client: redis,
    client: server.redis,
    prefix: "oidc-session:",
    ttl: 600 // 10 minutes
  });
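The router now reuses a shared `server.redis` connection instead of opening its own ioredis client per route registration. A plausible sketch of how such a decorator could be registered at server start-up; this wiring is an assumption and is not shown in the diff:

import { FastifyInstance } from "fastify";
import { Redis } from "ioredis";

// Hypothetical wiring: expose one shared ioredis client on the instance.
export const registerRedis = (server: FastifyInstance, redisUrl: string) => {
  const redis = new Redis(redisUrl);
  server.decorate("redis", redis);
  server.addHook("onClose", async () => {
    await redis.quit(); // close the shared connection on shutdown
  });
};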
@@ -1,8 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

import { OrgMembershipRole, OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

@@ -18,17 +18,10 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
        organizationId: z.string().trim()
      }),
      body: z.object({
        slug: z
          .string()
          .min(1)
          .trim()
          .refine(
            (val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid"
          }),
        slug: slugSchema({ min: 1, max: 64 }).refine(
          (val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
          "Please choose a different slug, the slug you have entered is reserved"
        ),
        name: z.string().trim(),
        description: z.string().trim().optional(),
        permissions: z.any().array()
@@ -94,17 +87,13 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
        roleId: z.string().trim()
      }),
      body: z.object({
        slug: z
          .string()
          .trim()
          .optional()
          .refine(
            (val) => typeof val !== "undefined" && !Object.keys(OrgMembershipRole).includes(val),
            "Please choose a different slug, the slug you have entered is reserved."
          )
          .refine((val) => typeof val === "undefined" || slugify(val) === val, {
            message: "Slug must be a valid"
          }),
        // TODO: Switch to slugSchema after verifying correct methods with Akhil - Omar 11/24
        slug: slugSchema({ min: 1, max: 64 })
          .refine(
            (val) => !Object.keys(OrgMembershipRole).includes(val),
            "Please choose a different slug, the slug you have entered is reserved."
          )
          .optional(),
        name: z.string().trim().optional(),
        description: z.string().trim().optional(),
        permissions: z.any().array().optional()
@@ -1,5 +1,4 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
@@ -9,6 +8,7 @@ import {
} from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -32,18 +32,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
        projectSlug: z.string().trim().describe(PROJECT_ROLE.CREATE.projectSlug)
      }),
      body: z.object({
        slug: z
          .string()
          .toLowerCase()
          .trim()
          .min(1)
          .refine(
            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid"
          })
          .describe(PROJECT_ROLE.CREATE.slug),
        slug: slugSchema({ max: 64 })
          .refine(
            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .describe(PROJECT_ROLE.CREATE.slug),
        name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
        description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
@@ -94,21 +87,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
        roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
      }),
      body: z.object({
        slug: z
          .string()
          .toLowerCase()
          .trim()
          .optional()
          .describe(PROJECT_ROLE.UPDATE.slug)
          .refine(
            (val) =>
              typeof val === "undefined" ||
              !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .refine((val) => typeof val === "undefined" || slugify(val) === val, {
            message: "Slug must be a valid"
          }),
        slug: slugSchema({ max: 64 })
          .refine(
            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .describe(PROJECT_ROLE.UPDATE.slug)
          .optional(),
        name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
        description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
        permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
@@ -8,22 +7,13 @@ import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-tem
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";

const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

const SlugSchema = z
  .string()
  .trim()
  .min(1)
  .max(32)
  .refine((val) => val.toLowerCase() === val, "Must be lowercase")
  .refine((v) => slugify(v) === v, {
    message: "Must be valid slug format"
  });

const isReservedRoleSlug = (slug: string) =>
  Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);

@@ -34,14 +24,14 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
  roles: z
    .object({
      name: z.string().trim().min(1),
      slug: SlugSchema,
      slug: slugSchema(),
      permissions: UnpackedPermissionSchema.array()
    })
    .array(),
  environments: z
    .object({
      name: z.string().trim().min(1),
      slug: SlugSchema,
      slug: slugSchema(),
      position: z.number().min(1)
    })
    .array()
@@ -50,7 +40,7 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
const ProjectTemplateRolesSchema = z
  .object({
    name: z.string().trim().min(1),
    slug: SlugSchema,
    slug: slugSchema(),
    permissions: ProjectPermissionV2Schema.array()
  })
  .array()
@@ -78,7 +68,7 @@ const ProjectTemplateRolesSchema = z
const ProjectTemplateEnvironmentsSchema = z
  .object({
    name: z.string().trim().min(1),
    slug: SlugSchema,
    slug: slugSchema(),
    position: z.number().min(1)
  })
  .array()
@@ -188,9 +178,11 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
    schema: {
      description: "Create a project template.",
      body: z.object({
        name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
          message: `The requested project template name is reserved.`
        }).describe(ProjectTemplates.CREATE.name),
        name: slugSchema({ field: "name" })
          .refine((val) => !isInfisicalProjectTemplate(val), {
            message: `The requested project template name is reserved.`
          })
          .describe(ProjectTemplates.CREATE.name),
        description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
        roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
        environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
@@ -230,9 +222,10 @@
      description: "Update a project template.",
      params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
      body: z.object({
        name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
          message: `The requested project template name is reserved.`
        })
        name: slugSchema({ field: "name" })
          .refine((val) => !isInfisicalProjectTemplate(val), {
            message: `The requested project template name is reserved.`
          })
          .optional()
          .describe(ProjectTemplates.UPDATE.name),
        description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),
@@ -122,6 +122,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
        },
        `email: ${email} firstName: ${profile.firstName as string}`
      );

      throw new Error("Invalid saml request. Missing email or first name");
    }

    const userMetadata = Object.keys(profile.attributes || {})
@@ -7,6 +7,7 @@ import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/pr
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -21,17 +22,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
    schema: {
      body: z.object({
        projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
        slug: z
          .string()
          .min(1)
          .max(60)
          .trim()
          .refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid slug"
          })
          .optional()
          .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
        slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
        permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
        type: z.discriminatedUnion("isTemporary", [
          z.object({
@@ -87,15 +78,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
      }),
      body: z
        .object({
          slug: z
            .string()
            .max(60)
            .trim()
            .refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
            .refine((v) => slugify(v) === v, {
              message: "Slug must be a valid slug"
            })
            .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
          slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
          permissions: ProjectPermissionV2Schema.array()
            .optional()
            .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
@@ -7,6 +7,7 @@ import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-p
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -28,17 +29,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
      body: z.object({
        identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
        projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
        slug: z
          .string()
          .min(1)
          .max(60)
          .trim()
          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid slug"
          })
          .optional()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
        slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
        permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
        type: z.discriminatedUnion("isTemporary", [
          z.object({
@@ -100,16 +91,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
        id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
      }),
      body: z.object({
        slug: z
          .string()
          .min(1)
          .max(60)
          .trim()
          .refine((val) => val.toLowerCase() === val, "Must be lowercase")
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid slug"
          })
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
        slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
        permissions: ProjectPermissionV2Schema.array()
          .optional()
          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
@@ -1,11 +1,11 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -29,18 +29,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
        projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
      }),
      body: z.object({
        slug: z
          .string()
          .toLowerCase()
          .trim()
          .min(1)
          .refine(
            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .refine((v) => slugify(v) === v, {
            message: "Slug must be a valid"
          })
          .describe(PROJECT_ROLE.CREATE.slug),
        slug: slugSchema({ min: 1, max: 64 })
          .refine(
            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .describe(PROJECT_ROLE.CREATE.slug),
        name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
        description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
@@ -90,21 +83,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
        roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
      }),
      body: z.object({
        slug: z
          .string()
          .toLowerCase()
          .trim()
          .optional()
          .describe(PROJECT_ROLE.UPDATE.slug)
          .refine(
            (val) =>
              typeof val === "undefined" ||
              !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .refine((val) => typeof val === "undefined" || slugify(val) === val, {
            message: "Slug must be a valid"
          }),
        slug: slugSchema({ min: 1, max: 64 })
          .refine(
            (val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
            "Please choose a different slug, the slug you have entered is reserved"
          )
          .optional()
          .describe(PROJECT_ROLE.UPDATE.slug),
        name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
        description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
        permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
@@ -1,6 +1,7 @@
import { RawAxiosRequestHeaders } from "axios";

import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@@ -20,27 +21,130 @@ type TAuditLogQueueServiceFactoryDep = {
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;

// keep this timeout at 5s; it must be fast, otherwise the queue will take a long time to finish
// audit log is a crowded queue, so it needs to be fast
export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
export const auditLogQueueServiceFactory = ({

export const auditLogQueueServiceFactory = async ({
  auditLogDAL,
  queueService,
  projectDAL,
  licenseService,
  auditLogStreamDAL
}: TAuditLogQueueServiceFactoryDep) => {
  const appCfg = getConfig();

  const pushToLog = async (data: TCreateAuditLogDTO) => {
    await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
      removeOnFail: {
        count: 3
      },
      removeOnComplete: true
    });
    if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
      await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
        retryLimit: 10,
        retryBackoff: true
      });
    } else {
      await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
        removeOnFail: {
          count: 3
        },
        removeOnComplete: true
      });
    }
  };

  if (appCfg.SHOULD_INIT_PG_QUEUE) {
    await queueService.startPg<QueueName.AuditLog>(
      QueueJobs.AuditLog,
      async ([job]) => {
        const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
        let { orgId } = job.data;
        const MS_IN_DAY = 24 * 60 * 60 * 1000;
        let project;

        if (!orgId) {
          // it will never be undefined for both org and project id
          // TODO(akhilmhdh): use caching here in dal to avoid db calls
          project = await projectDAL.findById(projectId as string);
          orgId = project.orgId;
        }

        const plan = await licenseService.getPlan(orgId);
        if (plan.auditLogsRetentionDays === 0) {
          // skip inserting if audit log retention is 0, meaning it's not supported
          return;
        }

        // For project actions, set TTL to project-level audit log retention config
        // This condition ensures that the plan's audit log retention days cannot be bypassed
        const ttlInDays =
          project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
            ? project.auditLogsRetentionDays
            : plan.auditLogsRetentionDays;

        const ttl = ttlInDays * MS_IN_DAY;

        const auditLog = await auditLogDAL.create({
          actor: actor.type,
          actorMetadata: actor.metadata,
          userAgent,
          projectId,
          projectName: project?.name,
          ipAddress,
          orgId,
          eventType: event.type,
          expiresAt: new Date(Date.now() + ttl),
          eventMetadata: event.metadata,
          userAgentType
        });

        const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
        await Promise.allSettled(
          logStreams.map(
            async ({
              url,
              encryptedHeadersTag,
              encryptedHeadersIV,
              encryptedHeadersKeyEncoding,
              encryptedHeadersCiphertext
            }) => {
              const streamHeaders =
                encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
                  ? (JSON.parse(
                      infisicalSymmetricDecrypt({
                        keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
                        iv: encryptedHeadersIV,
                        tag: encryptedHeadersTag,
                        ciphertext: encryptedHeadersCiphertext
                      })
                    ) as LogStreamHeaders[])
                  : [];

              const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

              if (streamHeaders.length)
                streamHeaders.forEach(({ key, value }) => {
                  headers[key] = value;
                });

              return request.post(url, auditLog, {
                headers,
                // request timeout
                timeout: AUDIT_LOG_STREAM_TIMEOUT,
                // connection timeout
                signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
              });
            }
          )
        );
      },
      {
        batchSize: 1,
        workerCount: 30,
        pollingIntervalSeconds: 0.5
      }
    );
  }

  queueService.start(QueueName.AuditLog, async (job) => {
    const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
    let { orgId } = job.data;
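Worth restating the retention rule encoded in `ttlInDays`: a project-level setting may shorten audit-log retention, but it can never extend it past the plan's allowance. A self-contained restatement of just that selection:

// Restates the ttlInDays selection above: a project may shorten retention,
// but can never exceed what the org's plan allows.
const effectiveRetentionDays = (planDays: number, projectDays?: number | null): number =>
  projectDays && projectDays < planDays ? projectDays : planDays;

console.log(effectiveRetentionDays(30, 7)); // 7  — project shortens retention
console.log(effectiveRetentionDays(30, 90)); // 30 — plan cap wins
console.log(effectiveRetentionDays(30)); // 30 — no project override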
@@ -112,7 +112,7 @@ export const dynamicSecretLeaseServiceFactory = ({
      })
    ) as object;

    const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
    const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
    const { maxTTL } = dynamicSecretCfg;
    const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
    if (maxTTL) {
@@ -187,7 +187,7 @@ export const dynamicSecretLeaseServiceFactory = ({
      })
    ) as object;

    const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
    const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
    const { maxTTL } = dynamicSecretCfg;
    const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
    if (maxTTL) {
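The switch from `??` to `||` matters here because a client may send `ttl` as an empty string: `??` only falls back on `null`/`undefined`, while `||` also falls back on any falsy value, so `ms("")` is never reached. A quick illustration:

const defaultTTL = "1h";

let ttl: string | undefined = "";
console.log(ttl ?? defaultTTL); // ""  — empty string is not nullish, ms("") would fail
console.log(ttl || defaultTTL); // "1h" — empty string is falsy, fallback applies

ttl = undefined;
console.log(ttl ?? defaultTTL); // "1h" — both operators agree for undefined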
@@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
    }
  };

  const addUserToInfisicalGroup = async (userId: string) => {
  const $addUserToInfisicalGroup = async (userId: string) => {
    // figure out if the default user is already in the group, if it is, then we shouldn't add it again

    const addUserToGroupCommand = new ModifyUserGroupCommand({
@@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
    await ensureInfisicalGroupExists(clusterName);

    await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
    await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
    await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()

    return {
      userId: creationInput.UserId,
@@ -127,7 +127,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
  };

  const generatePassword = () => {
    const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
    const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
    return customAlphabet(charset, 64)();
  };

@@ -211,8 +211,8 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
    return { entityId };
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };
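Dropping `$` and `#` from the password charset (repeated across every provider below) avoids characters that some backends reject or that need shell/URL escaping. `customAlphabet` from nanoid builds a generator bound to exactly that alphabet; a quick sketch of the pattern:

import { customAlphabet } from "nanoid";

// Same pattern as the providers in this diff: a 64-char password drawn
// only from the restricted alphabet.
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const generatePassword = customAlphabet(charset, 64);

console.log(generatePassword()); // e.g. "tZ3~kP..." — always 64 chars from charset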
@@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
    const client = new IAMClient({
      region: providerInputs.region,
      credentials: {
@@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const isConnected = await client.send(new GetUserCommand({})).then(() => true);
    return isConnected;
@@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const username = generateUsername();
    const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const username = entityId;

@@ -179,9 +179,8 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
  };

  const renew = async (_inputs: unknown, entityId: string) => {
    // do nothing
    const username = entityId;
    return { entityId: username };
    // No renewal necessary
    return { entityId };
  };

  return {
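The `getClient` to `$getClient` renames across these providers appear to adopt a naming convention: the `$` prefix flags closure-private helpers that are not part of the provider's returned interface. A hypothetical minimal provider showing the shape (not code from the repo):

// Hypothetical minimal provider illustrating the assumed convention:
// $-prefixed helpers stay private to the closure; only lifecycle fns are returned.
const ExampleProvider = () => {
  const $getClient = async (inputs: { region: string }) => ({ region: inputs.region });

  const validateConnection = async (inputs: { region: string }) => {
    const client = await $getClient(inputs);
    return Boolean(client);
  };

  return { validateConnection }; // $getClient is intentionally not exposed
};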
@@ -9,7 +9,7 @@ const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
const MSFT_LOGIN_URL = "https://login.microsoftonline.com";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 64)();
};

@@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
    return providerInputs;
  };

  const getToken = async (
  const $getToken = async (
    tenantId: string,
    applicationId: string,
    clientSecret: string
@@ -51,18 +51,13 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
    return data.success;
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
    return { entityId };
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
    if (!data.success) {
      throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
    }
@@ -98,7 +93,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
  };

  const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
    const data = await getToken(tenantId, applicationId, clientSecret);
    const data = await $getToken(tenantId, applicationId, clientSecret);
    if (!data.success) {
      throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
    }
@@ -127,6 +122,11 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
    return users;
  };

  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};

@@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
    const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
    const client = new cassandra.Client({
      sslOptions,
@@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
    await client.shutdown();
@@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
@@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const username = entityId;
    const { keyspace } = providerInputs;
@@ -99,20 +99,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    if (!providerInputs.renewStatement) return { entityId };

    const client = await $getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();
    const { keyspace } = providerInputs;

    const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
      username: entityId,
      keyspace,
      expiration
    });
    const queries = renewStatement.toString().split(";").filter(Boolean);
    for (const query of queries) {
    // eslint-disable-next-line
    for await (const query of queries) {
      await client.execute(query);
    }
    await client.shutdown();
    return { entityId: username };
    return { entityId };
  };

  return {
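Two fixes land in this renew: it now compiles `renewStatement` (the old code compiled `revocationStatement` by mistake) and it exits early when no renew statement is configured. A hedged sketch of how such a handlebars template expands; the statement itself is user-supplied and deployment-specific, so the CQL below is purely illustrative:

import handlebars from "handlebars";

// Illustrative template only — real statements come from providerInputs.renewStatement.
const template = "ALTER ROLE {{username}} WITH LOGIN = true; -- valid until {{expiration}}";
const rendered = handlebars.compile(template)({
  username: "dynamic_user_abc",
  keyspace: "app",
  expiration: new Date(Date.now() + 3_600_000).toISOString()
});
console.log(rendered); // ALTER ROLE dynamic_user_abc WITH LOGIN = true; -- valid until ...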
@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";

const generatePassword = () => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 64)();
};

@@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
    const connection = new ElasticSearchClient({
      node: {
        url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);
    const connection = await $getClient(providerInputs);

    const infoResponse = await connection
      .info()
@@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);
    const connection = await $getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
@@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const connection = await getClient(providerInputs);
    const connection = await $getClient(providerInputs);

    await connection.security.deleteUser({
      username: entityId
@@ -95,8 +95,8 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
    return { entityId };
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };
@@ -6,15 +6,17 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders } from "./models";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";

export const buildDynamicSecretProviders = () => ({
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
  [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
  [DynamicSecretProviders.Cassandra]: CassandraProvider(),
  [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@@ -27,5 +29,7 @@ export const buildDynamicSecretProviders = () => ({
  [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
  [DynamicSecretProviders.Ldap]: LdapProvider(),
  [DynamicSecretProviders.SapHana]: SapHanaProvider(),
  [DynamicSecretProviders.Snowflake]: SnowflakeProvider()
  [DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
  [DynamicSecretProviders.Totp]: TotpProvider(),
  [DynamicSecretProviders.SapAse]: SapAseProvider()
});
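Annotating the factory's return type as `Record<DynamicSecretProviders, TDynamicProviderFns>` turns a forgotten registration into a compile error: adding `Totp` or `SapAse` to the enum without a matching entry here no longer type-checks. A hedged sketch of the lookup pattern at a call site; the input object is illustrative:

// Hypothetical call site: the Record type guarantees this lookup is
// defined for every DynamicSecretProviders member.
const providers = buildDynamicSecretProviders();
const totp = providers[DynamicSecretProviders.Totp];

// validateConnection/create/renew/revoke form the shared lifecycle interface;
// the input below is an illustrative manual TOTP config, not from the diff.
const ok = await totp.validateConnection({ configType: "manual", secret: "JBSWY3DPEHPK3PXP" });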
@@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
  const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
    return new Promise((resolve, reject) => {
      const client = ldapjs.createClient({
        url: providerInputs.url,
@@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);
    return client.connected;
  };

@@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    if (providerInputs.credentialType === LdapCredentialType.Static) {
      const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    if (providerInputs.credentialType === LdapCredentialType.Static) {
      const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
  };

  const renew = async (inputs: unknown, entityId: string) => {
    // Do nothing
    // No renewal necessary
    return { entityId };
  };
@@ -4,7 +4,8 @@ export enum SqlProviders {
  Postgres = "postgres",
  MySQL = "mysql2",
  Oracle = "oracledb",
  MsSQL = "mssql"
  MsSQL = "mssql",
  SapAse = "sap-ase"
}

export enum ElasticSearchAuthTypes {
@@ -17,6 +18,17 @@ export enum LdapCredentialType {
  Static = "static"
}

export enum TotpConfigType {
  URL = "url",
  MANUAL = "manual"
}

export enum TotpAlgorithm {
  SHA1 = "sha1",
  SHA256 = "sha256",
  SHA512 = "sha512"
}

export const DynamicSecretRedisDBSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
@@ -107,6 +119,16 @@ export const DynamicSecretCassandraSchema = z.object({
  ca: z.string().optional()
});

export const DynamicSecretSapAseSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
  database: z.string().trim(),
  username: z.string().trim(),
  password: z.string().trim(),
  creationStatement: z.string().trim(),
  revocationStatement: z.string().trim()
});

export const DynamicSecretAwsIamSchema = z.object({
  accessKey: z.string().trim().min(1),
  secretAccessKey: z.string().trim().min(1),
@@ -221,6 +243,34 @@ export const LdapSchema = z.union([
  })
]);

export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
  z.object({
    configType: z.literal(TotpConfigType.URL),
    url: z
      .string()
      .url()
      .trim()
      .min(1)
      .refine((val) => {
        const urlObj = new URL(val);
        const secret = urlObj.searchParams.get("secret");

        return Boolean(secret);
      }, "OTP URL must contain secret field")
  }),
  z.object({
    configType: z.literal(TotpConfigType.MANUAL),
    secret: z
      .string()
      .trim()
      .min(1)
      .transform((val) => val.replace(/\s+/g, "")),
    period: z.number().optional(),
    algorithm: z.nativeEnum(TotpAlgorithm).optional(),
    digits: z.number().optional()
  })
]);

export enum DynamicSecretProviders {
  SqlDatabase = "sql-database",
  Cassandra = "cassandra",
@@ -234,12 +284,15 @@ export enum DynamicSecretProviders {
  AzureEntraID = "azure-entra-id",
  Ldap = "ldap",
  SapHana = "sap-hana",
  Snowflake = "snowflake"
  Snowflake = "snowflake",
  Totp = "totp",
  SapAse = "sap-ase"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
@@ -250,7 +303,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
]);

export type TDynamicProviderFns = {
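`DynamicSecretTotpSchema` accepts either a full `otpauth://` URL (which must carry a `secret` query param, enforced by the refine) or a manually supplied secret with optional period/algorithm/digits. A hedged parse example, assuming the schema is imported from this models module; the secret value is a placeholder:

// URL variant: the refine above rejects otpauth URLs without ?secret=...
const urlInput = DynamicSecretTotpSchema.parse({
  configType: "url",
  url: "otpauth://totp/Example:alice?secret=JBSWY3DPEHPK3PXP&issuer=Example"
});

// Manual variant: whitespace in the secret is stripped by the transform.
const manualInput = DynamicSecretTotpSchema.parse({
  configType: "manual",
  secret: "JBSW Y3DP EHPK 3PXP",
  period: 30,
  algorithm: "sha1",
  digits: 6
});
console.log(manualInput.secret); // "JBSWY3DPEHPK3PXP"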
@@ -8,7 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};

@@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
    const client = axios.create({
      baseURL: "https://cloud.mongodb.com/api/atlas",
      headers: {
@@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const isConnected = await client({
      method: "GET",
@@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
@@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const username = entityId;
    const isExisting = await client({
@@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const client = await $getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();
@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};
@@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
    const isSrv = !providerInputs.port;
    const uri = isSrv
      ? `mongodb+srv://${providerInputs.host}`
@@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

    const isConnected = await client
      .db(providerInputs.database)
@@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
@@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

    const username = entityId;

@@ -88,6 +88,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
  };

  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };
@@ -11,7 +11,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 64)();
};
@@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
    const axiosInstance = axios.create({
      baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
      auth: {
@@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

    const infoResponse = await connection.get("/whoami").then(() => true);

@@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
@@ -134,15 +134,15 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

    await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });

    return { entityId };
  };

-  const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+  const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
    return { entityId };
  };
@@ -10,7 +10,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 64)();
};
@@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
    let connection: Redis | null = null;
    try {
      connection = new Redis({
@@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

    const pingResponse = await connection
      .ping()
@@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
@@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);

    const username = entityId;

@@ -141,7 +141,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const connection = await $getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();
backend/src/ee/services/dynamic-secret/providers/sap-ase.ts (new file, 145 lines)
@@ -0,0 +1,145 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return alphaNumericNanoId(25);
};

enum SapCommands {
  CreateLogin = "sp_addlogin",
  DropLogin = "sp_droplogin"
}

export const SapAseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
    const connectionString =
      `DRIVER={FreeTDS};` +
      `SERVER=${providerInputs.host};` +
      `PORT=${providerInputs.port};` +
      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
      `UID=${providerInputs.username};` +
      `PWD=${providerInputs.password};` +
      `TDS_VERSION=5.0`;

    const client = await odbc.connect(connectionString);

    return client;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const masterClient = await $getClient(providerInputs, true);
    const client = await $getClient(providerInputs);

    const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
    const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");

    if (!resultFromSelectedDatabase.version) {
      throw new BadRequestError({
        message: "Failed to validate SAP ASE connection, version query failed"
      });
    }

    if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
      throw new BadRequestError({
        message: "Failed to validate SAP ASE connection (master), version mismatch"
      });
    }

    return true;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);

    const username = `inf_${generateUsername()}`;
    const password = `${generatePassword()}`;

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);

    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username,
      password
    });

    const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

    for await (const query of queries) {
      // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
      // If not done, then the newly created user won't be able to authenticate.
      await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
    }

    await masterClient.close();
    await client.close();

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);

    const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
      username
    });

    const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);

    // Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
    const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);

    if (result && result.length > 0) {
      for await (const row of result) {
        if (row.spid) {
          await masterClient.query(`KILL ${row.spid.trim()}`);
        }
      }
    }

    for await (const query of queries) {
      await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
    }

    await masterClient.close();
    await client.close();

    return { entityId: username };
  };

  const renew = async (_: unknown, username: string) => {
    // No need for renewal
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
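For SAP ASE, the provider above splits the configured statements on ";" and routes any statement beginning with sp_addlogin or sp_droplogin to the master database. A hypothetical pair of templates that would exercise that routing; sp_adduser and sp_dropuser here are illustrative Sybase procedures, not something this diff prescribes:

// Hypothetical creation/revocation templates (assumptions, for illustration only).
// The sp_addlogin statement is detected by prefix and run against master.
const creationStatement = `sp_addlogin "{{username}}", "{{password}}";
sp_adduser "{{username}}"`;

const revocationStatement = `sp_dropuser "{{username}}";
sp_droplogin "{{username}}"`;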
@@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
    const client = hdb.createClient({
      host: providerInputs.host,
      port: providerInputs.port,
@@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

-    const testResult: boolean = await new Promise((resolve, reject) => {
+    const testResult = await new Promise<boolean>((resolve, reject) => {
      client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
        if (err) {
          reject();
@@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
    const password = generatePassword();
    const expiration = new Date(expireAt).toISOString();

-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username,
      password,
@@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
    const queries = revokeStatement.toString().split(";").filter(Boolean);
    for await (const query of queries) {
@@ -135,13 +135,15 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
    return { entityId: username };
  };

-  const renew = async (inputs: unknown, username: string, expireAt: number) => {
+  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const client = await $getClient(providerInputs);
    try {
      const expiration = new Date(expireAt).toISOString();

-      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
+      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
      const queries = renewStatement.toString().split(";").filter(Boolean);
      for await (const query of queries) {
        await new Promise((resolve, reject) => {
@@ -161,7 +163,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
      client.disconnect();
    }

-    return { entityId: username };
+    return { entityId };
  };

  return {
@@ -12,7 +12,7 @@ import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
const noop = () => {};

const generatePassword = (size = 48) => {
-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};
@@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
    const client = snowflake.createConnection({
      account: `${providerInputs.orgId}-${providerInputs.accountId}`,
      username: providerInputs.username,
@@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

    let isValidConnection: boolean;

@@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);

-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();
@@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);

-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

    try {
      const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
@@ -131,17 +131,16 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
    return { entityId: username };
  };

-  const renew = async (inputs: unknown, username: string, expireAt: number) => {
+  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
+    if (!providerInputs.renewStatement) return { entityId };

-    if (!providerInputs.renewStatement) return { entityId: username };
-
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

    try {
      const expiration = getDaysToExpiry(new Date(expireAt));
      const renewStatement = handlebars.compile(providerInputs.renewStatement)({
-        username,
+        username: entityId,
        expiration
      });

@@ -161,7 +160,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
      client.destroy(noop);
    }

-    return { entityId: username };
+    return { entityId };
  };

  return {
@@ -14,7 +14,7 @@ const generatePassword = (provider: SqlProviders) => {
  // oracle has limit of 48 password length
  const size = provider === SqlProviders.Oracle ? 30 : 48;

-  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
+  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};
@@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
    return providerInputs;
  };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
    const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
    const db = knex({
      client: providerInputs.client,
@@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);
    // oracle needs from keyword
    const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";

@@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);

    const username = generateUsername(providerInputs.client);
    const password = generatePassword(providerInputs.client);
@@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);

    const username = entityId;
    const { database } = providerInputs;
@@ -110,13 +110,19 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const db = await $getClient(providerInputs);

    const username = entityId;
    const expiration = new Date(expireAt).toISOString();
    const { database } = providerInputs;

-    const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
+    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
+      username: entityId,
+      expiration,
+      database
+    });

    if (renewStatement) {
      const queries = renewStatement.toString().split(";").filter(Boolean);
      await db.transaction(async (tx) => {
@@ -128,7 +134,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
    }

    await db.destroy();
-    return { entityId: username };
+    return { entityId };
  };

  return {
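The SQL provider's renew path now exits early when no renewStatement is configured, and otherwise passes { username, expiration, database } into handlebars before splitting on ";". A hypothetical Postgres-flavored template, purely for illustration:

// Hypothetical renew template (assumption); receives username/expiration/database.
const renewStatement = `ALTER ROLE "{{username}}" VALID UNTIL '{{expiration}}'`;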
backend/src/ee/services/dynamic-secret/providers/totp.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";

export const TotpProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);

    return providerInputs;
  };

  const validateConnection = async () => {
    return true;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);

    const entityId = alphaNumericNanoId(32);
    const authenticatorInstance = authenticator.clone();

    let secret: string;
    let period: number | null | undefined;
    let digits: number | null | undefined;
    let algorithm: HashAlgorithms | null | undefined;

    if (providerInputs.configType === TotpConfigType.URL) {
      const urlObj = new URL(providerInputs.url);
      secret = urlObj.searchParams.get("secret") as string;
      const periodFromUrl = urlObj.searchParams.get("period");
      const digitsFromUrl = urlObj.searchParams.get("digits");
      const algorithmFromUrl = urlObj.searchParams.get("algorithm");

      if (periodFromUrl) {
        period = +periodFromUrl;
      }

      if (digitsFromUrl) {
        digits = +digitsFromUrl;
      }

      if (algorithmFromUrl) {
        algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
      }
    } else {
      secret = providerInputs.secret;
      period = providerInputs.period;
      digits = providerInputs.digits;
      algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
    }

    if (digits) {
      authenticatorInstance.options = { digits };
    }

    if (algorithm) {
      authenticatorInstance.options = { algorithm };
    }

    if (period) {
      authenticatorInstance.options = { step: period };
    }

    return {
      entityId,
      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
    };
  };

  const revoke = async (_inputs: unknown, entityId: string) => {
    return { entityId };
  };

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
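A minimal sketch of the otplib calls the TOTP provider builds on, fed a hypothetical otpauth:// URL of the kind the URL config type parses (the URL and its parameters are made up for illustration):

import { authenticator } from "otplib";

const url = new URL("otpauth://totp/Example:alice?secret=JBSWY3DPEHPK3PXP&period=30&digits=6");
const instance = authenticator.clone();

// mirror the provider: apply period/digits from the URL when present
instance.options = {
  step: Number(url.searchParams.get("period") ?? 30),
  digits: Number(url.searchParams.get("digits") ?? 6)
};

const code = instance.generate(url.searchParams.get("secret") as string);
const secondsLeft = instance.timeRemaining(); // seconds until `code` rotates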
@@ -20,7 +20,8 @@ import {
  TUpdateExternalKmsDTO
} from "./external-kms-types";
import { AwsKmsProviderFactory } from "./providers/aws-kms";
-import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";
+import { GcpKmsProviderFactory } from "./providers/gcp-kms";
+import { ExternalKmsAwsSchema, ExternalKmsGcpSchema, KmsProviders, TExternalKmsGcpSchema } from "./providers/model";

type TExternalKmsServiceFactoryDep = {
  externalKmsDAL: TExternalKmsDALFactory;
@@ -78,6 +79,13 @@ export const externalKmsServiceFactory = ({
        await externalKms.validateConnection();
      }
      break;
+    case KmsProviders.Gcp:
+      {
+        const externalKms = await GcpKmsProviderFactory({ inputs: provider.inputs });
+        await externalKms.validateConnection();
+        sanitizedProviderInput = JSON.stringify(provider.inputs);
+      }
+      break;
    default:
      throw new BadRequestError({ message: "external kms provided is invalid" });
  }
@@ -88,7 +96,7 @@ export const externalKmsServiceFactory = ({
    });

    const { cipherTextBlob: encryptedProviderInputs } = orgDataKeyEncryptor({
-      plainText: Buffer.from(sanitizedProviderInput, "utf8")
+      plainText: Buffer.from(sanitizedProviderInput)
    });

    const externalKms = await externalKmsDAL.transaction(async (tx) => {
@@ -162,7 +170,7 @@ export const externalKmsServiceFactory = ({
      case KmsProviders.Aws:
        {
          const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
-            JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+            JSON.parse(decryptedProviderInputBlob.toString())
          );
          const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
          const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
@@ -170,6 +178,17 @@ export const externalKmsServiceFactory = ({
          sanitizedProviderInput = JSON.stringify(updatedProviderInput);
        }
        break;
+      case KmsProviders.Gcp:
+        {
+          const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
+            JSON.parse(decryptedProviderInputBlob.toString())
+          );
+          const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
+          const externalKms = await GcpKmsProviderFactory({ inputs: updatedProviderInput });
+          await externalKms.validateConnection();
+          sanitizedProviderInput = JSON.stringify(updatedProviderInput);
+        }
+        break;
      default:
        throw new BadRequestError({ message: "external kms provided is invalid" });
    }
@@ -178,7 +197,7 @@ export const externalKmsServiceFactory = ({
    let encryptedProviderInputs: Buffer | undefined;
    if (sanitizedProviderInput) {
      const { cipherTextBlob } = orgDataKeyEncryptor({
-        plainText: Buffer.from(sanitizedProviderInput, "utf8")
+        plainText: Buffer.from(sanitizedProviderInput)
      });
      encryptedProviderInputs = cipherTextBlob;
    }
@@ -271,10 +290,17 @@ export const externalKmsServiceFactory = ({
    switch (externalKmsDoc.provider) {
      case KmsProviders.Aws: {
        const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
-          JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+          JSON.parse(decryptedProviderInputBlob.toString())
        );
        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
      }
+      case KmsProviders.Gcp: {
+        const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
+          JSON.parse(decryptedProviderInputBlob.toString())
+        );
+
+        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
+      }
      default:
        throw new BadRequestError({ message: "external kms provided is invalid" });
    }
@@ -312,21 +338,34 @@ export const externalKmsServiceFactory = ({
    switch (externalKmsDoc.provider) {
      case KmsProviders.Aws: {
        const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
-          JSON.parse(decryptedProviderInputBlob.toString("utf8"))
+          JSON.parse(decryptedProviderInputBlob.toString())
        );
        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
      }
+      case KmsProviders.Gcp: {
+        const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
+          JSON.parse(decryptedProviderInputBlob.toString())
+        );
+
+        return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
+      }
      default:
        throw new BadRequestError({ message: "external kms provided is invalid" });
    }
  };

+  const fetchGcpKeys = async ({ credential, gcpRegion }: Pick<TExternalKmsGcpSchema, "credential" | "gcpRegion">) => {
+    const externalKms = await GcpKmsProviderFactory({ inputs: { credential, gcpRegion, keyName: "" } });
+    return externalKms.getKeysList();
+  };
+
  return {
    create,
    updateById,
    deleteById,
    list,
    findById,
-    findByName
+    findByName,
+    fetchGcpKeys
  };
};
backend/src/ee/services/external-kms/providers/gcp-kms.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
import { KeyManagementServiceClient } from "@google-cloud/kms";

import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";

import { ExternalKmsGcpSchema, TExternalKmsGcpClientSchema, TExternalKmsProviderFns } from "./model";

const getGcpKmsClient = async ({ credential, gcpRegion }: TExternalKmsGcpClientSchema) => {
  const gcpKmsClient = new KeyManagementServiceClient({
    credentials: credential
  });
  const projectId = credential.project_id;
  const locationName = gcpKmsClient.locationPath(projectId, gcpRegion);

  return {
    gcpKmsClient,
    locationName
  };
};

type GcpKmsProviderArgs = {
  inputs: unknown;
};
type TGcpKmsProviderFactoryReturn = TExternalKmsProviderFns & {
  getKeysList: () => Promise<{ keys: string[] }>;
};

export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Promise<TGcpKmsProviderFactoryReturn> => {
  const { credential, gcpRegion, keyName } = await ExternalKmsGcpSchema.parseAsync(inputs);
  const { gcpKmsClient, locationName } = await getGcpKmsClient({
    credential,
    gcpRegion
  });

  const validateConnection = async () => {
    try {
      await gcpKmsClient.listKeyRings({
        parent: locationName
      });
      return true;
    } catch (error) {
      throw new BadRequestError({
        message: "Cannot connect to GCP KMS"
      });
    }
  };

  // Used when adding the KMS to fetch the list of keys in specified region
  const getKeysList = async () => {
    try {
      const [keyRings] = await gcpKmsClient.listKeyRings({
        parent: locationName
      });

      const validKeyRings = keyRings
        .filter(
          (keyRing): keyRing is { name: string } =>
            keyRing !== null && typeof keyRing === "object" && "name" in keyRing && typeof keyRing.name === "string"
        )
        .map((keyRing) => keyRing.name);
      const keyList: string[] = [];
      const keyListPromises = validKeyRings.map((keyRingName) =>
        gcpKmsClient
          .listCryptoKeys({
            parent: keyRingName
          })
          .then(([cryptoKeys]) =>
            cryptoKeys
              .filter(
                (key): key is { name: string } =>
                  key !== null && typeof key === "object" && "name" in key && typeof key.name === "string"
              )
              .map((key) => key.name)
          )
      );

      const cryptoKeyLists = await Promise.all(keyListPromises);
      keyList.push(...cryptoKeyLists.flat());
      return { keys: keyList };
    } catch (error) {
      logger.error(error, "Could not validate GCP KMS connection and credentials");
      throw new BadRequestError({
        message: "Could not validate GCP KMS connection and credentials",
        error
      });
    }
  };

  const encrypt = async (data: Buffer) => {
    const encryptedText = await gcpKmsClient.encrypt({
      name: keyName,
      plaintext: data
    });
    if (!encryptedText[0].ciphertext) throw new Error("encryption failed");
    return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext) };
  };

  const decrypt = async (encryptedBlob: Buffer) => {
    const decryptedText = await gcpKmsClient.decrypt({
      name: keyName,
      ciphertext: encryptedBlob
    });
    if (!decryptedText[0].plaintext) throw new Error("decryption failed");
    return { data: Buffer.from(decryptedText[0].plaintext) };
  };

  return {
    validateConnection,
    getKeysList,
    encrypt,
    decrypt
  };
};
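A minimal usage sketch for the factory above; the credential object and key path are hypothetical placeholders that must satisfy ExternalKmsGcpSchema (defined in the model diff that follows):

const kms = await GcpKmsProviderFactory({
  inputs: {
    credential: serviceAccountJson, // a parsed service_account key file (assumed to be in scope)
    gcpRegion: "us-central1",
    keyName: "projects/my-proj/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"
  }
});

await kms.validateConnection();
const { encryptedBlob } = await kms.encrypt(Buffer.from("hello"));
const { data } = await kms.decrypt(encryptedBlob); // data.toString() === "hello"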
@@ -1,13 +1,23 @@
import { z } from "zod";

export enum KmsProviders {
-  Aws = "aws"
+  Aws = "aws",
+  Gcp = "gcp"
}

export enum KmsAwsCredentialType {
  AssumeRole = "assume-role",
  AccessKey = "access-key"
}
+
+// Google uses snake_case for their enum values and we need to match that
+export enum KmsGcpCredentialType {
+  ServiceAccount = "service_account"
+}
+
+export enum KmsGcpKeyFetchAuthType {
+  Credential = "credential",
+  Kms = "kmsId"
+}

export const ExternalKmsAwsSchema = z.object({
  credential: z
@@ -42,14 +52,44 @@ export const ExternalKmsAwsSchema = z.object({
});
export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;

+export const ExternalKmsGcpCredentialSchema = z.object({
+  type: z.literal(KmsGcpCredentialType.ServiceAccount),
+  project_id: z.string().min(1),
+  private_key_id: z.string().min(1),
+  private_key: z.string().min(1),
+  client_email: z.string().min(1),
+  client_id: z.string().min(1),
+  auth_uri: z.string().min(1),
+  token_uri: z.string().min(1),
+  auth_provider_x509_cert_url: z.string().min(1),
+  client_x509_cert_url: z.string().min(1),
+  universe_domain: z.string().min(1)
+});
+
+export type TExternalKmsGcpCredentialSchema = z.infer<typeof ExternalKmsGcpCredentialSchema>;
+
+export const ExternalKmsGcpSchema = z.object({
+  credential: ExternalKmsGcpCredentialSchema.describe("GCP Service Account JSON credential to connect"),
+  gcpRegion: z.string().trim().describe("GCP region where the KMS key is located"),
+  keyName: z.string().trim().describe("GCP key name")
+});
+export type TExternalKmsGcpSchema = z.infer<typeof ExternalKmsGcpSchema>;
+
+const ExternalKmsGcpClientSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true }).extend({
+  credential: ExternalKmsGcpCredentialSchema
+});
+export type TExternalKmsGcpClientSchema = z.infer<typeof ExternalKmsGcpClientSchema>;
+
// The root schema of the JSON
export const ExternalKmsInputSchema = z.discriminatedUnion("type", [
-  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema })
+  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema }),
+  z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema })
]);
export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>;

export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [
-  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() })
+  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() }),
+  z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema.partial() })
]);
export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>;
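For reference, a hypothetical service-account credential of the shape ExternalKmsGcpCredentialSchema accepts; every field is required (min length 1) and mirrors Google's downloadable key-file JSON. All values below are placeholders:

// Placeholder credential (assumption); the auth/token URIs shown are the
// standard values Google emits in service-account key files.
const credential = {
  type: "service_account",
  project_id: "my-proj",
  private_key_id: "0123abcd",
  private_key: "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
  client_email: "kms-sa@my-proj.iam.gserviceaccount.com",
  client_id: "123456789",
  auth_uri: "https://accounts.google.com/o/oauth2/auth",
  token_uri: "https://oauth2.googleapis.com/token",
  auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs",
  client_x509_cert_url: "https://www.googleapis.com/robot/v1/metadata/x509/kms-sa%40my-proj.iam.gserviceaccount.com",
  universe_domain: "googleapis.com"
};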
@@ -27,7 +27,7 @@ export const initializeHsmModule = () => {

      logger.info("PKCS#11 module initialized");
    } catch (err) {
-      logger.error("Failed to initialize PKCS#11 module:", err);
+      logger.error(err, "Failed to initialize PKCS#11 module");
      throw err;
    }
  };
@@ -39,7 +39,7 @@ export const initializeHsmModule = () => {
        isInitialized = false;
        logger.info("PKCS#11 module finalized");
      } catch (err) {
-        logger.error("Failed to finalize PKCS#11 module:", err);
+        logger.error(err, "Failed to finalize PKCS#11 module");
        throw err;
      }
    }
@@ -1,4 +1,4 @@
-import { ForbiddenError } from "@casl/ability";
+import { ForbiddenError, subject } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import ms from "ms";

@@ -62,7 +62,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Identity, { identityId })
+    );
    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityId,
@@ -139,7 +142,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
+    );
    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityProjectMembership.identityId,
@@ -216,7 +222,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
+    );
    const { permission: identityRolePermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityProjectMembership.identityId,
@@ -258,7 +267,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
+    );

    return {
      ...identityPrivilege,
@@ -289,7 +301,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
+    );

    const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
      slug,
@@ -321,7 +336,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
+    );

    const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find(
      {
@@ -1,4 +1,4 @@
-import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
+import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";

@@ -69,7 +69,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Identity, { identityId })
+    );

    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityId,
@@ -146,7 +150,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Identity, { identityId })
+    );

    const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
@@ -241,7 +249,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.Identity, { identityId })
+    );

    const { permission: identityRolePermission } = await permissionService.getProjectPermission(
      ActorType.IDENTITY,
      identityProjectMembership.identityId,
@@ -294,7 +306,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      subject(ProjectPermissionSub.Identity, { identityId })
+    );

    const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
      slug,
@@ -333,7 +348,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
      actorAuthMethod,
      actorOrgId
    );
-    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
+
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Read,
+      subject(ProjectPermissionSub.Identity, { identityId })
+    );

    const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find({
      projectMembershipId: identityProjectMembership.id
@@ -36,8 +36,7 @@ export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean>
  });

  ldapClient.on("error", (err) => {
-    logger.error("LDAP client error:", err);
-    logger.error(err);
+    logger.error(err, "LDAP client error");
    resolve(false);
  });

@@ -161,8 +161,8 @@ export const licenseServiceFactory = ({
      }
    } catch (error) {
      logger.error(
-        `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`,
-        error
+        error,
+        `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`
      );
      await keyStore.setItemWithExpiry(
        FEATURE_CACHE_KEY(orgId),
@@ -17,7 +17,7 @@ import {
  infisicalSymmetricDecrypt,
  infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
-import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -56,7 +56,7 @@ type TOidcConfigServiceFactoryDep = {
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
  tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
-  smtpService: Pick<TSmtpService, "sendMail">;
+  smtpService: Pick<TSmtpService, "sendMail" | "verify">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
};
@@ -223,6 +223,7 @@ export const oidcConfigServiceFactory = ({
      let newUser: TUsers | undefined;

      if (serverCfg.trustOidcEmails) {
+        // we prioritize getting the most complete user to create the new alias under
        newUser = await userDAL.findOne(
          {
            email,
@@ -230,6 +231,23 @@ export const oidcConfigServiceFactory = ({
          },
          tx
        );
+
+        if (!newUser) {
+          // this fetches user entries created via invites
+          newUser = await userDAL.findOne(
+            {
+              username: email
+            },
+            tx
+          );
+
+          if (newUser && !newUser.isEmailVerified) {
+            // we automatically mark it as email-verified because we've configured trust for OIDC emails
+            newUser = await userDAL.updateById(newUser.id, {
+              isEmailVerified: true
+            });
+          }
+        }
      }

      if (!newUser) {
@@ -332,14 +350,20 @@ export const oidcConfigServiceFactory = ({
        userId: user.id
      });

-      await smtpService.sendMail({
-        template: SmtpTemplates.EmailVerification,
-        subjectLine: "Infisical confirmation code",
-        recipients: [user.email],
-        substitutions: {
-          code: token
-        }
-      });
+      await smtpService
+        .sendMail({
+          template: SmtpTemplates.EmailVerification,
+          subjectLine: "Infisical confirmation code",
+          recipients: [user.email],
+          substitutions: {
+            code: token
+          }
+        })
+        .catch((err: Error) => {
+          throw new OidcAuthError({
+            message: `Error sending email confirmation code for user registration - contact the Infisical instance admin. ${err.message}`
+          });
+        });
    }

    return { isUserCompleted, providerAuthToken };
@@ -395,6 +419,18 @@ export const oidcConfigServiceFactory = ({
        message: `Organization bot for organization with ID '${org.id}' not found`,
        name: "OrgBotNotFound"
      });

+    const serverCfg = await getServerCfg();
+    if (isActive && !serverCfg.trustOidcEmails) {
+      const isSmtpConnected = await smtpService.verify();
+      if (!isSmtpConnected) {
+        throw new BadRequestError({
+          message:
+            "Cannot enable OIDC when there are issues with the instance's SMTP configuration. Bypass this by turning on trust for OIDC emails in the server admin console."
+        });
+      }
+    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
@@ -31,7 +31,6 @@ export enum OrgPermissionSubjects {
}

export type OrgPermissionSet =
-  | [OrgPermissionActions.Read, OrgPermissionSubjects.Workspace]
  | [OrgPermissionActions.Create, OrgPermissionSubjects.Workspace]
  | [OrgPermissionActions, OrgPermissionSubjects.Role]
  | [OrgPermissionActions, OrgPermissionSubjects.Member]
@@ -52,7 +51,6 @@ export type OrgPermissionSet =
const buildAdminPermission = () => {
  const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
  // ws permissions
-  can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
  can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
  // role permission
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
@@ -135,7 +133,6 @@ export const orgAdminPermissions = buildAdminPermission();
const buildMemberPermission = () => {
  const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);

-  can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
  can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
  can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
@@ -127,14 +127,15 @@ export const permissionDALFactory = (db: TDbClient) => {

  const getProjectPermission = async (userId: string, projectId: string) => {
    try {
+      const subQueryUserGroups = db(TableName.UserGroupMembership).where("userId", userId).select("groupId");
      const docs = await db
        .replicaNode()(TableName.Users)
        .where(`${TableName.Users}.id`, userId)
        .leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
        .leftJoin(TableName.GroupProjectMembership, (queryBuilder) => {
          void queryBuilder
            .on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId]))
-            .andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`);
+            // @ts-expect-error akhilmhdh: this is valid knexjs query. Its just ts type argument is missing it
+            .andOnIn(`${TableName.GroupProjectMembership}.groupId`, subQueryUserGroups);
        })
        .leftJoin(
          TableName.GroupProjectMembershipRole,
@@ -29,4 +29,18 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
  }
}

-export { isAuthMethodSaml, validateOrgSSO };
+const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
+  const handler = {
+    get(target: Record<string, string>, prop: string) {
+      if (!(prop in target)) {
+        // eslint-disable-next-line no-param-reassign
+        target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
+      }
+      return target[prop];
+    }
+  };
+
+  return new Proxy(obj, handler);
+};
+
+export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
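A sketch of what the Proxy above buys, assuming handlebars resolves lookups by first reading the property (which triggers the get trap and materializes it as an own property): a template that references a metadata key the actor lacks re-emits the raw placeholder instead of collapsing to an empty string.

import handlebars from "handlebars";

const metadata = escapeHandlebarsMissingMetadata({ team: "core" });
const template = handlebars.compile(
  JSON.stringify({ team: "{{identity.metadata.team}}", owner: "{{identity.metadata.owner}}" }),
  { data: false }
);

// "team" interpolates to "core"; "owner" comes back as the literal string
// "{{identity.metadata.owner}}", so the missing key stays visible downstream.
const rendered = template({ identity: { metadata } });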
@ -21,7 +21,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok
|
||||
|
||||
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
|
||||
import { TPermissionDALFactory } from "./permission-dal";
|
||||
import { validateOrgSSO } from "./permission-fns";
|
||||
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
|
||||
import { TBuildOrgPermissionDTO, TBuildProjectPermissionDTO } from "./permission-service-types";
|
||||
import {
|
||||
buildServiceTokenProjectPermission,
|
||||
@ -227,11 +227,13 @@ export const permissionServiceFactory = ({
|
||||
})) || [];
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
|
||||
const metadataKeyValuePair = objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
)
|
||||
);
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
@@ -292,12 +294,15 @@ export const permissionServiceFactory = ({
      })) || [];

    const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
    const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
    const metadataKeyValuePair = objectify(
      identityProjectPermission.metadata,
      (i) => i.key,
      (i) => i.value
    const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
    const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
      objectify(
        identityProjectPermission.metadata,
        (i) => i.key,
        (i) => i.value
      )
    );

    const interpolateRules = templatedRules(
      {
        identity: {
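Both hunks make the same two-part change: handlebars is no longer compiled with strict: true, and the metadata map is wrapped by escapeHandlebarsMissingMetadata before interpolation. The net effect is that a rule referencing a metadata key the user or identity lacks no longer throws; the placeholder passes through intact. A small sketch, assuming the handlebars package and the helper above:

import handlebars from "handlebars";

// Hypothetical rule fragment referencing a key that may be absent.
const template = handlebars.compile(`{"identityId":"{{identity.metadata.ssoId}}"}`, { data: false });

const metadata = escapeHandlebarsMissingMetadata({ team: "platform" }); // no ssoId present
console.log(template({ identity: { metadata } }));
// => {"identityId":"{{identity.metadata.ssoId}}"}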
@@ -1,14 +1,7 @@
import picomatch from "picomatch";
import { z } from "zod";

export enum PermissionConditionOperators {
  $IN = "$in",
  $ALL = "$all",
  $REGEX = "$regex",
  $EQ = "$eq",
  $NEQ = "$ne",
  $GLOB = "$glob"
}
import { PermissionConditionOperators } from "@app/lib/casl";

export const PermissionConditionSchema = {
  [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
@@ -1,10 +1,10 @@
import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
import { z } from "zod";

import { conditionsMatcher } from "@app/lib/casl";
import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";

import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";
import { PermissionConditionSchema } from "./permission-types";

export enum ProjectPermissionActions {
  Read = "read",
@@ -82,6 +82,10 @@ export type SecretImportSubjectFields = {
  secretPath: string;
};

export type IdentityManagementSubjectFields = {
  identityId: string;
};

export type ProjectPermissionSet =
  | [
      ProjectPermissionActions,
@@ -121,7 +125,10 @@ export type ProjectPermissionSet =
  | [ProjectPermissionActions, ProjectPermissionSub.ServiceTokens]
  | [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
  | [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
  | [ProjectPermissionActions, ProjectPermissionSub.Identity]
  | [
      ProjectPermissionActions,
      ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
    ]
  | [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
  | [ProjectPermissionActions, ProjectPermissionSub.Certificates]
  | [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates]
@@ -213,6 +220,21 @@ const SecretConditionV2Schema = z
  })
  .partial();

const IdentityManagementConditionSchema = z
  .object({
    identityId: z.union([
      z.string(),
      z
        .object({
          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
          [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
        })
        .partial()
    ])
  })
  .partial();

const GeneralPermissionSchema = [
  z.object({
    subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
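The new condition schema accepts either a bare identityId string (effectively an equality match) or an operator object limited to $eq, $ne, and $in. For illustration, each of these payloads would pass the schema (values invented):

const byString = { identityId: "identity-123" };
const byEq = { identityId: { $eq: "identity-123" } };
const byIn = { identityId: { $in: ["identity-123", "identity-456"] } };
// e.g. IdentityManagementConditionSchema.parse(byIn) succeeds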
@@ -262,12 +284,6 @@ const GeneralPermissionSchema = [
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.ServiceTokens).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
@@ -373,6 +389,12 @@ export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
      "Describe what action an entity can take."
    )
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    )
  }),
  ...GeneralPermissionSchema
]);
@@ -417,6 +439,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
  z.object({
    subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
    inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
    action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
      "Describe what action an entity can take."
    ),
    conditions: IdentityManagementConditionSchema.describe(
      "When specified, only matching conditions will be allowed to access given resource."
    ).optional()
  }),
  ...GeneralPermissionSchema
]);
@@ -697,26 +729,26 @@ export const buildServiceTokenProjectPermission = (
  [ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
    (subject) => {
      if (canWrite) {
        // TODO: @Akhi
        // @ts-expect-error type
        can(ProjectPermissionActions.Edit, subject, {
          // TODO: @Akhi
          // @ts-expect-error type
          secretPath: { $glob: secretPath },
          environment
        });
        // @ts-expect-error type
        can(ProjectPermissionActions.Create, subject, {
          // @ts-expect-error type
          secretPath: { $glob: secretPath },
          environment
        });
        // @ts-expect-error type
        can(ProjectPermissionActions.Delete, subject, {
          // @ts-expect-error type
          secretPath: { $glob: secretPath },
          environment
        });
      }
      if (canRead) {
        // @ts-expect-error type
        can(ProjectPermissionActions.Read, subject, {
          // @ts-expect-error type
          secretPath: { $glob: secretPath },
          environment
        });
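Each rule above scopes its action with a secretPath: { $glob } condition, i.e. the service token may act only on secret paths matching its scope. A minimal sketch of that matching decision using picomatch (illustrative; the service wires this up through its own conditionsMatcher):

import picomatch from "picomatch";

const scope = "/app/**"; // hypothetical service-token secretPath scope
console.log(picomatch.isMatch("/app/api/DB_PASSWORD", scope)); // true -> access allowed
console.log(picomatch.isMatch("/infra/DB_PASSWORD", scope)); // false -> access denied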
@@ -46,7 +46,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
      }
      return rateLimit;
    } catch (err) {
      logger.error("Error fetching rate limits %o", err);
      logger.error(err, "Error fetching rate limits");
      return undefined;
    }
  };
@@ -69,12 +69,12 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
          mfaRateLimit: rateLimit.mfaRateLimit
        };

        logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
        logger.info(newRateLimitMaxConfiguration, "syncRateLimitConfiguration: rate limit configuration");
        Object.freeze(newRateLimitMaxConfiguration);
        rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
      }
    } catch (error) {
      logger.error(`Error syncing rate limit configurations: %o`, error);
      logger.error(error, "Error syncing rate limit configurations");
    }
  };
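This hunk, like the secret-scanning and GitHub-app hunks below, converts logging calls to pino's calling convention: the error or merge object goes first and the message second, so the error is serialized into structured fields rather than printf-formatted into the message string. A quick sketch of the difference:

import pino from "pino";

const log = pino();
const err = new Error("connection refused");

log.error("Error fetching rate limits %o", err); // old: err flattened into the message string
log.error(err, "Error fetching rate limits"); // new: err serialized as structured error fields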
@@ -18,6 +18,7 @@ import { TGroupProjectDALFactory } from "@app/services/group-project/group-proje
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { deleteOrgMembershipFn } from "@app/services/org/org-fns";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { OrgAuthMethod } from "@app/services/org/org-types";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
@@ -71,6 +72,7 @@ type TScimServiceFactoryDep = {
    | "deleteMembershipById"
    | "transaction"
    | "updateMembershipById"
    | "findOrgById"
  >;
  orgMembershipDAL: Pick<
    TOrgMembershipDALFactory,
@@ -288,8 +290,7 @@ export const scimServiceFactory = ({
  const createScimUser = async ({ externalId, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
    if (!email) throw new ScimRequestError({ detail: "Invalid request. Missing email.", status: 400 });

    const org = await orgDAL.findById(orgId);

    const org = await orgDAL.findOrgById(orgId);
    if (!org)
      throw new ScimRequestError({
        detail: "Organization not found",
@@ -302,13 +303,24 @@ export const scimServiceFactory = ({
        status: 403
      });

    if (!org.orgAuthMethod) {
      throw new ScimRequestError({
        detail: "Neither SAML or OIDC SSO is configured",
        status: 400
      });
    }

    const appCfg = getConfig();
    const serverCfg = await getServerCfg();

    const aliasType = org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML;
    const trustScimEmails =
      org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;

    const userAlias = await userAliasDAL.findOne({
      externalId,
      orgId,
      aliasType: UserAliasType.SAML
      aliasType
    });

    const { user: createdUser, orgMembership: createdOrgMembership } = await userDAL.transaction(async (tx) => {
@@ -349,7 +361,7 @@ export const scimServiceFactory = ({
        );
      }
    } else {
      if (serverCfg.trustSamlEmails) {
      if (trustScimEmails) {
        user = await userDAL.findOne(
          {
            email,
@@ -367,9 +379,9 @@ export const scimServiceFactory = ({
        );
        user = await userDAL.create(
          {
            username: serverCfg.trustSamlEmails ? email : uniqueUsername,
            username: trustScimEmails ? email : uniqueUsername,
            email,
            isEmailVerified: serverCfg.trustSamlEmails,
            isEmailVerified: trustScimEmails,
            firstName,
            lastName,
            authMethods: [],
@@ -382,7 +394,7 @@ export const scimServiceFactory = ({
      await userAliasDAL.create(
        {
          userId: user.id,
          aliasType: UserAliasType.SAML,
          aliasType,
          externalId,
          emails: email ? [email] : [],
          orgId
@@ -437,7 +449,7 @@ export const scimServiceFactory = ({
        recipients: [email],
        substitutions: {
          organizationName: org.name,
          callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}`
          callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/organizations/${org.slug}`
        }
      });
    }
@@ -456,6 +468,14 @@ export const scimServiceFactory = ({

  // partial
  const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
    const org = await orgDAL.findOrgById(orgId);
    if (!org.orgAuthMethod) {
      throw new ScimRequestError({
        detail: "Neither SAML or OIDC SSO is configured",
        status: 400
      });
    }

    const [membership] = await orgDAL
      .findMembership({
        [`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@@ -493,6 +513,9 @@ export const scimServiceFactory = ({
    scimPatch(scimUser, operations);

    const serverCfg = await getServerCfg();
    const trustScimEmails =
      org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;

    await userDAL.transaction(async (tx) => {
      await orgMembershipDAL.updateById(
        membership.id,
@@ -508,7 +531,7 @@ export const scimServiceFactory = ({
          firstName: scimUser.name.givenName,
          email: scimUser.emails[0].value,
          lastName: scimUser.name.familyName,
          isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true
          isEmailVerified: hasEmailChanged ? trustScimEmails : true
        },
        tx
      );
@@ -526,6 +549,14 @@ export const scimServiceFactory = ({
    email,
    externalId
  }: TReplaceScimUserDTO) => {
    const org = await orgDAL.findOrgById(orgId);
    if (!org.orgAuthMethod) {
      throw new ScimRequestError({
        detail: "Neither SAML or OIDC SSO is configured",
        status: 400
      });
    }

    const [membership] = await orgDAL
      .findMembership({
        [`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@@ -555,7 +586,7 @@ export const scimServiceFactory = ({
      await userAliasDAL.update(
        {
          orgId,
          aliasType: UserAliasType.SAML,
          aliasType: org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML,
          userId: membership.userId
        },
        {
@@ -576,7 +607,8 @@ export const scimServiceFactory = ({
          firstName,
          email,
          lastName,
          isEmailVerified: serverCfg.trustSamlEmails
          isEmailVerified:
            org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
        },
        tx
      );
@@ -238,11 +238,11 @@ export const secretScanningQueueFactory = ({
  });

  queueService.listen(QueueName.SecretPushEventScan, "failed", (job, err) => {
    logger.error("Failed to secret scan on push", job?.data, err);
    logger.error(err, "Failed to secret scan on push", job?.data);
  });

  queueService.listen(QueueName.SecretFullRepoScan, "failed", (job, err) => {
    logger.error("Failed to do full repo secret scan", job?.data, err);
    logger.error(err, "Failed to do full repo secret scan", job?.data);
  });

  return { startFullRepoScan, startPushEventScan };
@@ -391,6 +391,7 @@ export const PROJECTS = {
  CREATE: {
    organizationSlug: "The slug of the organization to create the project in.",
    projectName: "The name of the project to create.",
    projectDescription: "An optional description label for the project.",
    slug: "An optional slug for the project.",
    template: "The name of the project template, if specified, to apply to this project."
  },
@@ -403,6 +404,7 @@ export const PROJECTS = {
  UPDATE: {
    workspaceId: "The ID of the project to update.",
    name: "The new name of the project.",
    projectDescription: "An optional description label for the project.",
    autoCapitalization: "Disable or enable auto-capitalization for the project."
  },
  GET_KEY: {
@@ -1080,7 +1082,8 @@ export const INTEGRATION = {
    shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
    shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
    shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
    shouldEnableDelete: "The flag to enable deletion of secrets."
    shouldEnableDelete: "The flag to enable deletion of secrets.",
    octopusDeployScopeValues: "Specifies the scope values to set on synced secrets to Octopus Deploy."
    }
  },
  UPDATE: {
@@ -54,3 +54,12 @@ export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2:

  return set1.size >= set2.size;
};

export enum PermissionConditionOperators {
  $IN = "$in",
  $ALL = "$all",
  $REGEX = "$regex",
  $EQ = "$eq",
  $NEQ = "$ne",
  $GLOB = "$glob"
}
@@ -1,7 +1,7 @@
import { Logger } from "pino";
import { z } from "zod";

import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";

export const GITLAB_URL = "https://gitlab.com";
@@ -10,7 +10,7 @@ export const GITLAB_URL = "https://gitlab.com";
export const IS_PACKAGED = (process as any)?.pkg !== undefined;

const zodStrBool = z
  .enum(["true", "false"])
  .string()
  .optional()
  .transform((val) => val === "true");
@@ -157,6 +157,15 @@ const envSchema = z
    INFISICAL_CLOUD: zodStrBool.default("false"),
    MAINTENANCE_MODE: zodStrBool.default("false"),
    CAPTCHA_SECRET: zpStr(z.string().optional()),

    // TELEMETRY
    OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
    OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()),
    OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
    OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()),
    OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
    OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),

    PLAIN_API_KEY: zpStr(z.string().optional()),
    PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
    DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
@@ -169,7 +178,10 @@ const envSchema = z
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
    HSM_SLOT: z.coerce.number().optional().default(0),

    USE_PG_QUEUE: zodStrBool.default("false"),
    SHOULD_INIT_PG_QUEUE: zodStrBool.default("false")
  })
  // To ensure that basic encryption is always possible.
  .refine(
@@ -203,11 +215,11 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;

export const getConfig = () => envCfg;
// cannot import singleton logger directly as it needs config to load various transport
export const initEnvConfig = (logger: Logger) => {
export const initEnvConfig = (logger?: CustomLogger) => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    logger.error("Invalid environment variables. Check the error below");
    logger.error(parsedEnv.error.issues);
    (logger ?? console).error("Invalid environment variables. Check the error below");
    (logger ?? console).error(parsedEnv.error.issues);
    process.exit(-1);
  }
@@ -133,3 +133,15 @@ export class ScimRequestError extends Error {
    this.status = status;
  }
}

export class OidcAuthError extends Error {
  name: string;

  error: unknown;

  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
    super(message || "Something went wrong");
    this.name = name || "OidcAuthError";
    this.error = error;
  }
}
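OidcAuthError keeps the underlying cause on an error field so the global error handler (extended further below) can return a stable message while the original failure stays available for logging. A hypothetical call site (the wrapped function is invented for illustration):

try {
  await exchangeOidcAuthorizationCode(); // hypothetical OIDC token-exchange step
} catch (err) {
  throw new OidcAuthError({ message: "Failed to authenticate with OIDC provider", error: err });
}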
@@ -1,6 +1,8 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
// logger follows a singleton pattern
// easier to use it that's all.
import { requestContext } from "@fastify/request-context";
import pino, { Logger } from "pino";
import { z } from "zod";

@@ -13,14 +15,37 @@ const logLevelToSeverityLookup: Record<string, string> = {
  "60": "CRITICAL"
};

// eslint-disable-next-line import/no-mutable-exports
export let logger: Readonly<Logger>;
// akhilmhdh:
// The logger is not placed in the main app config to avoid a circular dependency.
// The config requires the logger to display errors when an invalid environment is supplied.
// On the other hand, the logger needs the config to obtain credentials for AWS or other transports.
// By keeping the logger separate, it becomes an independent package.

// We define our own custom logger interface to enforce structure to the logging methods.

export interface CustomLogger extends Omit<Logger, "info" | "error" | "warn" | "debug"> {
  info: {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    (obj: unknown, msg?: string, ...args: any[]): void;
  };

  error: {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    (obj: unknown, msg?: string, ...args: any[]): void;
  };
  warn: {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    (obj: unknown, msg?: string, ...args: any[]): void;
  };
  debug: {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    (obj: unknown, msg?: string, ...args: any[]): void;
  };
}

// eslint-disable-next-line import/no-mutable-exports
export let logger: Readonly<CustomLogger>;

const loggerConfig = z.object({
  AWS_CLOUDWATCH_LOG_GROUP_NAME: z.string().default("infisical-log-stream"),
  AWS_CLOUDWATCH_LOG_REGION: z.string().default("us-east-1"),
@@ -62,6 +87,17 @@ const redactedKeys = [
  "config"
];

const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";

const extractReqId = () => {
  try {
    return requestContext.get("reqId") || UNKNOWN_REQUEST_ID;
  } catch (err) {
    console.log("failed to get request context", err);
    return UNKNOWN_REQUEST_ID;
  }
};

export const initLogger = async () => {
  const cfg = loggerConfig.parse(process.env);
  const targets: pino.TransportMultiOptions["targets"][number][] = [
@@ -94,6 +130,30 @@ export const initLogger = async () => {
    targets
  });

  const wrapLogger = (originalLogger: Logger): CustomLogger => {
    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
    originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) => {
      return originalLogger.child({ reqId: extractReqId() }).info(obj, msg, ...args);
    };

    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
    originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) => {
      return originalLogger.child({ reqId: extractReqId() }).error(obj, msg, ...args);
    };

    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
    originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) => {
      return originalLogger.child({ reqId: extractReqId() }).warn(obj, msg, ...args);
    };

    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
    originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) => {
      return originalLogger.child({ reqId: extractReqId() }).debug(obj, msg, ...args);
    };

    return originalLogger;
  };

  logger = pino(
    {
      mixin(_context, level) {
@@ -113,5 +173,6 @@ export const initLogger = async () => {
    // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
    transport
  );
  return logger;

  return wrapLogger(logger);
};
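wrapLogger reroutes every level method through a child logger stamped with the request id pulled from @fastify/request-context, so log correlation needs no changes at call sites. A standalone sketch of the effect (the service uses .child(); this sketch merges the reqId into the log object directly, which yields the same visible output):

import pino from "pino";

const base = pino();
const extractReqId = () => "req-abc123"; // stand-in for the request-context lookup

const originalError = base.error.bind(base);
base.error = (obj: unknown, msg?: string, ...args: unknown[]) =>
  originalError({ reqId: extractReqId(), ...(obj as object) }, msg, ...args);

base.error({ userId: 42 }, "failed to fetch project permission");
// logs a line carrying reqId:"req-abc123" alongside userId and the message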
backend/src/lib/telemetry/instrumentation.ts (new file, 91 lines)
@@ -0,0 +1,91 @@
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { Resource } from "@opentelemetry/resources";
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
import dotenv from "dotenv";

import { initEnvConfig } from "../config/env";

dotenv.config();

const initTelemetryInstrumentation = ({
  exportType,
  otlpURL,
  otlpUser,
  otlpPassword,
  otlpPushInterval
}: {
  exportType?: string;
  otlpURL?: string;
  otlpUser?: string;
  otlpPassword?: string;
  otlpPushInterval?: number;
}) => {
  diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

  const resource = Resource.default().merge(
    new Resource({
      [ATTR_SERVICE_NAME]: "infisical-core",
      [ATTR_SERVICE_VERSION]: "0.1.0"
    })
  );

  const metricReaders = [];
  switch (exportType) {
    case "prometheus": {
      const promExporter = new PrometheusExporter();
      metricReaders.push(promExporter);
      break;
    }
    case "otlp": {
      const otlpExporter = new OTLPMetricExporter({
        url: `${otlpURL}/v1/metrics`,
        headers: {
          Authorization: `Basic ${btoa(`${otlpUser}:${otlpPassword}`)}`
        },
        temporalityPreference: AggregationTemporality.DELTA
      });
      metricReaders.push(
        new PeriodicExportingMetricReader({
          exporter: otlpExporter,
          exportIntervalMillis: otlpPushInterval
        })
      );
      break;
    }
    default:
      throw new Error("Invalid OTEL export type");
  }

  const meterProvider = new MeterProvider({
    resource,
    readers: metricReaders
  });

  opentelemetry.metrics.setGlobalMeterProvider(meterProvider);

  registerInstrumentations({
    instrumentations: [getNodeAutoInstrumentations()]
  });
};

const setupTelemetry = () => {
  const appCfg = initEnvConfig();

  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
    console.log("Initializing telemetry instrumentation");
    initTelemetryInstrumentation({
      otlpURL: appCfg.OTEL_EXPORT_OTLP_ENDPOINT,
      otlpUser: appCfg.OTEL_COLLECTOR_BASIC_AUTH_USERNAME,
      otlpPassword: appCfg.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD,
      otlpPushInterval: appCfg.OTEL_OTLP_PUSH_INTERVAL,
      exportType: appCfg.OTEL_EXPORT_TYPE
    });
  }
};

void setupTelemetry();
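With the meter provider registered globally, any module can pull a meter from @opentelemetry/api and record without touching the SDK setup above. A minimal sketch (metric name and attributes invented):

import opentelemetry from "@opentelemetry/api";

const meter = opentelemetry.metrics.getMeter("example");
const loginCounter = meter.createCounter("user_logins", { unit: "1" });
loginCounter.add(1, { method: "oidc" });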
@@ -1,4 +1,7 @@
import "./lib/telemetry/instrumentation";

import dotenv from "dotenv";
import { Redis } from "ioredis";
import path from "path";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
@@ -18,6 +21,7 @@ dotenv.config();
const run = async () => {
  const logger = await initLogger();
  const appCfg = initEnvConfig(logger);

  const db = initDbConnection({
    dbConnectionUri: appCfg.DB_CONNECTION_URI,
    dbRootCert: appCfg.DB_ROOT_CERT,
@@ -52,13 +56,17 @@ const run = async () => {
  }

  const smtp = smtpServiceFactory(formatSmtpConfig());
  const queue = queueServiceFactory(appCfg.REDIS_URL);

  const queue = queueServiceFactory(appCfg.REDIS_URL, appCfg.DB_CONNECTION_URI);
  await queue.initialize();

  const keyStore = keyStoreFactory(appCfg.REDIS_URL);
  const redis = new Redis(appCfg.REDIS_URL);

  const hsmModule = initializeHsmModule();
  hsmModule.initialize();

  const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore });
  const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore, redis });
  const bootstrap = await bootstrapCheck({ db });

  // eslint-disable-next-line
@@ -1,5 +1,6 @@
import { Job, JobsOptions, Queue, QueueOptions, RepeatOptions, Worker, WorkerListener } from "bullmq";
import Redis from "ioredis";
import PgBoss, { WorkOptions } from "pg-boss";

import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
@@ -7,6 +8,8 @@ import {
  TScanFullRepoEventPayload,
  TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import {
  TFailedIntegrationSyncEmailsPayload,
  TIntegrationSyncPayload,
@@ -184,17 +187,39 @@ export type TQueueJobTypes = {
};

export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
export const queueServiceFactory = (redisUrl: string) => {
export const queueServiceFactory = (redisUrl: string, dbConnectionUrl: string) => {
  const connection = new Redis(redisUrl, { maxRetriesPerRequest: null });
  const queueContainer = {} as Record<
    QueueName,
    Queue<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
  >;

  const pgBoss = new PgBoss({
    connectionString: dbConnectionUrl,
    archiveCompletedAfterSeconds: 60,
    archiveFailedAfterSeconds: 1000, // we want to keep failed jobs for a longer time so that it can be retried
    deleteAfterSeconds: 30
  });

  const queueContainerPg = {} as Record<QueueJobs, boolean>;

  const workerContainer = {} as Record<
    QueueName,
    Worker<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
  >;

  const initialize = async () => {
    const appCfg = getConfig();
    if (appCfg.SHOULD_INIT_PG_QUEUE) {
      logger.info("Initializing pg-queue...");
      await pgBoss.start();

      pgBoss.on("error", (error) => {
        logger.error(error, "pg-queue error");
      });
    }
  };

  const start = <T extends QueueName>(
    name: T,
    jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>,
@@ -215,6 +240,27 @@ export const queueServiceFactory = (redisUrl: string) => {
    });
  };

  const startPg = async <T extends QueueName>(
    jobName: QueueJobs,
    jobsFn: (jobs: PgBoss.Job<TQueueJobTypes[T]["payload"]>[]) => Promise<void>,
    options: WorkOptions & {
      workerCount: number;
    }
  ) => {
    if (queueContainerPg[jobName]) {
      throw new Error(`${jobName} queue is already initialized`);
    }

    await pgBoss.createQueue(jobName);
    queueContainerPg[jobName] = true;

    await Promise.all(
      Array.from({ length: options.workerCount }).map(() =>
        pgBoss.work<TQueueJobTypes[T]["payload"]>(jobName, options, jobsFn)
      )
    );
  };

  const listen = <
    T extends QueueName,
    U extends keyof WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>
@@ -238,6 +284,18 @@ export const queueServiceFactory = (redisUrl: string) => {
    await q.add(job, data, opts);
  };

  const queuePg = async <T extends QueueName>(
    job: TQueueJobTypes[T]["name"],
    data: TQueueJobTypes[T]["payload"],
    opts?: PgBoss.SendOptions & { jobId?: string }
  ) => {
    await pgBoss.send({
      name: job,
      data,
      options: opts
    });
  };

  const stopRepeatableJob = async <T extends QueueName>(
    name: T,
    job: TQueueJobTypes[T]["name"],
@@ -274,5 +332,17 @@ export const queueServiceFactory = (redisUrl: string) => {
    await Promise.all(Object.values(workerContainer).map((worker) => worker.close()));
  };

  return { start, listen, queue, shutdown, stopRepeatableJob, stopRepeatableJobByJobId, clearQueue, stopJobById };
  return {
    initialize,
    start,
    listen,
    queue,
    shutdown,
    stopRepeatableJob,
    stopRepeatableJobByJobId,
    clearQueue,
    stopJobById,
    startPg,
    queuePg
  };
};
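The factory now fronts two backends: the existing BullMQ queues on Redis plus pg-boss on Postgres, gated by SHOULD_INIT_PG_QUEUE. startPg registers workerCount concurrent pg-boss workers for a job name, and queuePg enqueues onto it. A hedged usage sketch (the job name and handler are invented; batchSize and pollingIntervalSeconds are standard pg-boss work options):

const queueService = queueServiceFactory(appCfg.REDIS_URL, appCfg.DB_CONNECTION_URI);
await queueService.initialize(); // starts pg-boss only when SHOULD_INIT_PG_QUEUE is true

await queueService.startPg(
  QueueJobs.AuditLog, // assumed job name for illustration
  async (jobs) => {
    for (const job of jobs) {
      await persistAuditLog(job.data); // hypothetical handler
    }
  },
  { batchSize: 10, workerCount: 5, pollingIntervalSeconds: 1 }
);

await queueService.queuePg(QueueJobs.AuditLog, { event: "secret.read" });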
@@ -10,18 +10,22 @@ import fastifyFormBody from "@fastify/formbody";
import helmet from "@fastify/helmet";
import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
import ratelimiter from "@fastify/rate-limit";
import { fastifyRequestContext } from "@fastify/request-context";
import fastify from "fastify";
import { Redis } from "ioredis";
import { Knex } from "knex";
import { Logger } from "pino";

import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
import { CustomLogger } from "@app/lib/logger/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TQueueServiceFactory } from "@app/queue";
import { TSmtpService } from "@app/services/smtp/smtp-service";

import { globalRateLimiterCfg } from "./config/rateLimiter";
import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
import { apiMetrics } from "./plugins/api-metrics";
import { fastifyErrHandler } from "./plugins/error-handler";
import { registerExternalNextjs } from "./plugins/external-nextjs";
import { serializerCompiler, validatorCompiler, ZodTypeProvider } from "./plugins/fastify-zod";
@@ -34,19 +38,22 @@ type TMain = {
  auditLogDb?: Knex;
  db: Knex;
  smtp: TSmtpService;
  logger?: Logger;
  logger?: CustomLogger;
  queue: TQueueServiceFactory;
  keyStore: TKeyStoreFactory;
  hsmModule: HsmModule;
  redis: Redis;
};

// Run the server!
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis }: TMain) => {
  const appCfg = getConfig();

  const server = fastify({
    logger: appCfg.NODE_ENV === "test" ? false : logger,
    genReqId: () => `req-${alphaNumericNanoId(14)}`,
    trustProxy: true,

    connectionTimeout: appCfg.isHsmConfigured ? 90_000 : 30_000,
    ignoreTrailingSlash: true,
    pluginTimeout: 40_000
@@ -55,6 +62,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
  server.setValidatorCompiler(validatorCompiler);
  server.setSerializerCompiler(serializerCompiler);

  server.decorate("redis", redis);
  server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
    try {
      const strBody = body instanceof Buffer ? body.toString() : body;
@@ -86,6 +94,10 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
  // pull ip based on various proxy headers
  await server.register(fastifyIp);

  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
    await server.register(apiMetrics);
  }

  await server.register(fastifySwagger);
  await server.register(fastifyFormBody);
  await server.register(fastifyErrHandler);
@@ -99,6 +111,13 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key

  await server.register(maintenanceMode);

  await server.register(fastifyRequestContext, {
    defaultStoreValues: (req) => ({
      reqId: req.id,
      log: req.log.child({ reqId: req.id })
    })
  });

  await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });

  if (appCfg.isProductionMode) {
@@ -46,10 +46,10 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
  await createTransport(smtpCfg)
    .verify()
    .then(async () => {
      console.info("SMTP successfully connected");
      console.info(`SMTP - Verified connection to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
    })
    .catch((err) => {
      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
    .catch((err: Error) => {
      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT} - ${err.message}`);
      logger.error(err);
    });
backend/src/server/lib/schemas.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

interface SlugSchemaInputs {
  min?: number;
  max?: number;
  field?: string;
}

export const slugSchema = ({ min = 1, max = 32, field = "Slug" }: SlugSchemaInputs = {}) => {
  return z
    .string()
    .trim()
    .min(min, {
      message: `${field} field must be at least ${min} lowercase character${min === 1 ? "" : "s"}`
    })
    .max(max, {
      message: `${field} field must be at most ${max} lowercase character${max === 1 ? "" : "s"}`
    })
    .refine((v) => slugify(v, { lowercase: true }) === v, {
      message: `${field} field can only contain lowercase letters, numbers, and hyphens`
    });
};
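A short usage sketch of the new helper:

const projectSlug = slugSchema({ max: 64, field: "Project slug" });

projectSlug.parse("my-project-1"); // ok
projectSlug.parse("My Project"); // throws: lowercase letters, numbers, and hyphens only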
backend/src/server/plugins/api-metrics.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import opentelemetry from "@opentelemetry/api";
import fp from "fastify-plugin";

export const apiMetrics = fp(async (fastify) => {
  const apiMeter = opentelemetry.metrics.getMeter("API");
  const latencyHistogram = apiMeter.createHistogram("API_latency", {
    unit: "ms"
  });

  fastify.addHook("onResponse", async (request, reply) => {
    const { method } = request;
    const route = request.routerPath;
    const { statusCode } = reply;

    latencyHistogram.record(reply.elapsedTime, {
      route,
      method,
      statusCode
    });
  });
});
@@ -1,4 +1,4 @@
import { ForbiddenError } from "@casl/ability";
import { ForbiddenError, PureAbility } from "@casl/ability";
import fastifyPlugin from "fastify-plugin";
import jwt from "jsonwebtoken";
import { ZodError } from "zod";
@@ -10,6 +10,7 @@ import {
  GatewayTimeoutError,
  InternalServerError,
  NotFoundError,
  OidcAuthError,
  RateLimitError,
  ScimRequestError,
  UnauthorizedError
@@ -26,6 +27,7 @@ enum HttpStatusCodes {
  NotFound = 404,
  Unauthorized = 401,
  Forbidden = 403,
  UnprocessableContent = 422,
  // eslint-disable-next-line @typescript-eslint/no-shadow
  InternalServerError = 500,
  GatewayTimeout = 504,
@@ -38,74 +40,102 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
    if (error instanceof BadRequestError) {
      void res
        .status(HttpStatusCodes.BadRequest)
        .send({ statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
        .send({ reqId: req.id, statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
    } else if (error instanceof NotFoundError) {
      void res
        .status(HttpStatusCodes.NotFound)
        .send({ statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
        .send({ reqId: req.id, statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
    } else if (error instanceof UnauthorizedError) {
      void res
        .status(HttpStatusCodes.Unauthorized)
        .send({ statusCode: HttpStatusCodes.Unauthorized, message: error.message, error: error.name });
      void res.status(HttpStatusCodes.Unauthorized).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.Unauthorized,
        message: error.message,
        error: error.name
      });
    } else if (error instanceof DatabaseError || error instanceof InternalServerError) {
      void res
        .status(HttpStatusCodes.InternalServerError)
        .send({ statusCode: HttpStatusCodes.InternalServerError, message: "Something went wrong", error: error.name });
      void res.status(HttpStatusCodes.InternalServerError).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.InternalServerError,
        message: "Something went wrong",
        error: error.name
      });
    } else if (error instanceof GatewayTimeoutError) {
      void res
        .status(HttpStatusCodes.GatewayTimeout)
        .send({ statusCode: HttpStatusCodes.GatewayTimeout, message: error.message, error: error.name });
      void res.status(HttpStatusCodes.GatewayTimeout).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.GatewayTimeout,
        message: error.message,
        error: error.name
      });
    } else if (error instanceof ZodError) {
      void res
        .status(HttpStatusCodes.Unauthorized)
        .send({ statusCode: HttpStatusCodes.Unauthorized, error: "ValidationFailure", message: error.issues });
      void res.status(HttpStatusCodes.UnprocessableContent).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.UnprocessableContent,
        error: "ValidationFailure",
        message: error.issues
      });
    } else if (error instanceof ForbiddenError) {
      void res.status(HttpStatusCodes.Forbidden).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.Forbidden,
        error: "PermissionDenied",
        message: `You are not allowed to ${error.action} on ${error.subjectType} - ${JSON.stringify(error.subject)}`
        message: `You are not allowed to ${error.action} on ${error.subjectType}`,
        details: (error.ability as PureAbility).rulesFor(error.action as string, error.subjectType).map((el) => ({
          action: el.action,
          inverted: el.inverted,
          subject: el.subject,
          conditions: el.conditions
        }))
      });
    } else if (error instanceof ForbiddenRequestError) {
      void res.status(HttpStatusCodes.Forbidden).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.Forbidden,
        message: error.message,
        error: error.name
      });
    } else if (error instanceof RateLimitError) {
      void res.status(HttpStatusCodes.TooManyRequests).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.TooManyRequests,
        message: error.message,
        error: error.name
      });
    } else if (error instanceof ScimRequestError) {
      void res.status(error.status).send({
        reqId: req.id,
        schemas: error.schemas,
        status: error.status,
        detail: error.detail
      });
      // Handle JWT errors and make them more human-readable for the end-user.
    } else if (error instanceof OidcAuthError) {
      void res.status(HttpStatusCodes.InternalServerError).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.InternalServerError,
        message: error.message,
        error: error.name
      });
    } else if (error instanceof jwt.JsonWebTokenError) {
      const message = (() => {
        if (error.message === JWTErrors.JwtExpired) {
          return "Your token has expired. Please re-authenticate.";
        }
        if (error.message === JWTErrors.JwtMalformed) {
          return "The provided access token is malformed. Please use a valid token or generate a new one and try again.";
        }
        if (error.message === JWTErrors.InvalidAlgorithm) {
          return "The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
        }
      let errorMessage = error.message;

        return error.message;
      })();
      if (error.message === JWTErrors.JwtExpired) {
        errorMessage = "Your token has expired. Please re-authenticate.";
      } else if (error.message === JWTErrors.JwtMalformed) {
        errorMessage =
          "The provided access token is malformed. Please use a valid token or generate a new one and try again.";
      } else if (error.message === JWTErrors.InvalidAlgorithm) {
        errorMessage =
          "The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
      }

      void res.status(HttpStatusCodes.Forbidden).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.Forbidden,
        error: "TokenError",
        message
        message: errorMessage
      });
    } else {
      void res.status(HttpStatusCodes.InternalServerError).send({
        reqId: req.id,
        statusCode: HttpStatusCodes.InternalServerError,
        error: "InternalServerError",
        message: "Something went wrong"
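Every branch now echoes reqId in the response body, so a failure a client reports can be matched to server logs carrying the same id via the request-context logger. Illustrative payload for a CASL denial (all values invented):

const examplePermissionDeniedBody = {
  reqId: "req-xK4pQ9mN2aWe7B",
  statusCode: 403,
  error: "PermissionDenied",
  message: "You are not allowed to read on Identity",
  details: [{ action: "read", inverted: false, subject: "Identity", conditions: { identityId: { $eq: "identity-123" } } }]
};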
@@ -19,7 +19,7 @@ export const registerSecretScannerGhApp = async (server: FastifyZodProvider) =>

  app.on("installation", async (context) => {
    const { payload } = context;
    logger.info("Installed secret scanner to:", { repositories: payload.repositories });
    logger.info({ repositories: payload.repositories }, "Installed secret scanner to");
  });

  app.on("push", async (context) => {
@@ -201,6 +201,8 @@ import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admi
import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { totpConfigDALFactory } from "@app/services/totp/totp-config-dal";
import { totpServiceFactory } from "@app/services/totp/totp-service";
import { userDALFactory } from "@app/services/user/user-dal";
import { userServiceFactory } from "@app/services/user/user-service";
import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@@ -348,6 +350,7 @@ export const registerRoutes = async (
  const slackIntegrationDAL = slackIntegrationDALFactory(db);
  const projectSlackConfigDAL = projectSlackConfigDALFactory(db);
  const workflowIntegrationDAL = workflowIntegrationDALFactory(db);
  const totpConfigDAL = totpConfigDALFactory(db);

  const externalGroupOrgRoleMappingDAL = externalGroupOrgRoleMappingDALFactory(db);

@@ -391,13 +394,14 @@ export const registerRoutes = async (
    permissionService
  });

  const auditLogQueue = auditLogQueueServiceFactory({
  const auditLogQueue = await auditLogQueueServiceFactory({
    auditLogDAL,
    queueService,
    projectDAL,
    licenseService,
    auditLogStreamDAL
  });

  const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
  const auditLogStreamService = auditLogStreamServiceFactory({
    licenseService,
@@ -511,12 +515,19 @@ export const registerRoutes = async (
    projectMembershipDAL
  });

  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL });
  const totpService = totpServiceFactory({
    totpConfigDAL,
    userDAL,
    kmsService
  });

  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, totpService });
  const passwordService = authPaswordServiceFactory({
    tokenService,
    smtpService,
    authDAL,
    userDAL
    userDAL,
    totpConfigDAL
  });

  const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL, projectDAL });
@@ -1369,7 +1380,8 @@ export const registerRoutes = async (
    workflowIntegration: workflowIntegrationService,
    migration: migrationService,
    externalGroupOrgRoleMapping: externalGroupOrgRoleMappingService,
    projectTemplate: projectTemplateService
    projectTemplate: projectTemplateService,
    totp: totpService
  });

  const cronJobs: CronJob[] = [];
Some files were not shown because too many files have changed in this diff.