Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-17 19:37:38 +00:00)

Compare commits: 364 commits, maidul-ABA ... daniel/cop
Commits (SHA1):
5277a50b3e, 2c402fbbb6, 92ce05283b, 39d92ce6ff, 44a026446e, 539e5b1907, 44b02d5324, 71fb6f1d11, e64100fab1, 5bcf07b32b,
3b0c48052b, df50e3b0f9, bdf2ae40b6, b6c05a2f25, 960efb9cf9, aa8d58abad, cfb0cc4fea, 7712df296c, 7c38932121, 69ad9845e1,
7321c237d7, 32430a6a16, f034adba76, 463eb0014e, 21403f6fe5, 2f9e542b31, 089d6812fd, 71c9c0fa1e, 46ad1d47a9, 2b977eeb33,
a692148597, b762816e66, cf275979ba, 64bfa4f334, e3eb14bfd9, 24b50651c9, 1cd459fda7, 38917327d9, d7b494c6f8, 93208afb36,
1a084d8fcf, dd4f133c6c, c41d27e1ae, 1866ed8d23, 7b3b232dde, 9d618b4ae9, 5330ab2171, 662e588c22, 90057d80ff, 1eda7aaaac,
00dcadbc08, 7a7289ebd0, e5d4677fd6, bce3f3d676, 300372fa98, 47a4f8bae9, 863719f296, 7317dc1cf5, 75df898e78, 0de6add3f7,
0c008b6393, 0c3894496c, 35fbd5d49d, d03b453e3d, 96e331b678, d4d468660d, 75a4965928, 660c09ded4, b5287d91c0, 6a17763237,
f2bd3daea2, 7f70f96936, 73e0a54518, 0d295a2824, 9a62efea4f, 506c30bcdb, 735ad4ff65, 41e36dfcef, 421d8578b7, 6685f8aa0a,
d6c37c1065, 54f3f94185, 907537f7c0, 61263b9384, d71c85e052, b6d8be2105, 0693f81d0a, 61d516ef35, 31fc64fb4c, 8bf7e4c4d1,
2027d4b44e, d401c9074e, afe35dbbb5, 6ff1602fd5, 6603364749, 53bea22b85, 7c84adc1c2, fa8d6735a1, a6137f267d, d521ee7b7e,
827931e416, faa83344a7, 3be3d807d2, 9f7ea3c4e5, e67218f170, 269c40c67c, 089a7e880b, 64ec741f1a, c98233ddaf, ae17981c41,
6c49c7da3c, 2de04b6fe5, 5c9ec1e4be, ba89491d4c, 483e596a7a, 65f122bd41, 682b552fdc, d4cfd0b6ed, ba1fd8a3f7, e8f09d2c7b,
774371a218, c4b54de303, 433971a72d, 4acf9413f0, f0549cab98, d75e49dce5, 8819abd710, 796f76da46, d6e1ed4d1e, 1295b68d80,
c79f84c064, d0c50960ef, 85089a08e1, bf97294dad, 4053078d95, 4ba3899861, 6bae3628c0, 4cb935dae7, ccad684ab2, fd77708cad,
9aebd712d1, 05f07b25ac, 5b0dbf04b2, b050db84ab, 8fef6911f1, 44ba31a743, 6bdbac4750, 60fb195706, c8109b4e84, 1f2b0443cc,
dd1cabf9f6, 8b781b925a, ddcf5b576b, 7138b392f2, bfce1021fb, 93c0313b28, 8cfc217519, d272c6217a, 2fe2ddd9fc, e330ddd5ee,
7aba9c1a50, 4cd8e0fa67, ea3d164ead, df468e4865, 66e96018c4, 3b02eedca6, a55fe2b788, 5d7a267f1d, b16ab6f763, 2d2ad0724f,
e90efb7fc8, 17d5e4bdab, f22a5580a6, 334a728259, 4a3143e689, 14810de054, 8cfcbaa12c, 0e946f73bd, 7b8551f883, 3b1ce86ee6,
c649661133, 70e44d04ef, 0dddd58be1, 148f522c58, d4c911a28f, 603fcd8ab5, a1474145ae, 7c055f71f7, 14884cd6b0, 98fd146e85,
1d3dca11e7, 22f8a3daa7, 395b3d9e05, 1041e136fb, 21024b0d72, 00e68dc0bf, 5e068cd8a0, abdf8f46a3, 1cf046f6b3, 0fda6d6f4d,
8d4115925c, d0b3c6b66a, a1685af119, 8d4a06e9e4, 6dbe3c8793, a3ec1a27de, 472f02e8b1, 3989646b80, 472f5eb8b4, f5b039f939,
b7b3d07e9f, 891a1ea2b9, a807f0cf6c, cfc0b2fb8d, f096a567de, 65d642113d, 92e7e90c21, f9f6ec0a8d, d9621b0b17, d80a70731d,
ada63b9e7d, bd99b4e356, 7db0bd7daa, 8bc538af93, 8ef078872e, d5f718c6ad, 5f93016d22, f220246eb4, 829b399cda, 3f6a0c77f1,
f91f9c9487, f0d19e4701, 9e4b66e215, 7eeff6c406, 132c3080bb, 8a14914bc3, bf09fa33fa, a87e7b792c, e8ca020903, a603938488,
cff7981fe0, b39d5c6682, 829ae7d3c0, 19c26c680c, dd1f1d07cc, 027b200b1a, c3f8c55672, 75aeef3897, e761e65322, c97fe77aec,
370ed45abb, 3e16d7e160, 6bf4b4a380, 61f786e8d8, 26064e3a08, 9b246166a1, 9dedaa6779, 8eab7d2f01, 4e796e7e41, c6fa647825,
496cebb08f, 33db6df7f2, 88d25e97e9, 4ad9fa1ad1, 1642fb42d8, 3983c2bc4a, 34d87ca30f, 12b6f27151, ea426e8b2d, 4d567f0b08,
6548372e3b, 77af640c4c, 90f85152bc, cfa8770bdc, be8562824d, 6956d14e2e, 4f1fe8a9fa, b0031b71e0, bae7c6c3d7, 7503876ca0,
36b5a3dc90, dfe36f346f, b1b61842c6, f9ca9b51b2, e8b33f27fc, 7e7e6ade5c, 4010817916, eea367c3bc, 860ebb73a9, 56567ee7c9,
1cd17a451c, 6b7bc2a3c4, cb52568ebd, 9d30fb3870, 161ac5e097, bb5b585cf6, fa94191c40, 6a5eabc411, c956a0f91f, df7b55606e,
5f14b27f41, 02b2395276, 402fa2b0e0, 3725241f52, 10b457a695, 3912e2082d, 7dd6eac20a, 5664e1ff26, a27a428329, b196251c19,
b18d8d542f, 3c287600ab, 759d11ff21, 2bd817765c, 7aa9c5dd00, b693c035ce, c65a991943, 3a3811cb3c, 332ca61f5d, 64f43e59d0,
ccaf4c00af, e3ba1c59bf, ce0bc191d8, 489ccb8e15, ae8f695b6f, 19357d4bd7, 776d0a0fe1, 85dec28667, 21ea7dd317, 1309f30af9,
af0ec2400d, 770e73e40b, 39fdeabdea, 8401048daf, 335a87d856, 1add9dd965, 7245aaa9ec, d32f69e052, 726477e3d7, a4ca996a1b,
303312fe91, f3f2879d6d, d0f3d96b3e, 70d2a21fbc, 418ae42d94, 273c6b3842, 6be8d5d2a7, 9eb7640755, 741138c4bd, 3841394eb7,
3552119c7d, 7a46725523, 3bc39c6cec, b5b1e57fe7, 1a5f66fe46, ac0cb6d96f, f71f894de8, 66d2cc8947, e034aa381a, fc3a409164,
ffc58b0313, 9a7e05369c, 33b49f4466, 60895537a7
Files changed (condensed from the file tree):
- .env.example, mfa-authenticator.png, mfa-email.png
- .github/workflows: deployment-pipeline.yml, release-standalone-docker-img-postgres-offical.yml, release_build_infisical_cli.yml
- .gitignore, .husky, .infisicalignore, Dockerfile.fips.standalone-infisical, Dockerfile.standalone-infisical, Makefile
- backend: Dockerfile, Dockerfile.dev, main.ts, e2e-test, package-lock.json, package.json, scripts, src (@types; db migrations incl. 20231222172455_integration.ts, 20240802181855_ca-cert-version.ts, 20241014084900_identity-multiple-auth-methods.ts, 20241101174939_project-templates.ts, 20241107112632_skip-bootstrap-cert-validation-est.ts, 20241110032223_add-missing-oidc-org-cascade-reference.ts, 20241111175154_kms-root-cfg-hsm.ts, 20241112082701_add-totp-support.ts, 20241119143026_add-project-descripton.ts, 20241121131344_make-identity-metadata-not-nullable-again.ts; schemas incl. project-templates.ts, totp-configs.ts, kms-root-config.ts; seeds; ee routes and services: audit-log, certificate-est, dynamic-secret, group, hsm, ldap-config, license, oidc, permission, project-template, rate-limit, secret-approval-request, secret-rotation, secret-scanning; keystore; lib; server plugins and routes v1-v3; services: auth, certificate-template, cmek, external-group-org-role-mapping, external-migration, identity and identity auth methods (aws, azure, gcp, kubernetes, oidc, token, universal), integration-auth, integration, kms, org, project, secret-folder, secret-sharing, secret-v2-bridge, secret, smtp, super-admin, telemetry, totp, user, webhook)
- cli/packages: api, cmd, util
- company, docker-compose.dev.yml, docker-compose.prod.yml
- docs: api-reference endpoints (project-templates, secrets), changelog, cli, documentation guides, platform (access-controls, admin-panel, audit-log-streams, dynamic-secrets incl. aws-elasticache, aws-iam, azure-entra-id, cassandra, elastic-search, ldap, mongo-atlas, mongo-db, mssql, mysql, oracle, postgresql, rabbit-mq, redis, sap-ase, sap-hana, snowflake, totp; identities; kms; mfa; pki; project-templates; secret-rotation; sso), images (integrations: bitbucket, octopus-deploy; platform: dynamic-secrets incl. sap-ase, sap-hana, snowflake, totp; kms/hsm; pki/est; project-templates), integrations, internals, mint.json, self-hosting (configuration, deployment-options, ee, faq, reference-architectures)
- frontend: next.config.js, package-lock.json, package.json, usePagination.tsx, public, src (components: dashboard, mfa, navigation, notifications, utilities, v2; context; helpers; hooks/api incl. projectTemplates; layouts/AppLayout; lib/schemas; pages; reactQuery.tsx; styles; views: IntegrationsPage, Login, Org (AuditLogsPage, IdentityPage, MembersPage, RolePage), Project, SecretMainPage, SecretOverviewPage, Settings incl. ProjectTemplatesTab, Signup, admin/DashboardPage)
- helm-charts/infisical-standalone-postgres, npm, otel-collector-config.yaml, package-lock.json, package.json, prometheus.dev.yml
.env.example (10 changes)

@@ -74,7 +74,17 @@ CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=

OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=

PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=

SSL_CLIENT_CERTIFICATE_HEADER_KEY=

ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
.github/workflows/deployment-pipeline.yml (12 changes, vendored)

@@ -170,6 +170,12 @@ jobs:
      - uses: twingate/github-action@v1
        with:
          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.amazonaws.com
          aws-region: eu-central-1
          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Node.js environment
@@ -183,12 +189,6 @@ jobs:
          cd backend
          npm install
          npm run migration:latest
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          audience: sts.eu-central-1.amazonaws.com
          aws-region: eu-central-1
          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
.github/workflows/release-standalone-docker-img-postgres-offical.yml

@@ -1,62 +1,115 @@
name: Release standalone docker image
on:
  push:
    tags:
      - "infisical/v*.*.*-postgres"
  push:
    tags:
      - "infisical/v*.*.*-postgres"

jobs:
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml
  infisical-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical:latest-postgres
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
  infisical-tests:
    name: Run tests before deployment
    # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
    uses: ./.github/workflows/run-backend-tests.yml

  infisical-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical:latest-postgres
            infisical/infisical:${{ steps.commit.outputs.short }}
            infisical/infisical:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

  infisical-fips-standalone:
    name: Build infisical standalone image postgres
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 📦 Install dependencies to test all dependencies
        run: npm ci --only-production
        working-directory: backend
      - name: version output
        run: |
          echo "Output Value: ${{ steps.version.outputs.major }}"
          echo "Output Value: ${{ steps.version.outputs.minor }}"
          echo "Output Value: ${{ steps.version.outputs.patch }}"
          echo "Output Value: ${{ steps.version.outputs.version }}"
          echo "Output Value: ${{ steps.version.outputs.version_type }}"
          echo "Output Value: ${{ steps.version.outputs.increment }}"
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 📦 Build backend and export to Docker
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          tags: |
            infisical/infisical-fips:latest-postgres
            infisical/infisical-fips:${{ steps.commit.outputs.short }}
            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
          platforms: linux/amd64,linux/arm64
          file: Dockerfile.fips.standalone-infisical
          build-args: |
            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
.github/workflows/release_build_infisical_cli.yml

@@ -10,8 +10,7 @@ on:

permissions:
  contents: write
  # packages: write
  # issues: write

jobs:
  cli-integration-tests:
    name: Run tests before deployment
@@ -26,6 +25,63 @@ jobs:
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-20.04
    env:
      working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV

      - name: Print version
        run: echo ${{ env.CLI_VERSION }}

      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts

      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version

      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc

          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack

      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-20.04
    needs: [cli-integration-tests]
.gitignore (2 changes, vendored)

@@ -71,3 +71,5 @@ frontend-build
cli/infisical-merge
cli/test/infisical-merge
/backend/binary

/npm/bin
.husky (Git hook)

@@ -1,6 +1,12 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
    echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
    exit 1
fi

npx lint-staged

infisical scan git-changes --staged -v
.infisicalignore

@@ -6,3 +6,4 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
Dockerfile.fips.standalone-infisical (194 changes, new file)

@@ -0,0 +1,194 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base

FROM base AS frontend-dependencies
WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

# Production image
FROM base AS frontend-runner
WORKDIR /app

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static

USER non-root-user

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##
FROM base AS backend-build

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

WORKDIR /app

# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build

# Production stage
FROM base AS backend-runner

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

RUN mkdir frontend-build

# Production stage
FROM base AS production

# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
    ca-certificates \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /backend

ENV TELEMETRY_ENABLED true

EXPOSE 8080
EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
Dockerfile.standalone-infisical

@@ -72,6 +72,17 @@ RUN addgroup --system --gid 1001 nodejs \

WORKDIR /app

# Install all required dependencies for build
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -85,6 +96,20 @@ FROM base AS backend-runner

WORKDIR /app

# Install all required dependencies for runtime
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -94,11 +119,32 @@ RUN mkdir frontend-build

# Production stage
FROM base AS production

RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.31.1 && apk add --no-cache git

WORKDIR /

# Install all required runtime dependencies
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    bash \
    curl \
    git

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

@@ -121,7 +167,6 @@ ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

@@ -143,4 +188,4 @@ EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]
Makefile (4 changes)

@@ -10,6 +10,9 @@ up-dev:
up-dev-ldap:
	docker compose -f docker-compose.dev.yml --profile ldap up --build

up-dev-metrics:
	docker compose -f docker-compose.dev.yml --profile metrics up --build

up-prod:
	docker-compose -f docker-compose.prod.yml up --build

@@ -27,4 +30,3 @@ reviewable-api:
	npm run type:check

reviewable: reviewable-ui reviewable-api
backend/Dockerfile

@@ -3,6 +3,21 @@ FROM node:20-alpine AS build

WORKDIR /app

# Required for pkcs11js
RUN apk --update add \
    python3 \
    make \
    g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

COPY package*.json ./
RUN npm ci --only-production

@@ -11,12 +26,28 @@ RUN npm run build

# Production stage
FROM node:20-alpine

WORKDIR /app

ENV npm_config_cache /home/node/.npm

COPY package*.json ./

RUN apk --update add \
    python3 \
    make \
    g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .
backend/Dockerfile.dev

@@ -1,5 +1,56 @@
FROM node:20-alpine

# ? Setup a test SoftHSM module. In production a real HSM is used.

ARG SOFTHSM2_VERSION=2.5.0

ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
    alpine-sdk \
    autoconf \
    automake \
    git \
    libtool \
    openssl-dev \
    python3 \
    make \
    g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# build and install SoftHSM2

RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
    && sh autogen.sh \
    && ./configure --prefix=/usr/local --disable-gost \
    && make \
    && make install

WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# install pkcs11-tool
RUN apk --update add opensc

RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

# ? App setup

RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.8.1 && apk add --no-cache git
@@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
  return {
    sendMail: async (data) => {
      storage.push(data);
    },
    verify: async () => {
      return true;
    }
  };
};
@@ -34,7 +34,7 @@ describe("Identity v1", async () => {
  test("Create identity", async () => {
    const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
    expect(newIdentity.name).toBe("mac1");
    expect(newIdentity.authMethod).toBeNull();
    expect(newIdentity.authMethods).toEqual([]);

    await deleteIdentity(newIdentity.id);
  });
@@ -42,7 +42,7 @@ describe("Identity v1", async () => {
  test("Update identity", async () => {
    const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
    expect(newIdentity.name).toBe("mac1");
    expect(newIdentity.authMethod).toBeNull();
    expect(newIdentity.authMethods).toEqual([]);

    const updatedIdentity = await testServer.inject({
      method: "PATCH",
@@ -118,9 +118,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
        value: "stage-value"
      });

      // wait for 5 second for replication to finish
      // wait for 10 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
@@ -173,9 +173,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
        value: "prod-value"
      });

      // wait for 5 second for replication to finish
      // wait for 10 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
@@ -343,9 +343,9 @@ describe.each([{ path: "/" }, { path: "/deep" }])(
        value: "prod-value"
      });

      // wait for 5 second for replication to finish
      // wait for 10 second for replication to finish
      await new Promise((resolve) => {
        setTimeout(resolve, 5000); // time to breathe for db
        setTimeout(resolve, 10000); // time to breathe for db
      });

      const secret = await getSecretByNameV2({
@@ -16,6 +16,7 @@ import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -54,7 +55,12 @@
    const smtp = mockSmtpServer();
    const queue = queueServiceFactory(cfg.REDIS_URL);
    const keyStore = keyStoreFactory(cfg.REDIS_URL);
    const server = await main({ db, smtp, logger, queue, keyStore });

    const hsmModule = initializeHsmModule();
    hsmModule.initialize();

    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });

    // @ts-expect-error type
    globalThis.testServer = server;
    // @ts-expect-error type
backend/package-lock.json (4280 changes, generated)
File diff suppressed because it is too large.
backend/package.json

@@ -44,12 +44,13 @@
    "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
    "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
    "generate:component": "tsx ./scripts/create-backend-file.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts",
    "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
    "migration:new": "tsx ./scripts/create-migration.ts",
    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
@@ -58,6 +59,7 @@
    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
    "migrate:org": "tsx ./scripts/migrate-organization.ts",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@@ -84,6 +86,7 @@
    "@types/passport-google-oauth20": "^2.0.14",
    "@types/pg": "^8.10.9",
    "@types/picomatch": "^2.3.3",
    "@types/pkcs11js": "^1.0.4",
    "@types/prompt-sync": "^4.2.3",
    "@types/resolve": "^1.20.6",
    "@types/safe-regex": "^1.1.6",
@@ -129,6 +132,7 @@
    "@fastify/multipart": "8.3.0",
    "@fastify/passport": "^2.4.0",
    "@fastify/rate-limit": "^9.0.0",
    "@fastify/request-context": "^5.1.0",
    "@fastify/session": "^10.7.0",
    "@fastify/swagger": "^8.14.0",
    "@fastify/swagger-ui": "^2.1.0",
@@ -137,6 +141,15 @@
    "@octokit/plugin-retry": "^5.0.5",
    "@octokit/rest": "^20.0.2",
    "@octokit/webhooks-types": "^7.3.1",
    "@octopusdeploy/api-client": "^3.4.1",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
    "@opentelemetry/exporter-prometheus": "^0.55.0",
    "@opentelemetry/instrumentation": "^0.55.0",
    "@opentelemetry/resources": "^1.28.0",
    "@opentelemetry/sdk-metrics": "^1.28.0",
    "@opentelemetry/semantic-conventions": "^1.27.0",
    "@peculiar/asn1-schema": "^2.3.8",
    "@peculiar/x509": "^1.12.1",
    "@serdnam/pino-cloudwatch-transport": "^1.0.4",
@@ -156,11 +169,12 @@
    "connect-redis": "^7.1.1",
    "cron": "^3.1.7",
    "dotenv": "^16.4.1",
    "fastify": "^4.26.0",
    "fastify": "^4.28.1",
    "fastify-plugin": "^4.5.1",
    "google-auth-library": "^9.9.0",
    "googleapis": "^137.1.0",
    "handlebars": "^4.7.8",
    "hdb": "^0.19.10",
    "ioredis": "^5.3.2",
    "jmespath": "^0.16.0",
    "jsonwebtoken": "^9.0.2",
@@ -176,9 +190,11 @@
    "mysql2": "^3.9.8",
    "nanoid": "^3.3.4",
    "nodemailer": "^6.9.9",
    "odbc": "^2.4.9",
    "openid-client": "^5.6.5",
    "ora": "^7.0.1",
    "oracledb": "^6.4.0",
    "otplib": "^12.0.1",
    "passport-github": "^1.1.0",
    "passport-gitlab2": "^5.0.0",
    "passport-google-oauth20": "^2.0.0",
@@ -187,6 +203,7 @@
    "pg-query-stream": "^4.5.3",
    "picomatch": "^3.0.1",
    "pino": "^8.16.2",
    "pkcs11js": "^2.1.6",
    "pkijs": "^3.2.4",
    "posthog-node": "^3.6.2",
    "probot": "^13.3.8",
@@ -195,6 +212,7 @@
    "scim2-parse-filter": "^0.2.10",
    "sjcl": "^1.0.8",
    "smee-client": "^2.0.0",
    "snowflake-sdk": "^1.14.0",
    "tedious": "^18.2.1",
    "tweetnacl": "^1.0.3",
    "tweetnacl-util": "^0.15.1",
backend/scripts/migrate-organization.ts

@@ -8,61 +8,80 @@ const prompt = promptSync({
  sigint: true
});

const sanitizeInputParam = (value: string) => {
  // Escape double quotes and wrap the entire value in double quotes
  if (value) {
    return `"${value.replace(/"/g, '\\"')}"`;
  }
  return '""';
};

const exportDb = () => {
  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
  const exportPort = sanitizeInputParam(
    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
  );
  const exportUser = sanitizeInputParam(
    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
  );
  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
  const exportDatabase = sanitizeInputParam(
    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
  );

  // we do not include the audit_log and secret_sharing entries
  execSync(
    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
      __dirname,
      "../src/db/dump.sql"
      "../src/db/backup.dump"
    )}`,
    { stdio: "inherit" }
  );
};

const importDbForOrg = () => {
  const importHost = prompt("Enter your Postgres Host to migrate to: ");
  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
  const orgId = prompt("Enter the organization ID to migrate: ");
  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
  const importUser = sanitizeInputParam(
    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
  );
  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
  const importDatabase = sanitizeInputParam(
    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
  );
  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));

  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
    console.log("File not found, please export the database first.");
    return;
  }

  execSync(
    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
      __dirname,
      "../src/db/dump.sql"
    )}`
      "../src/db/backup.dump"
    )}`,
    { maxBuffer: 1024 * 1024 * 4096 }
  );

  execSync(
    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
  );

  // delete global/instance-level resources not relevant to the organization to migrate
  // users
  execSync(
    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
  );

  // identities
  execSync(
    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
  );

  // reset slack configuration in superAdmin
  execSync(
    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
  );

  console.log("Organization migrated successfully.");
backend/src/@types/fastify-request-context.d.ts (7 changes, new file, vendored)

@@ -0,0 +1,7 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    requestId: string;
  }
}
backend/src/@types/fastify-zod.d.ts (4 changes, vendored)

@@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";

import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";

declare global {
@@ -8,7 +8,7 @@ declare global {
    RawServerDefault,
    RawRequestDefaultExpression<RawServerDefault>,
    RawReplyDefaultExpression<RawServerDefault>,
    Readonly<Logger>,
    Readonly<CustomLogger>,
    ZodTypeProvider
  >;
6
backend/src/@types/fastify.d.ts
vendored
6
backend/src/@types/fastify.d.ts
vendored
@@ -18,6 +18,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
 import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
 import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
 import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
@@ -43,6 +44,7 @@ import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
 import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
 import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
 import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
+import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
 import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
 import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
 import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
@@ -77,6 +79,7 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
 import { TSlackServiceFactory } from "@app/services/slack/slack-service";
 import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
 import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
+import { TTotpServiceFactory } from "@app/services/totp/totp-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 import { TUserServiceFactory } from "@app/services/user/user-service";
 import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
@@ -183,12 +186,15 @@ declare module "fastify" {
     rateLimit: TRateLimitServiceFactory;
     userEngagement: TUserEngagementServiceFactory;
     externalKms: TExternalKmsServiceFactory;
+    hsm: THsmServiceFactory;
     orgAdmin: TOrgAdminServiceFactory;
     slack: TSlackServiceFactory;
     workflowIntegration: TWorkflowIntegrationServiceFactory;
     cmek: TCmekServiceFactory;
     migration: TExternalMigrationServiceFactory;
     externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
+    projectTemplate: TProjectTemplateServiceFactory;
+    totp: TTotpServiceFactory;
   };
   // this is exclusively for middlewares in which we need to inject data;
   // everywhere else, access via the service layer
4 backend/src/@types/hdb.d.ts vendored Normal file
@@ -0,0 +1,4 @@
declare module "hdb" {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
  function createClient(options): any;
}
12 backend/src/@types/knex.d.ts vendored
@@ -200,6 +203,9 @@ import {
   TProjectSlackConfigsInsert,
   TProjectSlackConfigsUpdate,
   TProjectsUpdate,
+  TProjectTemplates,
+  TProjectTemplatesInsert,
+  TProjectTemplatesUpdate,
   TProjectUserAdditionalPrivilege,
   TProjectUserAdditionalPrivilegeInsert,
   TProjectUserAdditionalPrivilegeUpdate,
@@ -311,6 +314,9 @@ import {
   TSuperAdmin,
   TSuperAdminInsert,
   TSuperAdminUpdate,
+  TTotpConfigs,
+  TTotpConfigsInsert,
+  TTotpConfigsUpdate,
   TTrustedIps,
   TTrustedIpsInsert,
   TTrustedIpsUpdate,
@@ -818,5 +824,11 @@ declare module "knex/types/tables" {
       TExternalGroupOrgRoleMappingsInsert,
       TExternalGroupOrgRoleMappingsUpdate
     >;
+    [TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
+      TProjectTemplates,
+      TProjectTemplatesInsert,
+      TProjectTemplatesUpdate
+    >;
+    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
   }
 }
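
`CompositeTableType` registers separate select/insert/update shapes for a table, so Knex types each operation differently. A minimal sketch of what this buys (not code from the PR; the queries are illustrative):

import knexFactory from "knex";
import { TableName } from "@app/db/schemas";

const db = knexFactory({ client: "pg", connection: process.env.DB_CONNECTION_URI });

async function demo(userId: string) {
  // Select results are typed as TTotpConfigs rows.
  const cfg = await db(TableName.TotpConfig).where({ userId }).first();

  // Updates are typed as TTotpConfigsUpdate; referencing a column that
  // does not exist on the table is a compile-time error.
  await db(TableName.TotpConfig).where({ userId }).update({ isVerified: true });

  return cfg;
}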
@@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise<void> {
     t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
     t.string("integration").notNullable();
     t.string("teamId"); // vercel-specific
-    t.string("url"); // for self hosted
+    t.string("url"); // for self-hosted
     t.string("namespace"); // hashicorp specific
     t.string("accountId"); // netlify
     t.text("refreshCiphertext");
@@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise<void> {
   await knex.schema.createTable(TableName.Integration, (t) => {
     t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
     t.boolean("isActive").notNullable();
-    t.string("url"); // self hosted
+    t.string("url"); // self-hosted
     t.string("app"); // name of app in provider
     t.string("appId");
     t.string("targetEnvironment");
@@ -64,23 +64,25 @@ export async function up(knex: Knex): Promise<void> {
   }

   if (await knex.schema.hasTable(TableName.Certificate)) {
-    await knex.schema.alterTable(TableName.Certificate, (t) => {
-      t.uuid("caCertId").nullable();
-      t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
-    });
+    const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
+    if (!hasCaCertIdColumn) {
+      await knex.schema.alterTable(TableName.Certificate, (t) => {
+        t.uuid("caCertId").nullable();
+        t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
+      });

-    await knex.raw(`
+      await knex.raw(`
       UPDATE "${TableName.Certificate}" cert
       SET "caCertId" = (
         SELECT caCert.id
         FROM "${TableName.CertificateAuthorityCert}" caCert
         WHERE caCert."caId" = cert."caId"
-      )
-    `);
+      )`);

-    await knex.schema.alterTable(TableName.Certificate, (t) => {
-      t.uuid("caCertId").notNullable().alter();
-    });
+      await knex.schema.alterTable(TableName.Certificate, (t) => {
+        t.uuid("caCertId").notNullable().alter();
+      });
+    }
   }
 }
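
The change above makes the migration idempotent: guarding the alter with `hasColumn` lets it be re-run safely if a previous attempt only partially applied. A generic sketch of the pattern (table and column names are hypothetical, not from this migration):

import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  // Check before altering so re-running the migration is a no-op.
  const hasCol = await knex.schema.hasColumn("my_table", "my_column");
  if (!hasCol) {
    await knex.schema.alterTable("my_table", (t) => {
      t.string("my_column").nullable();
    });
  }
}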
@@ -0,0 +1,78 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

const BATCH_SIZE = 10_000;

export async function up(knex: Knex): Promise<void> {
  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

  if (!hasAuthMethodColumnAccessToken) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.string("authMethod").nullable();
    });

    // First, remove access tokens whose identity has no auth method configured.
    // ! We delete all access tokens where the identity has no auth method set!
    // ! This means unconfigured identities that somehow have access tokens will have those tokens deleted.
    await knex(TableName.IdentityAccessToken)
      .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
      .whereNull(`${TableName.Identity}.authMethod`)
      .delete();

    let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
      .whereNull("authMethod")
      .limit(BATCH_SIZE)
      .select("id");
    let totalUpdated = 0;

    do {
      const batchIds = nullableAccessTokens.map((token) => token.id);

      // ! Update the authMethod column for the current batch
      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.IdentityAccessToken)
        .whereIn("id", batchIds)
        .update({
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore because schema generation happens after this migration
          authMethod: knex(TableName.Identity)
            .select("authMethod")
            .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
            .whereNotNull("authMethod")
            .first()
        });

      // eslint-disable-next-line no-await-in-loop
      nullableAccessTokens = await knex(TableName.IdentityAccessToken)
        .whereNull("authMethod")
        .limit(BATCH_SIZE)
        .select("id");

      totalUpdated += batchIds.length;
      console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
    } while (nullableAccessTokens.length > 0);

    // Finally, set authMethod to notNullable after populating the column.
    // This will fail if the data was not populated correctly, so it's safe.
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.string("authMethod").notNullable().alter();
    });
  }

  // ! We aren't dropping the authMethod column from the Identity itself, because we want to be able to easily roll back for the time being.
}

// eslint-disable-next-line @typescript-eslint/no-unused-vars
export async function down(knex: Knex): Promise<void> {
  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");

  if (hasAuthMethodColumnAccessToken) {
    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
      t.dropColumn("authMethod");
    });
  }
}

const config = { transaction: false };
export { config };
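
Note the exported `config` with `transaction: false`: Knex then runs this migration outside a wrapping transaction, which keeps the batched updates from holding one long-lived lock. The batching itself is a reusable backfill pattern; a generic sketch with hypothetical "widgets"/"status" names:

import { Knex } from "knex";

const BATCH_SIZE = 10_000;

// Backfill a nullable column in fixed-size batches instead of one huge UPDATE,
// keeping each statement short and the table responsive during the migration.
export async function backfillStatus(knex: Knex): Promise<void> {
  let batch = await knex("widgets").whereNull("status").limit(BATCH_SIZE).select("id");

  while (batch.length > 0) {
    const ids = batch.map((row: { id: string }) => row.id);
    // eslint-disable-next-line no-await-in-loop
    await knex("widgets").whereIn("id", ids).update({ status: "active" });
    // eslint-disable-next-line no-await-in-loop
    batch = await knex("widgets").whereNull("status").limit(BATCH_SIZE).select("id");
  }
}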
@@ -0,0 +1,28 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.ProjectTemplates))) {
    await knex.schema.createTable(TableName.ProjectTemplates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name", 32).notNullable();
      t.string("description").nullable();
      t.jsonb("roles").notNullable();
      t.jsonb("environments").notNullable();
      t.uuid("orgId").notNullable().references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.ProjectTemplates);
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.ProjectTemplates)) {
    await dropOnUpdateTrigger(knex, TableName.ProjectTemplates);

    await knex.schema.dropTable(TableName.ProjectTemplates);
  }
}
@@ -0,0 +1,35 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
    TableName.CertificateTemplateEstConfig,
    "disableBootstrapCertValidation"
  );

  const hasCaChainCol = await knex.schema.hasColumn(TableName.CertificateTemplateEstConfig, "encryptedCaChain");

  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
    if (!hasDisableBootstrapCertValidationCol) {
      t.boolean("disableBootstrapCertValidation").defaultTo(false).notNullable();
    }

    if (hasCaChainCol) {
      t.binary("encryptedCaChain").nullable().alter();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
    TableName.CertificateTemplateEstConfig,
    "disableBootstrapCertValidation"
  );

  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
    if (hasDisableBootstrapCertValidationCol) {
      t.dropColumn("disableBootstrapCertValidation");
    }
  });
}
@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization);
    });
  }
}
23 backend/src/db/migrations/20241111175154_kms-root-cfg-hsm.ts Normal file
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");

  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
    if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE");
    if (!hasTimestampsCol) t.timestamps(true, true, true);
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");

  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
    if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy");
    if (hasTimestampsCol) t.dropTimestamps(true);
  });
}
54 backend/src/db/migrations/20241112082701_add-totp-support.ts Normal file
@@ -0,0 +1,54 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
    await knex.schema.createTable(TableName.TotpConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("userId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.boolean("isVerified").defaultTo(false).notNullable();
      t.binary("encryptedRecoveryCodes").notNullable();
      t.binary("encryptedSecret").notNullable();
      t.timestamps(true, true, true);
      t.unique("userId");
    });

    await createOnUpdateTrigger(knex, TableName.TotpConfig);
  }

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (!doesOrgMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (!doesUserSelectedMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.TotpConfig);
  await knex.schema.dropTableIfExists(TableName.TotpConfig);

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (doesOrgMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (doesUserSelectedMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });
}
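
The table stores an encrypted TOTP secret plus encrypted recovery codes per user. For context, here is a sketch of how a TOTP secret is typically generated and verified with the otplib package; this library choice is an assumption for illustration, since the diff does not show which TOTP library the service layer uses:

import { authenticator } from "otplib";

// Generate a secret; the service would encrypt it before persisting
// it in encryptedSecret.
const secret = authenticator.generateSecret();

// Build the otpauth:// URI a user scans into their authenticator app.
const uri = authenticator.keyuri("user@example.com", "Infisical", secret);

// Later, verify a 6-digit code the user submits.
const isValid = authenticator.verify({ token: "123456", secret });
console.log(uri, isValid);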
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("description");
    });
  }
}
20 backend/src/db/migrations/20241121131344_make-identity-metadata-not-nullable-again.ts Normal file
@@ -0,0 +1,20 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex(TableName.IdentityMetadata).whereNull("value").delete();
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).alter();
    });
  }
}
@@ -12,11 +12,12 @@ import { TImmutableDBKeys } from "./models";
 export const CertificateTemplateEstConfigsSchema = z.object({
   id: z.string().uuid(),
   certificateTemplateId: z.string().uuid(),
-  encryptedCaChain: zodBuffer,
+  encryptedCaChain: zodBuffer.nullable().optional(),
   hashedPassphrase: z.string(),
   isEnabled: z.boolean(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  disableBootstrapCertValidation: z.boolean().default(false)
 });

 export type TCertificateTemplateEstConfigs = z.infer<typeof CertificateTemplateEstConfigsSchema>;
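
The `.nullable().optional()` pairing matters in Zod: `.nullable()` admits an explicit `null` (the column is now nullable in Postgres) while `.optional()` also lets the key be absent. A quick illustrative check, using `z.instanceof(Buffer)` as a stand-in for the repo's `zodBuffer` helper:

import { z } from "zod";

const Chain = z.object({ encryptedCaChain: z.instanceof(Buffer).nullable().optional() });

Chain.parse({ encryptedCaChain: null });              // ok: explicit null
Chain.parse({});                                      // ok: key absent
Chain.parse({ encryptedCaChain: Buffer.from("ca") }); // ok: a real buffer
// Chain.parse({ encryptedCaChain: "nope" });         // would throw: wrong type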
@@ -20,7 +20,8 @@ export const IdentityAccessTokensSchema = z.object({
   identityId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  name: z.string().nullable().optional()
+  name: z.string().nullable().optional(),
+  authMethod: z.string()
 });

 export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;
@@ -64,6 +64,7 @@ export * from "./project-keys";
 export * from "./project-memberships";
 export * from "./project-roles";
 export * from "./project-slack-configs";
+export * from "./project-templates";
 export * from "./project-user-additional-privilege";
 export * from "./project-user-membership-roles";
 export * from "./projects";
@@ -105,6 +106,7 @@ export * from "./secrets-v2";
 export * from "./service-tokens";
 export * from "./slack-integrations";
 export * from "./super-admin";
+export * from "./totp-configs";
 export * from "./trusted-ips";
 export * from "./user-actions";
 export * from "./user-aliases";
@@ -11,7 +11,10 @@ import { TImmutableDBKeys } from "./models";

 export const KmsRootConfigSchema = z.object({
   id: z.string().uuid(),
-  encryptedRootKey: zodBuffer
+  encryptedRootKey: zodBuffer,
+  encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
+  createdAt: z.date(),
+  updatedAt: z.date()
 });

 export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
@@ -41,6 +41,7 @@ export enum TableName {
   ProjectUserAdditionalPrivilege = "project_user_additional_privilege",
   ProjectUserMembershipRole = "project_user_membership_roles",
   ProjectKeys = "project_keys",
+  ProjectTemplates = "project_templates",
   Secret = "secrets",
   SecretReference = "secret_references",
   SecretSharing = "secret_sharing",
@@ -116,6 +117,7 @@ export enum TableName {
   ExternalKms = "external_kms",
   InternalKms = "internal_kms",
   InternalKmsKeyVersion = "internal_kms_key_version",
+  TotpConfig = "totp_configs",
   // @deprecated
   KmsKeyVersion = "kms_key_versions",
   WorkflowIntegrations = "workflow_integrations",
@@ -189,7 +191,7 @@ export enum ProjectUpgradeStatus {

 export enum IdentityAuthMethod {
   TOKEN_AUTH = "token-auth",
-  Univeral = "universal-auth",
+  UNIVERSAL_AUTH = "universal-auth",
   KUBERNETES_AUTH = "kubernetes-auth",
   GCP_AUTH = "gcp-auth",
   AWS_AUTH = "aws-auth",
@@ -21,7 +21,8 @@ export const OrganizationsSchema = z.object({
   kmsDefaultKeyId: z.string().uuid().nullable().optional(),
   kmsEncryptedDataKey: zodBuffer.nullable().optional(),
   defaultMembershipRole: z.string().default("member"),
-  enforceMfa: z.boolean().default(false)
+  enforceMfa: z.boolean().default(false),
+  selectedMfaMethod: z.string().nullable().optional()
 });

 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
@@ -15,7 +15,8 @@ export const ProjectRolesSchema = z.object({
   permissions: z.unknown(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  projectId: z.string()
+  projectId: z.string(),
+  version: z.number().default(1)
 });

 export type TProjectRoles = z.infer<typeof ProjectRolesSchema>;
23 backend/src/db/schemas/project-templates.ts Normal file
@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run: npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const ProjectTemplatesSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  description: z.string().nullable().optional(),
  roles: z.unknown(),
  environments: z.unknown(),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TProjectTemplates = z.infer<typeof ProjectTemplatesSchema>;
export type TProjectTemplatesInsert = Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>;
export type TProjectTemplatesUpdate = Partial<Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>>;
@@ -23,7 +23,8 @@ export const ProjectsSchema = z.object({
   kmsCertificateKeyId: z.string().uuid().nullable().optional(),
   auditLogsRetentionDays: z.number().nullable().optional(),
   kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
-  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
+  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
+  description: z.string().nullable().optional()
 });

 export type TProjects = z.infer<typeof ProjectsSchema>;
24 backend/src/db/schemas/totp-configs.ts Normal file
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run: npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const TotpConfigsSchema = z.object({
  id: z.string().uuid(),
  userId: z.string().uuid(),
  isVerified: z.boolean().default(false),
  encryptedRecoveryCodes: zodBuffer,
  encryptedSecret: zodBuffer,
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;
@@ -26,7 +26,8 @@ export const UsersSchema = z.object({
   consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
   isLocked: z.boolean().default(false).nullable().optional(),
   temporaryLockDateEnd: z.date().nullable().optional(),
-  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
+  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
+  selectedMfaMethod: z.string().nullable().optional()
 });

 export type TUsers = z.infer<typeof UsersSchema>;
@@ -16,7 +16,7 @@ export async function seed(knex: Knex): Promise<void> {
     // @ts-ignore
     id: seedData1.machineIdentity.id,
     name: seedData1.machineIdentity.name,
-    authMethod: IdentityAuthMethod.Univeral
+    authMethod: IdentityAuthMethod.UNIVERSAL_AUTH
   }
 ]);
 const identityUa = await knex(TableName.IdentityUniversalAuth)
@@ -2,6 +2,9 @@ import { Knex } from "knex";

 import { TableName } from "./schemas";

+interface PgTriggerResult {
+  rows: Array<{ exists: boolean }>;
+}
 export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
   knex.schema.createTable(tableName, (table) => {
     table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
@@ -28,13 +31,26 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;

 // use this to apply updatedAt wherever we want
 // remember to set `timestamps(true,true,true)` on the schema before this
-export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
-  knex.raw(`
-    CREATE TRIGGER "${tableName}_updatedAt"
-    BEFORE UPDATE ON ${tableName}
-    FOR EACH ROW
-    EXECUTE PROCEDURE on_update_timestamp();
-  `);
+export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
+  const triggerExists = await knex.raw<PgTriggerResult>(`
+    SELECT EXISTS (
+      SELECT 1
+      FROM pg_trigger
+      WHERE tgname = '${tableName}_updatedAt'
+    );
+  `);
+
+  if (!triggerExists?.rows?.[0]?.exists) {
+    return knex.raw(`
+      CREATE TRIGGER "${tableName}_updatedAt"
+      BEFORE UPDATE ON ${tableName}
+      FOR EACH ROW
+      EXECUTE PROCEDURE on_update_timestamp();
+    `);
+  }
+
+  return null;
+};

 export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
   knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
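
Both triggers call a Postgres function named `on_update_timestamp()`, created elsewhere in this file (the hunk context above shows its matching `DROP FUNCTION`). For reference, the standard recipe for that function looks like the following; treat it as a sketch, since the exact source is not shown in this diff:

import { Knex } from "knex";

// Bumps "updatedAt" on every UPDATE; each per-table trigger above points here.
export const createUpdateAtTriggerFunction = (knex: Knex) =>
  knex.raw(`
    CREATE OR REPLACE FUNCTION on_update_timestamp() RETURNS trigger AS $$
    BEGIN
      NEW."updatedAt" = NOW();
      RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;
  `);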
@@ -1,3 +1,5 @@
+import { registerProjectTemplateRouter } from "@app/ee/routes/v1/project-template-router";
+
 import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
 import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
 import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
@@ -92,4 +94,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
   await server.register(registerExternalKmsRouter, {
     prefix: "/external-kms"
   });
+
+  await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
 };
@@ -192,7 +192,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
   }),
   response: {
     200: z.object({
-      roles: ProjectRolesSchema.omit({ permissions: true }).array()
+      roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array()
     })
   }
 },
@@ -225,7 +225,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
   }),
   response: {
     200: z.object({
-      role: SanitizedRoleSchemaV1
+      role: SanitizedRoleSchemaV1.omit({ version: true })
     })
   }
 },
309 backend/src/ee/routes/v1/project-template-router.ts Normal file
@@ -0,0 +1,309 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";

const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

const SlugSchema = z
  .string()
  .trim()
  .min(1)
  .max(32)
  .refine((val) => val.toLowerCase() === val, "Must be lowercase")
  .refine((v) => slugify(v) === v, {
    message: "Must be valid slug format"
  });

const isReservedRoleSlug = (slug: string) =>
  Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);

const isReservedRoleName = (name: string) =>
  ["custom", "admin", "viewer", "developer", "no access"].includes(name.toLowerCase());

const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
  roles: z
    .object({
      name: z.string().trim().min(1),
      slug: SlugSchema,
      permissions: UnpackedPermissionSchema.array()
    })
    .array(),
  environments: z
    .object({
      name: z.string().trim().min(1),
      slug: SlugSchema,
      position: z.number().min(1)
    })
    .array()
});

const ProjectTemplateRolesSchema = z
  .object({
    name: z.string().trim().min(1),
    slug: SlugSchema,
    permissions: ProjectPermissionV2Schema.array()
  })
  .array()
  .superRefine((roles, ctx) => {
    if (!roles.length) return;

    if (Buffer.byteLength(JSON.stringify(roles)) > MAX_JSON_SIZE_LIMIT_IN_BYTES)
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" });

    if (new Set(roles.map((v) => v.slug)).size !== roles.length)
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role slugs must be unique" });

    if (new Set(roles.map((v) => v.name)).size !== roles.length)
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role names must be unique" });

    roles.forEach((role) => {
      if (isReservedRoleSlug(role.slug))
        ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role slug "${role.slug}" is reserved` });

      if (isReservedRoleName(role.name))
        ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role name "${role.name}" is reserved` });
    });
  });

const ProjectTemplateEnvironmentsSchema = z
  .object({
    name: z.string().trim().min(1),
    slug: SlugSchema,
    position: z.number().min(1)
  })
  .array()
  .min(1)
  .superRefine((environments, ctx) => {
    if (Buffer.byteLength(JSON.stringify(environments)) > MAX_JSON_SIZE_LIMIT_IN_BYTES)
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" });

    if (new Set(environments.map((v) => v.name)).size !== environments.length)
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment names must be unique" });

    if (new Set(environments.map((v) => v.slug)).size !== environments.length)
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment slugs must be unique" });

    if (
      environments.some((env) => env.position < 1 || env.position > environments.length) ||
      new Set(environments.map((env) => env.position)).size !== environments.length
    )
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "One or more of the positions specified is invalid. Positions must be sequential starting from 1."
      });
  });

export const registerProjectTemplateRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "List project templates for the current organization.",
      response: {
        200: z.object({
          projectTemplates: SanitizedProjectTemplateSchema.array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission);

      const auditTemplates = projectTemplates.filter((template) => !isInfisicalProjectTemplate(template.name));

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.GET_PROJECT_TEMPLATES,
          metadata: {
            count: auditTemplates.length,
            templateIds: auditTemplates.map((template) => template.id)
          }
        }
      });

      return { projectTemplates };
    }
  });

  server.route({
    method: "GET",
    url: "/:templateId",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Get a project template by ID.",
      params: z.object({
        templateId: z.string().uuid()
      }),
      response: {
        200: z.object({
          projectTemplate: SanitizedProjectTemplateSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const projectTemplate = await server.services.projectTemplate.findProjectTemplateById(
        req.params.templateId,
        req.permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.GET_PROJECT_TEMPLATE,
          metadata: {
            templateId: req.params.templateId
          }
        }
      });

      return { projectTemplate };
    }
  });

  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "Create a project template.",
      body: z.object({
        name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
          message: `The requested project template name is reserved.`
        }).describe(ProjectTemplates.CREATE.name),
        description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
        roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
        environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
          ProjectTemplates.CREATE.environments
        )
      }),
      response: {
        200: z.object({
          projectTemplate: SanitizedProjectTemplateSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const projectTemplate = await server.services.projectTemplate.createProjectTemplate(req.body, req.permission);

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.CREATE_PROJECT_TEMPLATE,
          metadata: req.body
        }
      });

      return { projectTemplate };
    }
  });

  server.route({
    method: "PATCH",
    url: "/:templateId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "Update a project template.",
      params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
      body: z.object({
        name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
          message: `The requested project template name is reserved.`
        })
          .optional()
          .describe(ProjectTemplates.UPDATE.name),
        description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),
        roles: ProjectTemplateRolesSchema.optional().describe(ProjectTemplates.UPDATE.roles),
        environments: ProjectTemplateEnvironmentsSchema.optional().describe(ProjectTemplates.UPDATE.environments)
      }),
      response: {
        200: z.object({
          projectTemplate: SanitizedProjectTemplateSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const projectTemplate = await server.services.projectTemplate.updateProjectTemplateById(
        req.params.templateId,
        req.body,
        req.permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.UPDATE_PROJECT_TEMPLATE,
          metadata: {
            templateId: req.params.templateId,
            ...req.body
          }
        }
      });

      return { projectTemplate };
    }
  });

  server.route({
    method: "DELETE",
    url: "/:templateId",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "Delete a project template.",
      params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.DELETE.templateId) }),

      response: {
        200: z.object({
          projectTemplate: SanitizedProjectTemplateSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const projectTemplate = await server.services.projectTemplate.deleteProjectTemplateById(
        req.params.templateId,
        req.permission
      );

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.DELETE_PROJECT_TEMPLATE,
          metadata: {
            templateId: req.params.templateId
          }
        }
      });

      return { projectTemplate };
    }
  });
};
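
With the router registered under the /project-templates prefix (see the v1 EE route registration earlier in this diff), a client can exercise the create endpoint roughly as follows. This is a hedged sketch: the /api/v1 base path and the bearer token are assumptions about the deployment, not shown in the diff:

// Hypothetical client call against a self-hosted instance.
const res = await fetch("https://infisical.example.com/api/v1/project-templates", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
    "Content-Type": "application/json"
  },
  body: JSON.stringify({
    name: "backend-service", // must satisfy SlugSchema: lowercase slug, max 32 chars
    description: "Template for backend microservices",
    environments: [
      { name: "Development", slug: "dev", position: 1 },
      { name: "Production", slug: "prod", position: 2 }
    ]
  })
});

const { projectTemplate } = await res.json();
console.log(projectTemplate.id);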
@@ -122,6 +122,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
       },
       `email: ${email} firstName: ${profile.firstName as string}`
     );
+
+    throw new Error("Invalid saml request. Missing email or first name");
   }

   const userMetadata = Object.keys(profile.attributes || {})
@@ -186,7 +186,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
   }),
   response: {
     200: z.object({
-      roles: ProjectRolesSchema.omit({ permissions: true }).array()
+      roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array()
     })
   }
 },
@@ -219,7 +219,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
   }),
   response: {
     200: z.object({
-      role: SanitizedRoleSchema
+      role: SanitizedRoleSchema.omit({ version: true })
     })
   }
 },
@@ -1,3 +1,7 @@
+import {
+  TCreateProjectTemplateDTO,
+  TUpdateProjectTemplateDTO
+} from "@app/ee/services/project-template/project-template-types";
 import { SymmetricEncryption } from "@app/lib/crypto/cipher";
 import { TProjectPermission } from "@app/lib/types";
 import { ActorType } from "@app/services/auth/auth-type";
@@ -192,7 +196,13 @@ export enum EventType {
   CMEK_ENCRYPT = "cmek-encrypt",
   CMEK_DECRYPT = "cmek-decrypt",
   UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
-  GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping"
+  GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping",
+  GET_PROJECT_TEMPLATES = "get-project-templates",
+  GET_PROJECT_TEMPLATE = "get-project-template",
+  CREATE_PROJECT_TEMPLATE = "create-project-template",
+  UPDATE_PROJECT_TEMPLATE = "update-project-template",
+  DELETE_PROJECT_TEMPLATE = "delete-project-template",
+  APPLY_PROJECT_TEMPLATE = "apply-project-template"
 }

 interface UserActorMetadata {
@@ -1618,6 +1628,46 @@ interface UpdateExternalGroupOrgRoleMappingsEvent {
   };
 }

+interface GetProjectTemplatesEvent {
+  type: EventType.GET_PROJECT_TEMPLATES;
+  metadata: {
+    count: number;
+    templateIds: string[];
+  };
+}
+
+interface GetProjectTemplateEvent {
+  type: EventType.GET_PROJECT_TEMPLATE;
+  metadata: {
+    templateId: string;
+  };
+}
+
+interface CreateProjectTemplateEvent {
+  type: EventType.CREATE_PROJECT_TEMPLATE;
+  metadata: TCreateProjectTemplateDTO;
+}
+
+interface UpdateProjectTemplateEvent {
+  type: EventType.UPDATE_PROJECT_TEMPLATE;
+  metadata: TUpdateProjectTemplateDTO & { templateId: string };
+}
+
+interface DeleteProjectTemplateEvent {
+  type: EventType.DELETE_PROJECT_TEMPLATE;
+  metadata: {
+    templateId: string;
+  };
+}
+
+interface ApplyProjectTemplateEvent {
+  type: EventType.APPLY_PROJECT_TEMPLATE;
+  metadata: {
+    template: string;
+    projectId: string;
+  };
+}
+
 export type Event =
   | GetSecretsEvent
   | GetSecretEvent
@@ -1766,4 +1816,10 @@ export type Event =
   | CmekEncryptEvent
   | CmekDecryptEvent
   | GetExternalGroupOrgRoleMappingsEvent
-  | UpdateExternalGroupOrgRoleMappingsEvent;
+  | UpdateExternalGroupOrgRoleMappingsEvent
+  | GetProjectTemplatesEvent
+  | GetProjectTemplateEvent
+  | CreateProjectTemplateEvent
+  | UpdateProjectTemplateEvent
+  | DeleteProjectTemplateEvent
+  | ApplyProjectTemplateEvent;
@@ -171,27 +171,29 @@ export const certificateEstServiceFactory = ({
     });
   }

-  const caCerts = estConfig.caChain
-    .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
-    ?.map((cert) => {
-      return new x509.X509Certificate(cert);
-    });
+  if (!estConfig.disableBootstrapCertValidation) {
+    const caCerts = estConfig.caChain
+      .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
+      ?.map((cert) => {
+        return new x509.X509Certificate(cert);
+      });

-  if (!caCerts) {
-    throw new BadRequestError({ message: "Failed to parse certificate chain" });
-  }
+    if (!caCerts) {
+      throw new BadRequestError({ message: "Failed to parse certificate chain" });
+    }

-  const leafCertificate = decodeURIComponent(sslClientCert).match(
-    /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
-  )?.[0];
+    const leafCertificate = decodeURIComponent(sslClientCert).match(
+      /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
+    )?.[0];

-  if (!leafCertificate) {
-    throw new BadRequestError({ message: "Missing client certificate" });
-  }
+    if (!leafCertificate) {
+      throw new BadRequestError({ message: "Missing client certificate" });
+    }

-  const certObj = new x509.X509Certificate(leafCertificate);
-  if (!(await isCertChainValid([certObj, ...caCerts]))) {
-    throw new BadRequestError({ message: "Invalid certificate chain" });
+    const certObj = new x509.X509Certificate(leafCertificate);
+    if (!(await isCertChainValid([certObj, ...caCerts]))) {
+      throw new BadRequestError({ message: "Invalid certificate chain" });
+    }
   }

   const { certificate } = await certificateAuthorityService.signCertFromCa({
20 backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts Normal file
@@ -0,0 +1,20 @@
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string) => {
  const appCfg = getConfig();
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

  if (
    appCfg.isCloud &&
    // reject localhost aliases and internal/private IP ranges
    (host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new BadRequestError({ message: "Invalid db host" });

  if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
    throw new BadRequestError({ message: "Invalid db host" });
  }
};
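
This new helper centralizes the SSRF-style host check that several dynamic-secret providers previously duplicated inline (the ElasticSearch provider later in this diff is one of them). Illustrative behavior, assuming a cloud-configured instance:

import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";

// Each of these throws BadRequestError: localhost and private ranges are rejected.
verifyHostInputValidity("127.0.0.1");
verifyHostInputValidity("10.0.0.5");
verifyHostInputValidity("host.docker.internal");

// Passes: a routable external hostname.
verifyHostInputValidity("db.example.com");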
@@ -9,7 +9,7 @@ import {
 } from "@app/ee/services/permission/project-permission";
 import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
-import { OrderByDirection } from "@app/lib/types";
+import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -22,6 +22,7 @@ import {
   TDeleteDynamicSecretDTO,
   TDetailsDynamicSecretDTO,
   TGetDynamicSecretsCountDTO,
+  TListDynamicSecretsByFolderMappingsDTO,
   TListDynamicSecretsDTO,
   TListDynamicSecretsMultiEnvDTO,
   TUpdateDynamicSecretDTO
@@ -454,8 +455,44 @@ export const dynamicSecretServiceFactory = ({
     return dynamicSecretCfg;
   };

+  const listDynamicSecretsByFolderIds = async (
+    { folderMappings, filters, projectId }: TListDynamicSecretsByFolderMappingsDTO,
+    actor: OrgServiceActor
+  ) => {
+    const { permission } = await permissionService.getProjectPermission(
+      actor.type,
+      actor.id,
+      projectId,
+      actor.authMethod,
+      actor.orgId
+    );
+
+    const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) =>
+      permission.can(
+        ProjectPermissionDynamicSecretActions.ReadRootCredential,
+        subject(ProjectPermissionSub.DynamicSecrets, { environment, secretPath: path })
+      )
+    );
+
+    const groupedFolderMappings = new Map(userAccessibleFolderMappings.map((path) => [path.folderId, path]));
+
+    const dynamicSecrets = await dynamicSecretDAL.listDynamicSecretsByFolderIds({
+      folderIds: userAccessibleFolderMappings.map(({ folderId }) => folderId),
+      ...filters
+    });
+
+    return dynamicSecrets.map((dynamicSecret) => {
+      const { environment, path } = groupedFolderMappings.get(dynamicSecret.folderId)!;
+      return {
+        ...dynamicSecret,
+        environment,
+        path
+      };
+    });
+  };
+
   // get dynamic secrets for multiple envs
-  const listDynamicSecretsByFolderIds = async ({
+  const listDynamicSecretsByEnvs = async ({
     actorAuthMethod,
     actorOrgId,
     actorId,
@@ -521,9 +558,10 @@ export const dynamicSecretServiceFactory = ({
     deleteByName,
     getDetails,
     listDynamicSecretsByEnv,
-    listDynamicSecretsByFolderIds,
+    listDynamicSecretsByEnvs,
     getDynamicSecretCount,
     getCountMultiEnv,
-    fetchAzureEntraIdUsers
+    fetchAzureEntraIdUsers,
+    listDynamicSecretsByFolderIds
   };
 };
@@ -48,17 +48,27 @@ export type TDetailsDynamicSecretDTO = {
   projectSlug: string;
 } & Omit<TProjectPermission, "projectId">;

-export type TListDynamicSecretsDTO = {
-  path: string;
-  environmentSlug: string;
-  projectSlug?: string;
-  projectId?: string;
+export type ListDynamicSecretsFilters = {
   offset?: number;
   limit?: number;
   orderBy?: SecretsOrderBy;
   orderDirection?: OrderByDirection;
   search?: string;
-} & Omit<TProjectPermission, "projectId">;
+};
+
+export type TListDynamicSecretsDTO = {
+  path: string;
+  environmentSlug: string;
+  projectSlug?: string;
+  projectId?: string;
+} & ListDynamicSecretsFilters &
+  Omit<TProjectPermission, "projectId">;
+
+export type TListDynamicSecretsByFolderMappingsDTO = {
+  projectId: string;
+  folderMappings: { folderId: string; path: string; environment: string }[];
+  filters: ListDynamicSecretsFilters;
+};

 export type TListDynamicSecretsMultiEnvDTO = Omit<
   TListDynamicSecretsDTO,
@@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
   }
 };

-const addUserToInfisicalGroup = async (userId: string) => {
+const $addUserToInfisicalGroup = async (userId: string) => {
   // figure out if the default user is already in the group; if it is, we shouldn't add it again

   const addUserToGroupCommand = new ModifyUserGroupCommand({
@@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
   await ensureInfisicalGroupExists(clusterName);

   await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
-  await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
+  await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()

   return {
     userId: creationInput.UserId,
@@ -212,7 +212,7 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
   };

   const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+    // No renewal necessary
     return { entityId };
   };
@@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
   return providerInputs;
 };

-const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
+const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
   const client = new IAMClient({
     region: providerInputs.region,
     credentials: {
@@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

 const validateConnection = async (inputs: unknown) => {
   const providerInputs = await validateProviderInputs(inputs);
-  const client = await getClient(providerInputs);
+  const client = await $getClient(providerInputs);

   const isConnected = await client.send(new GetUserCommand({})).then(() => true);
   return isConnected;
@@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

 const create = async (inputs: unknown) => {
   const providerInputs = await validateProviderInputs(inputs);
-  const client = await getClient(providerInputs);
+  const client = await $getClient(providerInputs);

   const username = generateUsername();
   const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {

 const revoke = async (inputs: unknown, entityId: string) => {
   const providerInputs = await validateProviderInputs(inputs);
-  const client = await getClient(providerInputs);
+  const client = await $getClient(providerInputs);

   const username = entityId;

@@ -179,9 +179,8 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
 };

 const renew = async (_inputs: unknown, entityId: string) => {
-  // do nothing
-  const username = entityId;
-  return { entityId: username };
+  // No renewal necessary
+  return { entityId };
 };

 return {
@@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
   return providerInputs;
 };

-const getToken = async (
+const $getToken = async (
   tenantId: string,
   applicationId: string,
   clientSecret: string
@@ -51,18 +51,13 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {

 const validateConnection = async (inputs: unknown) => {
   const providerInputs = await validateProviderInputs(inputs);
-  const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+  const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
   return data.success;
 };

-const renew = async (inputs: unknown, entityId: string) => {
-  // Do nothing
-  return { entityId };
-};
-
 const create = async (inputs: unknown) => {
   const providerInputs = await validateProviderInputs(inputs);
-  const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+  const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
   if (!data.success) {
     throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
   }
@@ -98,7 +93,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
 };

 const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
-  const data = await getToken(tenantId, applicationId, clientSecret);
+  const data = await $getToken(tenantId, applicationId, clientSecret);
   if (!data.success) {
     throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
   }
@@ -127,6 +122,11 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
   return users;
 };

+const renew = async (inputs: unknown, entityId: string) => {
+  // No renewal necessary
+  return { entityId };
+};
+
 return {
   validateProviderInputs,
   validateConnection,
|
||||
|
@@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
await client.shutdown();
@@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = entityId;
const { keyspace } = providerInputs;
@@ -99,20 +99,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };

const client = await $getClient(providerInputs);

const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { keyspace } = providerInputs;

const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
keyspace,
expiration
});
const queries = renewStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
for await (const query of queries) {
await client.execute(query);
}
await client.shutdown();
return { entityId: username };
return { entityId };
};

return {
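Beyond the $getClient rename, the renew hunk above fixes a genuine bug: the old code compiled providerInputs.revocationStatement inside renew, so renewing a Cassandra lease would have executed the revocation CQL. The rewrite compiles providerInputs.renewStatement, returns early when no renew statement is configured, and returns { entityId } rather than { entityId: username } (the same value, since username is assigned from entityId).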
@@ -2,10 +2,9 @@ import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";

const generatePassword = () => {
@@ -19,28 +18,13 @@ const generateUsername = () => {

export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not

const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
verifyHostInputValidity(providerInputs.host);

return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@@ -71,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

const infoResponse = await connection
.info()
@@ -83,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@@ -101,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

await connection.security.deleteUser({
username: entityId
@@ -112,7 +96,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
};

const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

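This is the first of several providers where the copy-pasted inline host checks are collapsed into the shared verifyHostInputValidity helper imported from ../dynamic-secret-fns. The helper's body is not part of this compare; what follows is a minimal sketch reconstructed from the inline checks it replaces (the appCfg fields are the ones the deleted code used), and the real implementation may differ.

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";

// Sketch only - the real helper lives in ../dynamic-secret-fns.
export const verifyHostInputValidity = (host: string) => {
  const appCfg = getConfig();
  const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY);

  // localhost is rejected everywhere
  if (host === "localhost" || host === "127.0.0.1") {
    throw new BadRequestError({ message: "Invalid db host" });
  }

  // on cloud, also reject the Docker host alias and private IP ranges
  if (
    isCloud &&
    (host === "host.docker.internal" || /^10\.\d+\.\d+\.\d+/.test(host) || /^192\.168\.\d+\.\d+/.test(host))
  ) {
    throw new BadRequestError({ message: "Invalid db host" });
  }
};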
@@ -1,17 +1,22 @@
import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";

import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders } from "./models";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";

export const buildDynamicSecretProviders = () => ({
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@@ -22,5 +27,9 @@ export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
[DynamicSecretProviders.RabbitMq]: RabbitMqProvider(),
[DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
[DynamicSecretProviders.Ldap]: LdapProvider()
[DynamicSecretProviders.Ldap]: LdapProvider(),
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider()
});
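Giving buildDynamicSecretProviders an explicit Record<DynamicSecretProviders, TDynamicProviderFns> return type turns a forgotten provider into a compile-time error instead of a runtime lookup failure. A quick illustration:

const providers = buildDynamicSecretProviders();
// If a member of the DynamicSecretProviders enum were missing from the object
// literal, tsc would reject the function itself; the lookup below is now total.
const totp = providers[DynamicSecretProviders.Totp];
await totp.validateConnection({});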
@@ -7,7 +7,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { LdapSchema, TDynamicProviderFns } from "./models";
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
return new Promise((resolve, reject) => {
const client = ldapjs.createClient({
url: providerInputs.url,
@@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
return client.connected;
};

@@ -191,37 +191,84 @@ export const LdapProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);

try {
const dnArray = await executeLdif(client, generatedLdif);
if (dnMatch) {
const username = dnMatch[1];
const password = generatePassword();

return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
if (providerInputs.rollbackLdif) {
const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
await executeLdif(client, rollbackLdif);
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });

try {
const dnArray = await executeLdif(client, generatedLdif);

return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
throw new BadRequestError({ message: (err as Error).message });
}
} else {
throw new BadRequestError({
message: "Invalid rotation LDIF, missing DN."
});
}
} else {
const username = generateUsername();
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });

try {
const dnArray = await executeLdif(client, generatedLdif);

return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
if (providerInputs.rollbackLdif) {
const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
await executeLdif(client, rollbackLdif);
}
throw new BadRequestError({ message: (err as Error).message });
}
throw new BadRequestError({ message: (err as Error).message });
}
};

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const client = await $getClient(providerInputs);

if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);

if (dnMatch) {
const username = dnMatch[1];
const password = generatePassword();

const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });

try {
const dnArray = await executeLdif(client, generatedLdif);

return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
throw new BadRequestError({ message: (err as Error).message });
}
} else {
throw new BadRequestError({
message: "Invalid rotation LDIF, missing DN."
});
}
}

const revocationLdif = generateLDIF({ username: entityId, ldifTemplate: providerInputs.revocationLdif });

await executeLdif(connection, revocationLdif);
await executeLdif(client, revocationLdif);

return { entityId };
};

const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

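The new static credential path rotates the password of a fixed LDAP entry rather than creating one. It derives the entity id from the first dn: line of the user-supplied rotationLdif (the /^dn:\s*(.+)/m match above), which is why a missing DN is a hard error. A hypothetical rotation template, embedded in TypeScript for illustration; the DN and attribute names are made up:

// Hypothetical rotationLdif input; generateLDIF substitutes {{password}}
// (and {{username}}) before the LDIF is executed against the server.
const rotationLdif = `dn: cn=svc-backup,ou=users,dc=example,dc=com
changetype: modify
replace: userPassword
userPassword: {{password}}
`;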
@@ -4,7 +4,8 @@ export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql"
MsSQL = "mssql",
SapAse = "sap-ase"
}

export enum ElasticSearchAuthTypes {
@@ -12,6 +13,22 @@ export enum ElasticSearchAuthTypes {
ApiKey = "api-key"
}

export enum LdapCredentialType {
Dynamic = "dynamic",
Static = "static"
}

export enum TotpConfigType {
URL = "url",
MANUAL = "manual"
}

export enum TotpAlgorithm {
SHA1 = "sha1",
SHA256 = "sha256",
SHA512 = "sha512"
}

export const DynamicSecretRedisDBSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
@@ -102,6 +119,16 @@ export const DynamicSecretCassandraSchema = z.object({
ca: z.string().optional()
});

export const DynamicSecretSapAseSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim()
});

export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
@@ -166,6 +193,27 @@ export const DynamicSecretMongoDBSchema = z.object({
)
});

export const DynamicSecretSapHanaSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
ca: z.string().optional()
});

export const DynamicSecretSnowflakeSchema = z.object({
accountId: z.string().trim().min(1),
orgId: z.string().trim().min(1),
username: z.string().trim().min(1),
password: z.string().trim().min(1),
creationStatement: z.string().trim().min(1),
revocationStatement: z.string().trim().min(1),
renewStatement: z.string().trim().optional()
});

export const AzureEntraIDSchema = z.object({
tenantId: z.string().trim().min(1),
userId: z.string().trim().min(1),
@@ -174,16 +222,54 @@ export const AzureEntraIDSchema = z.object({
clientSecret: z.string().trim().min(1)
});

export const LdapSchema = z.object({
url: z.string().trim().min(1),
binddn: z.string().trim().min(1),
bindpass: z.string().trim().min(1),
ca: z.string().optional(),
export const LdapSchema = z.union([
z.object({
url: z.string().trim().min(1),
binddn: z.string().trim().min(1),
bindpass: z.string().trim().min(1),
ca: z.string().optional(),
credentialType: z.literal(LdapCredentialType.Dynamic).optional().default(LdapCredentialType.Dynamic),
creationLdif: z.string().min(1),
revocationLdif: z.string().min(1),
rollbackLdif: z.string().optional()
}),
z.object({
url: z.string().trim().min(1),
binddn: z.string().trim().min(1),
bindpass: z.string().trim().min(1),
ca: z.string().optional(),
credentialType: z.literal(LdapCredentialType.Static),
rotationLdif: z.string().min(1)
})
]);

creationLdif: z.string().min(1),
revocationLdif: z.string().min(1),
rollbackLdif: z.string().optional()
});
export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
z.object({
configType: z.literal(TotpConfigType.URL),
url: z
.string()
.url()
.trim()
.min(1)
.refine((val) => {
const urlObj = new URL(val);
const secret = urlObj.searchParams.get("secret");

return Boolean(secret);
}, "OTP URL must contain secret field")
}),
z.object({
configType: z.literal(TotpConfigType.MANUAL),
secret: z
.string()
.trim()
.min(1)
.transform((val) => val.replace(/\s+/g, "")),
period: z.number().optional(),
algorithm: z.nativeEnum(TotpAlgorithm).optional(),
digits: z.number().optional()
})
]);

export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
@@ -196,21 +282,29 @@ export enum DynamicSecretProviders {
MongoDB = "mongo-db",
RabbitMq = "rabbit-mq",
AzureEntraID = "azure-entra-id",
Ldap = "ldap"
Ldap = "ldap",
SapHana = "sap-hana",
Snowflake = "snowflake",
Totp = "totp",
SapAse = "sap-ase"
}

export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema })
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
]);

export type TDynamicProviderFns = {
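Because the first member of the new LdapSchema union defaults credentialType to "dynamic", inputs saved before this change (which carry no credentialType field) still parse into the dynamic shape. A quick sketch with made-up values:

const parsed = LdapSchema.parse({
  url: "ldaps://ldap.example.com", // hypothetical values throughout
  binddn: "cn=admin,dc=example,dc=com",
  bindpass: "example-password",
  creationLdif: "dn: cn={{username}},ou=users,dc=example,dc=com\n...",
  revocationLdif: "dn: cn={{username}},ou=users,dc=example,dc=com\n..."
});
// parsed.credentialType === "dynamic" via the .default(...) branch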
@@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const client = axios.create({
baseURL: "https://cloud.mongodb.com/api/atlas",
headers: {
@@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const isConnected = await client({
method: "GET",
@@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = entityId;
const isExisting = await client({
@@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = entityId;
const expiration = new Date(expireAt).toISOString();
@@ -2,10 +2,9 @@ import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
@@ -19,26 +18,12 @@ const generateUsername = () => {

export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
if (
appCfg.isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });

if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}

verifyHostInputValidity(providerInputs.host);
return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
@@ -57,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const isConnected = await client
.db(providerInputs.database)
@@ -70,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@@ -89,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);

const username = entityId;

@@ -103,6 +88,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
};

const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

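One detail worth calling out in $getClient above: SRV connection strings are inferred from the absence of a port. The ternary reduces to the following, with illustrative host values:

// Mirrors the uri ternary in $getClient.
const buildUri = (host: string, port?: number) =>
  port ? `mongodb://${host}:${port}` : `mongodb+srv://${host}`;

buildUri("cluster0.example.mongodb.net"); // "mongodb+srv://cluster0.example.mongodb.net"
buildUri("db.example.internal", 27017);   // "mongodb://db.example.internal:27017"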
@@ -3,12 +3,11 @@
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
@@ -79,28 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa

export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not

const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
verifyHostInputValidity(providerInputs.host);

return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
@@ -121,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

const infoResponse = await connection.get("/whoami").then(() => true);

@@ -130,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@@ -150,7 +134,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });

@@ -158,7 +142,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
};

const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

@@ -3,11 +3,10 @@ import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

const generatePassword = () => {
@@ -51,26 +50,12 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi

export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1" || dbHost === providerInputs.host)
throw new BadRequestError({ message: "Invalid db host" });
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
@@ -107,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

const pingResponse = await connection
.ping()
@@ -119,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();
@@ -141,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);

const username = entityId;

@@ -156,7 +141,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };

const connection = await $getClient(providerInputs);

const username = entityId;
const expiration = new Date(expireAt).toISOString();
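The renew() guard introduced here, and repeated in the SQL, SAP HANA, and Snowflake providers below, makes renewal a no-op when no renewStatement is configured instead of opening a connection to run an empty template. The shared shape, as a sketch (runStatements stands in for each provider's client/execute plumbing):

const renew = async (
  providerInputs: { renewStatement?: string },
  entityId: string,
  runStatements: (statement: string) => Promise<void>
) => {
  if (!providerInputs.renewStatement) return { entityId }; // nothing to run; lease id unchanged
  await runStatements(providerInputs.renewStatement);
  return { entityId };
};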
145 backend/src/ee/services/dynamic-secret/providers/sap-ase.ts (new file)
@@ -0,0 +1,145 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
return alphaNumericNanoId(25);
};

enum SapCommands {
CreateLogin = "sp_addlogin",
DropLogin = "sp_droplogin"
}

export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

verifyHostInputValidity(providerInputs.host);
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
`PWD=${providerInputs.password};` +
`TDS_VERSION=5.0`;

const client = await odbc.connect(connectionString);

return client;
};

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const masterClient = await $getClient(providerInputs, true);
const client = await $getClient(providerInputs);

const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");

if (!resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection, version query failed"
});
}

if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection (master), version mismatch"
});
}

return true;
};

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);

const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;

const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});

const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
// If not done, then the newly created user won't be able to authenticate.
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
}

await masterClient.close();
await client.close();

return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};

const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);

const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});

const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

// Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);

if (result && result.length > 0) {
for await (const row of result) {
if (row.spid) {
await masterClient.query(`KILL ${row.spid.trim()}`);
}
}
}

for await (const query of queries) {
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
}

await masterClient.close();
await client.close();

return { entityId: username };
};

const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};

return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
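The query router above sends any statement beginning with sp_addlogin (or sp_droplogin, in revoke) to the master database and everything else to the target database. Hypothetical statement templates that would exercise that routing; the grant and table name are invented, not part of this compare:

// sp_addlogin runs on master; sp_adduser and GRANT run on the target database.
const creationStatement = `sp_addlogin {{username}}, {{password}};
sp_adduser {{username}};
GRANT SELECT ON example_table TO {{username}};`;

// sp_dropuser runs on the target database; sp_droplogin on master.
const revocationStatement = `sp_dropuser {{username}};
sp_droplogin {{username}};`;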
176 backend/src/ee/services/dynamic-secret/providers/sap-hana.ts (new file)
@@ -0,0 +1,176 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */

import handlebars from "handlebars";
import hdb from "hdb";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
return alphaNumericNanoId(32);
};

export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);

verifyHostInputValidity(providerInputs.host);
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.host,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,
...(providerInputs.ca
? {
ca: providerInputs.ca
}
: {})
});

await new Promise((resolve, reject) => {
client.connect((err: any) => {
if (err) {
return reject(err);
}

if (client.readyState) {
return resolve(true);
}

reject(new Error("SAP HANA client not ready"));
});
});

return client;
};

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);

const testResult = await new Promise<boolean>((resolve, reject) => {
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
if (err) {
reject();
}

resolve(true);
});
});

return testResult;
};

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);

const username = generateUsername();
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

const client = await $getClient(providerInputs);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration
});

const queries = creationStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
client.exec(query, (err: any) => {
if (err) {
reject(
new BadRequestError({
message: err.message
})
);
}
resolve(true);
});
});
}

return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};

const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
client.exec(query, (err: any) => {
if (err) {
reject(
new BadRequestError({
message: err.message
})
);
}
resolve(true);
});
});
}

return { entityId: username };
};

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };

const client = await $getClient(providerInputs);
try {
const expiration = new Date(expireAt).toISOString();

const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
client.exec(query, (err: any) => {
if (err) {
reject(
new BadRequestError({
message: err.message
})
);
}
resolve(true);
});
});
}
} finally {
client.disconnect();
}

return { entityId };
};

return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
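The hdb driver is callback-based, so every statement above is wrapped in a hand-rolled Promise. The same wrapper, factored out once as a sketch (assuming the (err) => void callback shape the file above relies on):

type HdbClient = { exec: (sql: string, cb: (err: Error | null) => void) => void };

// Equivalent of the repeated inline wrapper in create/revoke/renew above.
const execAsync = (client: HdbClient, sql: string) =>
  new Promise<void>((resolve, reject) => {
    client.exec(sql, (err) => (err ? reject(err) : resolve()));
  });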
173 backend/src/ee/services/dynamic-secret/providers/snowflake.ts (new file)
@@ -0,0 +1,173 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import snowflake from "snowflake-sdk";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";

// destroy client requires callback...
const noop = () => {};

const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
};

const getDaysToExpiry = (expiryDate: Date) => {
const start = new Date().getTime();
const end = new Date(expiryDate).getTime();
const diffTime = Math.abs(end - start);

return Math.ceil(diffTime / (1000 * 60 * 60 * 24));
};

export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
return providerInputs;
};

const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const client = snowflake.createConnection({
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
username: providerInputs.username,
password: providerInputs.password,
application: "Infisical"
});

await client.connectAsync(noop);

return client;
};

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);

let isValidConnection: boolean;

try {
isValidConnection = await Promise.race([
client.isValidAsync(),
new Promise((resolve) => {
setTimeout(resolve, 10000);
}).then(() => {
throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
})
]);
} finally {
client.destroy(noop);
}

return isValidConnection;
};

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);

const client = await $getClient(providerInputs);

const username = generateUsername();
const password = generatePassword();

try {
const expiration = getDaysToExpiry(new Date(expireAt));
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration
});

await new Promise((resolve, reject) => {
client.execute({
sqlText: creationStatement,
complete(err) {
if (err) {
return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
}

return resolve(true);
}
});
});
} finally {
client.destroy(noop);
}

return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};

const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);

const client = await $getClient(providerInputs);

try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });

await new Promise((resolve, reject) => {
client.execute({
sqlText: revokeStatement,
complete(err) {
if (err) {
return reject(new BadRequestError({ name: "RevokeLease", message: err.message }));
}

return resolve(true);
}
});
});
} finally {
client.destroy(noop);
}

return { entityId: username };
};

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };

const client = await $getClient(providerInputs);

try {
const expiration = getDaysToExpiry(new Date(expireAt));
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration
});

await new Promise((resolve, reject) => {
client.execute({
sqlText: renewStatement,
complete(err) {
if (err) {
return reject(new BadRequestError({ name: "RenewLease", message: err.message }));
}

return resolve(true);
}
});
});
} finally {
client.destroy(noop);
}

return { entityId };
};

return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
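getDaysToExpiry feeds the {{expiration}} template variable as a whole number of days, rounded up. Because it wraps the difference in Math.abs, an expireAt already in the past also yields a positive day count; callers upstream are presumably expected never to pass one. Worked examples (times relative to "now"):

getDaysToExpiry(new Date(Date.now() + 36 * 60 * 60 * 1000)); // 2  (36h rounds up)
getDaysToExpiry(new Date(Date.now() + 10 * 60 * 1000));      // 1  (10min rounds up)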
@@ -3,11 +3,9 @@ import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";

const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -29,31 +27,12 @@ const generateUsername = (provider: SqlProviders) => {

export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
providerInputs.host === "localhost" ||
providerInputs.host === "127.0.0.1" ||
// database infisical uses
dbHost === providerInputs.host
)
throw new BadRequestError({ message: "Invalid db host" });
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};

const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const db = knex({
client: providerInputs.client,
@@ -73,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";

@@ -84,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);

const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
@@ -111,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);

const username = entityId;
const { database } = providerInputs;
@@ -131,13 +110,19 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };

const db = await $getClient(providerInputs);

const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;

const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});

if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
@@ -149,7 +134,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
}

await db.destroy();
return { entityId: username };
return { entityId };
};

return {
90 backend/src/ee/services/dynamic-secret/providers/totp.ts (new file)
@@ -0,0 +1,90 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";

export const TotpProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);

return providerInputs;
};

const validateConnection = async () => {
return true;
};

const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);

const entityId = alphaNumericNanoId(32);
const authenticatorInstance = authenticator.clone();

let secret: string;
let period: number | null | undefined;
let digits: number | null | undefined;
let algorithm: HashAlgorithms | null | undefined;

if (providerInputs.configType === TotpConfigType.URL) {
const urlObj = new URL(providerInputs.url);
secret = urlObj.searchParams.get("secret") as string;
const periodFromUrl = urlObj.searchParams.get("period");
const digitsFromUrl = urlObj.searchParams.get("digits");
const algorithmFromUrl = urlObj.searchParams.get("algorithm");

if (periodFromUrl) {
period = +periodFromUrl;
}

if (digitsFromUrl) {
digits = +digitsFromUrl;
}

if (algorithmFromUrl) {
algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
}
} else {
secret = providerInputs.secret;
period = providerInputs.period;
digits = providerInputs.digits;
algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
}

if (digits) {
authenticatorInstance.options = { digits };
}

if (algorithm) {
authenticatorInstance.options = { algorithm };
}

if (period) {
authenticatorInstance.options = { step: period };
}

return {
entityId,
data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
};
};

const revoke = async (_inputs: unknown, entityId: string) => {
return { entityId };
};

// eslint-disable-next-line @typescript-eslint/no-unused-vars
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
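A consumer of this provider receives { TOTP, TIME_REMAINING } from create(). Checking such a code against the same secret with otplib looks like this (the secret is a made-up base32 string):

import { authenticator } from "otplib";

const secret = "JBSWY3DPEHPK3PXP"; // hypothetical base32 secret
const token = authenticator.generate(secret);
authenticator.verify({ token, secret }); // true within the current time step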
@@ -123,7 +123,7 @@ export const groupServiceFactory = ({
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to update group due to plan restrictio Upgrade plan to update group."
message: "Failed to update group due to plan restriction Upgrade plan to update group."
});

const group = await groupDAL.findOne({ orgId: actorOrgId, id });
58
backend/src/ee/services/hsm/hsm-fns.ts
Normal file
58
backend/src/ee/services/hsm/hsm-fns.ts
Normal file
@ -0,0 +1,58 @@
|
||||
import * as pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmModule } from "./hsm-types";

export const initializeHsmModule = () => {
  const appCfg = getConfig();

  // Create a new instance of the PKCS#11 module
  const pkcs11 = new pkcs11js.PKCS11();
  let isInitialized = false;

  const initialize = () => {
    if (!appCfg.isHsmConfigured) {
      return;
    }

    try {
      // Load the PKCS#11 library from the configured path
      pkcs11.load(appCfg.HSM_LIB_PATH!);

      // Initialize the module
      pkcs11.C_Initialize();
      isInitialized = true;

      logger.info("PKCS#11 module initialized");
    } catch (err) {
      logger.error(err, "Failed to initialize PKCS#11 module");
      throw err;
    }
  };

  const finalize = () => {
    if (isInitialized) {
      try {
        pkcs11.C_Finalize();
        isInitialized = false;
        logger.info("PKCS#11 module finalized");
      } catch (err) {
        logger.error(err, "Failed to finalize PKCS#11 module");
        throw err;
      }
    }
  };

  const getModule = (): HsmModule => ({
    pkcs11,
    isInitialized
  });

  return {
    initialize,
    finalize,
    getModule
  };
};
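
A minimal usage sketch of the factory above, assuming a typical server bootstrap (the shutdown hook is illustrative, not from the source):

const hsmModule = initializeHsmModule();
hsmModule.initialize(); // no-op when no HSM is configured

// Hand the loaded module to the HSM service (see hsm-service.ts below)
const { pkcs11, isInitialized } = hsmModule.getModule();

process.on("SIGTERM", () => {
  hsmModule.finalize(); // release the PKCS#11 library on shutdown
});
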
backend/src/ee/services/hsm/hsm-service.ts (new file, 470 lines)
@@ -0,0 +1,470 @@
import pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmKeyType, HsmModule } from "./hsm-types";

type THsmServiceFactoryDep = {
  hsmModule: HsmModule;
};

export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;

type SyncOrAsync<T> = T | Promise<T>;
type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;

export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
  const appCfg = getConfig();

  // Constants for buffer structures
  const IV_LENGTH = 16; // Luna HSMs typically expect a 16-byte IV for CBC
  const BLOCK_SIZE = 16;
  const HMAC_SIZE = 32;

  const AES_KEY_SIZE = 256;
  const HMAC_KEY_SIZE = 256;

  const $withSession = async <T>(callbackWithSession: SessionCallback<T>): Promise<T> => {
    const RETRY_INTERVAL = 200; // 200ms between attempts
    const MAX_TIMEOUT = 90_000; // 90 seconds maximum total time

    let sessionHandle: pkcs11js.Handle | null = null;

    const removeSession = () => {
      if (sessionHandle !== null) {
        try {
          pkcs11.C_Logout(sessionHandle);
          pkcs11.C_CloseSession(sessionHandle);
          logger.info("HSM: Terminated session successfully");
        } catch (error) {
          logger.error(error, "HSM: Failed to terminate session");
        } finally {
          sessionHandle = null;
        }
      }
    };

    try {
      if (!pkcs11 || !isInitialized) {
        throw new Error("PKCS#11 module is not initialized");
      }

      // Get slot list
      let slots: pkcs11js.Handle[];
      try {
        slots = pkcs11.C_GetSlotList(false); // false to get all slots
      } catch (error) {
        throw new Error(`Failed to get slot list: ${(error as Error)?.message}`);
      }

      if (slots.length === 0) {
        throw new Error("No slots available");
      }

      if (appCfg.HSM_SLOT >= slots.length) {
        throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
      }

      const slotId = slots[appCfg.HSM_SLOT];

      const startTime = Date.now();
      while (Date.now() - startTime < MAX_TIMEOUT) {
        try {
          // Open session
          // eslint-disable-next-line no-bitwise
          sessionHandle = pkcs11.C_OpenSession(slotId, pkcs11js.CKF_SERIAL_SESSION | pkcs11js.CKF_RW_SESSION);

          // Login
          try {
            pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
            logger.info("HSM: Successfully authenticated");
            break;
          } catch (error) {
            // Handle specific error cases
            if (error instanceof pkcs11js.Pkcs11Error) {
              if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
                // We throw instantly here to prevent further attempts: if too many attempts are made, the HSM may wipe all key material
                logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
                throw new Error("HSM: Incorrect HSM PIN detected. Please check the HSM configuration.");
              }
              if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
                logger.warn("HSM: Session already logged in");
              }
            }
            throw error; // Re-throw other errors
          }
        } catch (error) {
          // A bad PIN must not be retried; without this guard the retry loop
          // would keep attempting to log in with the same wrong PIN
          if (error instanceof Error && error.message.includes("Incorrect HSM PIN")) {
            throw error;
          }

          logger.warn(`HSM: Session creation failed. Retrying... Error: ${(error as Error)?.message}`);

          if (sessionHandle !== null) {
            try {
              pkcs11.C_CloseSession(sessionHandle);
            } catch (closeError) {
              logger.error(closeError, "HSM: Failed to close session");
            }
            sessionHandle = null;
          }

          // Wait before retrying
          // eslint-disable-next-line no-await-in-loop
          await new Promise((resolve) => {
            setTimeout(resolve, RETRY_INTERVAL);
          });
        }
      }

      if (sessionHandle === null) {
        throw new Error("HSM: Failed to open session after maximum retries");
      }

      // Execute callback with session handle
      const result = await callbackWithSession(sessionHandle);
      removeSession();
      return result;
    } catch (error) {
      logger.error(error, "HSM: Failed to open session");
      throw error;
    } finally {
      // Ensure cleanup
      removeSession();
    }
  };
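
  // For orientation, a sketch of how this helper is consumed (the callback is
  // hypothetical): every PKCS#11 call below funnels through $withSession so
  // that slot lookup, login, retry, and cleanup live in one place.
  //
  //   const randomBytes = await $withSession((session) =>
  //     pkcs11.C_GenerateRandom(session, Buffer.alloc(32))
  //   );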

  const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
    const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
    const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;

    const template = [
      { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
      { type: pkcs11js.CKA_KEY_TYPE, value: keyType },
      { type: pkcs11js.CKA_LABEL, value: label }
    ];

    try {
      // Initialize search
      pkcs11.C_FindObjectsInit(sessionHandle, template);

      try {
        // Find first matching object
        const handles = pkcs11.C_FindObjects(sessionHandle, 1);

        if (handles.length === 0) {
          throw new Error("Failed to find master key");
        }

        return handles[0]; // Return the key handle
      } finally {
        // Always finalize the search operation
        pkcs11.C_FindObjectsFinal(sessionHandle);
      }
    } catch (error) {
      return null;
    }
  };

  const $keyExists = (session: pkcs11js.Handle, type: HsmKeyType): boolean => {
    try {
      const key = $findKey(session, type);
      // Return true only if we got back a valid key handle
      return !!key && key.length > 0;
    } catch (error) {
      logger.error(error, "HSM: Failed while checking for HSM key presence");

      if (error instanceof pkcs11js.Pkcs11Error) {
        if (error.code === pkcs11js.CKR_OBJECT_HANDLE_INVALID) {
          return false;
        }
      }

      return false;
    }
  };

  const encrypt: {
    (data: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>;
    (data: Buffer): Promise<Buffer>;
  } = async (data: Buffer, providedSession?: pkcs11js.Handle) => {
    if (!pkcs11 || !isInitialized) {
      throw new Error("PKCS#11 module is not initialized");
    }

    const $performEncryption = (sessionHandle: pkcs11js.Handle) => {
      try {
        const aesKey = $findKey(sessionHandle, HsmKeyType.AES);
        if (!aesKey) {
          throw new Error("HSM: Encryption failed, AES key not found");
        }

        const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC);
        if (!hmacKey) {
          throw new Error("HSM: Encryption failed, HMAC key not found");
        }

        const iv = Buffer.alloc(IV_LENGTH);
        pkcs11.C_GenerateRandom(sessionHandle, iv);

        const encryptMechanism = {
          mechanism: pkcs11js.CKM_AES_CBC_PAD,
          parameter: iv
        };

        pkcs11.C_EncryptInit(sessionHandle, encryptMechanism, aesKey);

        // Calculate max buffer size (input length + potential full block of padding)
        const maxEncryptedLength = Math.ceil(data.length / BLOCK_SIZE) * BLOCK_SIZE + BLOCK_SIZE;

        // Encrypt the data; this returns the encrypted data directly
        const encryptedData = pkcs11.C_Encrypt(sessionHandle, data, Buffer.alloc(maxEncryptedLength));

        // Initialize HMAC
        const hmacMechanism = {
          mechanism: pkcs11js.CKM_SHA256_HMAC
        };

        pkcs11.C_SignInit(sessionHandle, hmacMechanism, hmacKey);

        // Sign the IV and encrypted data
        pkcs11.C_SignUpdate(sessionHandle, iv);
        pkcs11.C_SignUpdate(sessionHandle, encryptedData);

        // Get the HMAC
        const hmac = Buffer.alloc(HMAC_SIZE);
        pkcs11.C_SignFinal(sessionHandle, hmac);

        // Combine encrypted data and HMAC [Encrypted Data | HMAC]
        const finalBuffer = Buffer.alloc(encryptedData.length + hmac.length);
        encryptedData.copy(finalBuffer);
        hmac.copy(finalBuffer, encryptedData.length);

        // Prepend the IV: [IV | Encrypted Data | HMAC]
        return Buffer.concat([iv, finalBuffer]);
      } catch (error) {
        logger.error(error, "HSM: Failed to perform encryption");
        throw new Error(`HSM: Encryption failed: ${(error as Error)?.message}`);
      }
    };

    if (providedSession) {
      return $performEncryption(providedSession);
    }

    const result = await $withSession($performEncryption);
    return result;
  };
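
  // The framing produced above is exactly what decrypt takes apart below:
  //
  //   [ IV (16 bytes) | ciphertext (padded to 16-byte blocks) | HMAC-SHA256 (32 bytes) ]
  //
  // Illustrative (hypothetical) parse, mirroring the subarray math in decrypt:
  //
  //   const iv = blob.subarray(0, IV_LENGTH);
  //   const ciphertext = blob.subarray(IV_LENGTH, blob.length - HMAC_SIZE);
  //   const hmac = blob.subarray(blob.length - HMAC_SIZE);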

  const decrypt: {
    (encryptedBlob: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>;
    (encryptedBlob: Buffer): Promise<Buffer>;
  } = async (encryptedBlob: Buffer, providedSession?: pkcs11js.Handle) => {
    if (!pkcs11 || !isInitialized) {
      throw new Error("PKCS#11 module is not initialized");
    }

    const $performDecryption = (sessionHandle: pkcs11js.Handle) => {
      try {
        // Structure is: [IV (16 bytes) | Encrypted Data (N bytes) | HMAC (32 bytes)]
        const iv = encryptedBlob.subarray(0, IV_LENGTH);
        const encryptedDataWithHmac = encryptedBlob.subarray(IV_LENGTH);

        // Split encrypted data and HMAC
        const hmac = encryptedDataWithHmac.subarray(-HMAC_SIZE); // Last 32 bytes are the HMAC
        const encryptedData = encryptedDataWithHmac.subarray(0, -HMAC_SIZE); // Everything except the last 32 bytes

        // Find the keys
        const aesKey = $findKey(sessionHandle, HsmKeyType.AES);
        if (!aesKey) {
          throw new Error("HSM: Decryption failed, AES key not found");
        }

        const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC);
        if (!hmacKey) {
          throw new Error("HSM: Decryption failed, HMAC key not found");
        }

        // Verify the HMAC first
        const hmacMechanism = {
          mechanism: pkcs11js.CKM_SHA256_HMAC
        };

        pkcs11.C_VerifyInit(sessionHandle, hmacMechanism, hmacKey);
        pkcs11.C_VerifyUpdate(sessionHandle, iv);
        pkcs11.C_VerifyUpdate(sessionHandle, encryptedData);

        try {
          pkcs11.C_VerifyFinal(sessionHandle, hmac);
        } catch (error) {
          logger.error(error, "HSM: HMAC verification failed");
          throw new Error("HSM: Decryption failed"); // Generic error for failed verification
        }

        // Only decrypt if verification passed
        const decryptMechanism = {
          mechanism: pkcs11js.CKM_AES_CBC_PAD,
          parameter: iv
        };

        pkcs11.C_DecryptInit(sessionHandle, decryptMechanism, aesKey);

        const tempBuffer = Buffer.alloc(encryptedData.length);
        const decryptedData = pkcs11.C_Decrypt(sessionHandle, encryptedData, tempBuffer);

        // Create a new buffer from the decrypted data
        return Buffer.from(decryptedData);
      } catch (error) {
        logger.error(error, "HSM: Failed to perform decryption");
        throw new Error("HSM: Decryption failed"); // Generic error, to avoid leaking details such as padding errors
      }
    };

    if (providedSession) {
      return $performDecryption(providedSession);
    }

    const result = await $withSession($performDecryption);
    return result;
  };

  // We test the core functionality of the PKCS#11 module that we use throughout
  // Infisical, to ensure the user hasn't configured a faulty or unsupported HSM device.
  const $testPkcs11Module = async (session: pkcs11js.Handle) => {
    try {
      if (!pkcs11 || !isInitialized) {
        throw new Error("PKCS#11 module is not initialized");
      }

      if (!session) {
        throw new Error("HSM: Attempted to run test without a valid session");
      }

      const randomData = pkcs11.C_GenerateRandom(session, Buffer.alloc(500));

      const encryptedData = await encrypt(randomData, session);
      const decryptedData = await decrypt(encryptedData, session);

      // Buffer.compare returns 0 when the two buffers are identical
      if (Buffer.compare(randomData, decryptedData) !== 0) {
        throw new Error("HSM: Startup test failed. Decrypted data does not match original data");
      }

      return true;
    } catch (error) {
      logger.error(error, "HSM: Error testing PKCS#11 module");
      return false;
    }
  };

  const isActive = async () => {
    if (!isInitialized || !appCfg.isHsmConfigured) {
      return false;
    }

    let pkcs11TestPassed = false;

    try {
      pkcs11TestPassed = await $withSession($testPkcs11Module);
    } catch (err) {
      logger.error(err, "HSM: Error testing PKCS#11 module");
    }

    return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
  };

  const startService = async () => {
    if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;

    try {
      await $withSession(async (sessionHandle) => {
        // Check if the master key exists; create it if not

        const genericAttributes = [
          { type: pkcs11js.CKA_TOKEN, value: true }, // Persistent storage
          { type: pkcs11js.CKA_EXTRACTABLE, value: false }, // Cannot be extracted
          { type: pkcs11js.CKA_SENSITIVE, value: true }, // Sensitive value
          { type: pkcs11js.CKA_PRIVATE, value: true } // Requires authentication
        ];

        if (!$keyExists(sessionHandle, HsmKeyType.AES)) {
          // Template for generating a 256-bit AES master key
          const keyTemplate = [
            { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
            { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
            { type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
            { type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
            { type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
            { type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
            ...genericAttributes
          ];

          // Generate the key
          pkcs11.C_GenerateKey(
            sessionHandle,
            {
              mechanism: pkcs11js.CKM_AES_KEY_GEN
            },
            keyTemplate
          );

          logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
        }

        // Check if the HMAC key exists; create it if not
        if (!$keyExists(sessionHandle, HsmKeyType.HMAC)) {
          const hmacKeyTemplate = [
            { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
            { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
            { type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
            { type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
            { type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
            { type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
            ...genericAttributes
          ];

          // Generate the HMAC key
          pkcs11.C_GenerateKey(
            sessionHandle,
            {
              mechanism: pkcs11js.CKM_GENERIC_SECRET_KEY_GEN
            },
            hmacKeyTemplate
          );

          logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
        }

        // Get slot info to check supported mechanisms
        const slotId = pkcs11.C_GetSessionInfo(sessionHandle).slotID;
        const mechanisms = pkcs11.C_GetMechanismList(slotId);

        // Check for AES CBC PAD support
        const hasAesCbc = mechanisms.includes(pkcs11js.CKM_AES_CBC_PAD);

        if (!hasAesCbc) {
          throw new Error(`Required mechanism CKM_AES_CBC_PAD not supported by HSM`);
        }

        // Run a test encryption/decryption round trip
        const testPassed = await $testPkcs11Module(sessionHandle);

        if (!testPassed) {
          throw new Error("PKCS#11 module test failed. Please ensure that the HSM is correctly configured.");
        }
      });
    } catch (error) {
      logger.error(error, "HSM: Error initializing HSM service");
      throw error;
    }
  };

  return {
    encrypt,
    startService,
    isActive,
    decrypt
  };
};
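
Putting the two HSM files together, a sketch of how they might be wired at startup (the bootstrap sequence is an assumption; the calls themselves come from the listings above):

const hsmModule = initializeHsmModule();
hsmModule.initialize();

const hsmService = hsmServiceFactory({ hsmModule: hsmModule.getModule() });
await hsmService.startService(); // provisions AES/HMAC keys if missing and runs the round-trip self-test

const blob = await hsmService.encrypt(Buffer.from("root KMS key material")); // hypothetical payload
const plaintext = await hsmService.decrypt(blob);
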
backend/src/ee/services/hsm/hsm-types.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import pkcs11js from "pkcs11js";

export type HsmModule = {
  pkcs11: pkcs11js.PKCS11;
  isInitialized: boolean;
};

export enum HsmKeyType {
  AES = "AES",
  HMAC = "hmac"
}
@@ -36,8 +36,7 @@ export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean>
    });

    ldapClient.on("error", (err) => {
-     logger.error("LDAP client error:", err);
-     logger.error(err);
+     logger.error(err, "LDAP client error");
      resolve(false);
    });

@@ -29,6 +29,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  auditLogStreams: false,
  auditLogStreamLimit: 3,
  samlSSO: false,
+ hsm: false,
  oidcSSO: false,
  scim: false,
  ldap: false,
@@ -47,7 +48,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
    secretsLimit: 40
  },
  pkiEst: false,
- enforceMfa: false
+ enforceMfa: false,
+ projectTemplates: false
});

export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
@@ -129,7 +129,7 @@ export const licenseServiceFactory = ({
      }
    }

-   // this means this is self hosted oss version
+   // this means this is the self-hosted oss version
    // else it would reach catch statement
    isValidLicense = true;
  } catch (error) {
@@ -161,8 +161,8 @@ export const licenseServiceFactory = ({
    }
  } catch (error) {
    logger.error(
-     `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`,
-     error
+     error,
+     `getPlan: encountered an error when fetching plan [orgId=${orgId}] [projectId=${projectId}]`
    );
    await keyStore.setItemWithExpiry(
      FEATURE_CACHE_KEY(orgId),
@@ -46,6 +46,7 @@ export type TFeatureSet = {
  auditLogStreams: false;
  auditLogStreamLimit: 3;
  samlSSO: false;
+ hsm: false;
  oidcSSO: false;
  scim: false;
  ldap: false;
@@ -65,6 +66,7 @@ export type TFeatureSet = {
  };
  pkiEst: boolean;
  enforceMfa: boolean;
+ projectTemplates: false;
};

export type TOrgPlansTableDTO = {
@@ -17,7 +17,7 @@ import {
  infisicalSymmetricDecrypt,
  infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
-import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -56,7 +56,7 @@ type TOidcConfigServiceFactoryDep = {
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
  tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
- smtpService: Pick<TSmtpService, "sendMail">;
+ smtpService: Pick<TSmtpService, "sendMail" | "verify">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
};
@@ -223,6 +223,7 @@ export const oidcConfigServiceFactory = ({
    let newUser: TUsers | undefined;

    if (serverCfg.trustOidcEmails) {
+     // we prioritize getting the most complete user to create the new alias under
      newUser = await userDAL.findOne(
        {
          email,
@@ -230,6 +231,23 @@ export const oidcConfigServiceFactory = ({
        },
        tx
      );
+
+     if (!newUser) {
+       // this fetches user entries created via invites
+       newUser = await userDAL.findOne(
+         {
+           username: email
+         },
+         tx
+       );
+
+       if (newUser && !newUser.isEmailVerified) {
+         // we automatically mark it as email-verified because we've configured trust for OIDC emails
+         newUser = await userDAL.updateById(newUser.id, {
+           isEmailVerified: true
+         });
+       }
+     }
    }

    if (!newUser) {
@@ -332,14 +350,20 @@ export const oidcConfigServiceFactory = ({
      userId: user.id
    });

-   await smtpService.sendMail({
-     template: SmtpTemplates.EmailVerification,
-     subjectLine: "Infisical confirmation code",
-     recipients: [user.email],
-     substitutions: {
-       code: token
-     }
-   });
+   await smtpService
+     .sendMail({
+       template: SmtpTemplates.EmailVerification,
+       subjectLine: "Infisical confirmation code",
+       recipients: [user.email],
+       substitutions: {
+         code: token
+       }
+     })
+     .catch((err: Error) => {
+       throw new OidcAuthError({
+         message: `Error sending email confirmation code for user registration - contact the Infisical instance admin. ${err.message}`
+       });
+     });
  }

  return { isUserCompleted, providerAuthToken };
@@ -395,6 +419,18 @@ export const oidcConfigServiceFactory = ({
      message: `Organization bot for organization with ID '${org.id}' not found`,
      name: "OrgBotNotFound"
    });
+
+  const serverCfg = await getServerCfg();
+  if (isActive && !serverCfg.trustOidcEmails) {
+    const isSmtpConnected = await smtpService.verify();
+    if (!isSmtpConnected) {
+      throw new BadRequestError({
+        message:
+          "Cannot enable OIDC when there are issues with the instance's SMTP configuration. Bypass this by turning on trust for OIDC emails in the server admin console."
+      });
+    }
+  }

  const key = infisicalSymmetricDecrypt({
    ciphertext: orgBot.encryptedSymmetricKey,
    iv: orgBot.symmetricKeyIV,
@@ -26,7 +26,8 @@ export enum OrgPermissionSubjects {
  Identity = "identity",
  Kms = "kms",
  AdminConsole = "organization-admin-console",
- AuditLogs = "audit-logs"
+ AuditLogs = "audit-logs",
+ ProjectTemplates = "project-templates"
}

export type OrgPermissionSet =
@@ -45,6 +46,7 @@ export type OrgPermissionSet =
  | [OrgPermissionActions, OrgPermissionSubjects.Identity]
  | [OrgPermissionActions, OrgPermissionSubjects.Kms]
  | [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
+ | [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
  | [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole];

const buildAdminPermission = () => {
@@ -118,6 +120,11 @@ const buildAdminPermission = () => {
  can(OrgPermissionActions.Edit, OrgPermissionSubjects.AuditLogs);
  can(OrgPermissionActions.Delete, OrgPermissionSubjects.AuditLogs);

+ can(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
+ can(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
+ can(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates);
+ can(OrgPermissionActions.Delete, OrgPermissionSubjects.ProjectTemplates);
+
  can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);

  return rules;
@@ -127,14 +127,15 @@ export const permissionDALFactory = (db: TDbClient) => {

  const getProjectPermission = async (userId: string, projectId: string) => {
    try {
+     const subQueryUserGroups = db(TableName.UserGroupMembership).where("userId", userId).select("groupId");
      const docs = await db
        .replicaNode()(TableName.Users)
        .where(`${TableName.Users}.id`, userId)
        .leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
        .leftJoin(TableName.GroupProjectMembership, (queryBuilder) => {
          void queryBuilder
            .on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId]))
-           .andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`);
+           // @ts-expect-error akhilmhdh: this is a valid knex query; the ts type definition is just missing it
+           .andOnIn(`${TableName.GroupProjectMembership}.groupId`, subQueryUserGroups);
        })
        .leftJoin(
          TableName.GroupProjectMembershipRole,
@@ -29,4 +29,18 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
  }
}

-export { isAuthMethodSaml, validateOrgSSO };
+const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
+  const handler = {
+    get(target: Record<string, string>, prop: string) {
+      if (!(prop in target)) {
+        // eslint-disable-next-line no-param-reassign
+        target[prop] = `{{identity.metadata.${prop}}}`; // Add the missing key as an "own" property
+      }
+      return target[prop];
+    }
+  };
+
+  return new Proxy(obj, handler);
+};
+
+export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
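
A quick illustration of the proxy's behavior (the values are hypothetical): known keys resolve normally, while unknown keys echo back their own handlebars placeholder instead of interpolating to nothing.

const metadata = escapeHandlebarsMissingMetadata({ team: "platform" });
metadata.team;   // "platform"
metadata.region; // "{{identity.metadata.region}}" (the placeholder survives for handlebars to emit verbatim)
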
@@ -21,7 +21,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok

import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
import { TPermissionDALFactory } from "./permission-dal";
-import { validateOrgSSO } from "./permission-fns";
+import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
import { TBuildOrgPermissionDTO, TBuildProjectPermissionDTO } from "./permission-service-types";
import {
  buildServiceTokenProjectPermission,
@@ -227,11 +227,13 @@ export const permissionServiceFactory = ({
      })) || [];

      const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
-     const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
-     const metadataKeyValuePair = objectify(
-       userProjectPermission.metadata,
-       (i) => i.key,
-       (i) => i.value
-     );
+     const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
+     const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
+       objectify(
+         userProjectPermission.metadata,
+         (i) => i.key,
+         (i) => i.value
+       )
+     );
      const interpolateRules = templatedRules(
        {
@@ -292,12 +294,15 @@ export const permissionServiceFactory = ({
      })) || [];

      const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
-     const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
-     const metadataKeyValuePair = objectify(
-       identityProjectPermission.metadata,
-       (i) => i.key,
-       (i) => i.value
-     );
+     const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
+     const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
+       objectify(
+         identityProjectPermission.metadata,
+         (i) => i.key,
+         (i) => i.value
+       )
+     );

      const interpolateRules = templatedRules(
        {
          identity: {
@@ -1,14 +1,7 @@
import picomatch from "picomatch";
import { z } from "zod";

-export enum PermissionConditionOperators {
-  $IN = "$in",
-  $ALL = "$all",
-  $REGEX = "$regex",
-  $EQ = "$eq",
-  $NEQ = "$ne",
-  $GLOB = "$glob"
-}
+import { PermissionConditionOperators } from "@app/lib/casl";

export const PermissionConditionSchema = {
  [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
@@ -1,10 +1,10 @@
import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
import { z } from "zod";

-import { conditionsMatcher } from "@app/lib/casl";
+import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";

-import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";
+import { PermissionConditionSchema } from "./permission-types";

export enum ProjectPermissionActions {
  Read = "read",
@@ -694,31 +694,35 @@ export const buildServiceTokenProjectPermission = (
  const canRead = permission.includes("read");
  const { can, build } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
  scopes.forEach(({ secretPath, environment }) => {
-   if (canWrite) {
-     // TODO: @Akhi
-     // @ts-expect-error type
-     can(ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets, {
-       secretPath: { $glob: secretPath },
-       environment
-     });
-     // @ts-expect-error type
-     can(ProjectPermissionActions.Create, ProjectPermissionSub.Secrets, {
-       secretPath: { $glob: secretPath },
-       environment
-     });
-     // @ts-expect-error type
-     can(ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets, {
-       secretPath: { $glob: secretPath },
-       environment
-     });
-   }
-   if (canRead) {
-     // @ts-expect-error type
-     can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets, {
-       secretPath: { $glob: secretPath },
-       environment
-     });
-   }
+   [ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
+     (subject) => {
+       if (canWrite) {
+         // TODO: @Akhi
+         // @ts-expect-error type
+         can(ProjectPermissionActions.Edit, subject, {
+           secretPath: { $glob: secretPath },
+           environment
+         });
+         // @ts-expect-error type
+         can(ProjectPermissionActions.Create, subject, {
+           secretPath: { $glob: secretPath },
+           environment
+         });
+         // @ts-expect-error type
+         can(ProjectPermissionActions.Delete, subject, {
+           secretPath: { $glob: secretPath },
+           environment
+         });
+       }
+       if (canRead) {
+         // @ts-expect-error type
+         can(ProjectPermissionActions.Read, subject, {
+           secretPath: { $glob: secretPath },
+           environment
+         });
+       }
+     }
+   );
  });

  return build({ conditionsMatcher });
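
To see what this refactor buys, a hypothetical check against the built ability; the argument order of `buildServiceTokenProjectPermission` and the CASL `subject` helper usage here are assumptions, not shown in the diff:

import { subject } from "@casl/ability";

// Hypothetical scope: a service token with read+write on /app/* in "dev"
const ability = buildServiceTokenProjectPermission(
  [{ secretPath: "/app/*", environment: "dev" }],
  ["read", "write"]
);

// Before this change only ProjectPermissionSub.Secrets matched; the same
// $glob rule now also covers secret imports and secret folders:
ability.can(
  ProjectPermissionActions.Read,
  subject(ProjectPermissionSub.SecretFolders, { secretPath: "/app/api", environment: "dev" })
); // => true
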
backend/src/ee/services/project-template/project-template-constants.ts (new file)
@@ -0,0 +1,5 @@
export const ProjectTemplateDefaultEnvironments = [
  { name: "Development", slug: "dev", position: 1 },
  { name: "Staging", slug: "staging", position: 2 },
  { name: "Production", slug: "prod", position: 3 }
];
backend/src/ee/services/project-template/project-template-dal.ts (new file)
@@ -0,0 +1,7 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TProjectTemplateDALFactory = ReturnType<typeof projectTemplateDALFactory>;

export const projectTemplateDALFactory = (db: TDbClient) => ormify(db, TableName.ProjectTemplates);
backend/src/ee/services/project-template/project-template-fns.ts (new file)
@@ -0,0 +1,24 @@
import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
import {
  InfisicalProjectTemplate,
  TUnpackedPermission
} from "@app/ee/services/project-template/project-template-types";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";

export const getDefaultProjectTemplate = (orgId: string) => ({
  id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // random ID to appease zod
  name: InfisicalProjectTemplate.Default,
  createdAt: new Date(),
  updatedAt: new Date(),
  description: "Infisical's default project template",
  environments: ProjectTemplateDefaultEnvironments,
  roles: [...getPredefinedRoles("project-template")].map(({ name, slug, permissions }) => ({
    name,
    slug,
    permissions: permissions as TUnpackedPermission[]
  })),
  orgId
});

export const isInfisicalProjectTemplate = (template: string) =>
  Object.values(InfisicalProjectTemplate).includes(template as InfisicalProjectTemplate);
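
A plausible call site for this guard (hypothetical; not part of this diff): rejecting user-supplied template names that collide with the built-ins.

// Hypothetical: stop a custom template from shadowing a built-in name
if (isInfisicalProjectTemplate(params.name)) {
  throw new BadRequestError({
    message: `The name "${params.name}" is reserved for Infisical's built-in templates.`
  });
}
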
backend/src/ee/services/project-template/project-template-service.ts (new file)
@@ -0,0 +1,265 @@
import { ForbiddenError } from "@casl/ability";
import { packRules } from "@casl/ability/extra";

import { TProjectTemplates } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getDefaultProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import {
  TCreateProjectTemplateDTO,
  TProjectTemplateEnvironment,
  TProjectTemplateRole,
  TUnpackedPermission,
  TUpdateProjectTemplateDTO
} from "@app/ee/services/project-template/project-template-types";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";

import { TProjectTemplateDALFactory } from "./project-template-dal";

type TProjectTemplatesServiceFactoryDep = {
  licenseService: TLicenseServiceFactory;
  permissionService: TPermissionServiceFactory;
  projectTemplateDAL: TProjectTemplateDALFactory;
};

export type TProjectTemplateServiceFactory = ReturnType<typeof projectTemplateServiceFactory>;

const $unpackProjectTemplate = ({ roles, environments, ...rest }: TProjectTemplates) => ({
  ...rest,
  environments: environments as TProjectTemplateEnvironment[],
  roles: [
    ...getPredefinedRoles("project-template").map(({ name, slug, permissions }) => ({
      name,
      slug,
      permissions: permissions as TUnpackedPermission[]
    })),
    ...(roles as TProjectTemplateRole[]).map((role) => ({
      ...role,
      permissions: unpackPermissions(role.permissions)
    }))
  ]
});

export const projectTemplateServiceFactory = ({
  licenseService,
  permissionService,
  projectTemplateDAL
}: TProjectTemplatesServiceFactoryDep) => {
  const listProjectTemplatesByOrg = async (actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to access project templates due to plan restriction. Upgrade plan to access project templates."
      });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      actor.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);

    const projectTemplates = await projectTemplateDAL.find({
      orgId: actor.orgId
    });

    return [
      getDefaultProjectTemplate(actor.orgId),
      ...projectTemplates.map((template) => $unpackProjectTemplate(template))
    ];
  };

  const findProjectTemplateByName = async (name: string, actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to access project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findOne({ name, orgId: actor.orgId });

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with name "${name}"` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);

    return {
      ...$unpackProjectTemplate(projectTemplate),
      packedRoles: projectTemplate.roles as TProjectTemplateRole[] // preserve the packed form for when the template is applied
    };
  };

  const findProjectTemplateById = async (id: string, actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to access project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findById(id);

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);

    return {
      ...$unpackProjectTemplate(projectTemplate),
      packedRoles: projectTemplate.roles as TProjectTemplateRole[] // preserve the packed form for when the template is applied
    };
  };

  const createProjectTemplate = async (
    { roles, environments, ...params }: TCreateProjectTemplateDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to create project template due to plan restriction. Upgrade plan to access project templates."
      });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      actor.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);

    const isConflictingName = Boolean(
      await projectTemplateDAL.findOne({
        name: params.name,
        orgId: actor.orgId
      })
    );

    if (isConflictingName)
      throw new BadRequestError({
        message: `A project template with the name "${params.name}" already exists.`
      });

    const projectTemplate = await projectTemplateDAL.create({
      ...params,
      roles: JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) }))),
      environments: JSON.stringify(environments),
      orgId: actor.orgId
    });

    return $unpackProjectTemplate(projectTemplate);
  };

  const updateProjectTemplateById = async (
    id: string,
    { roles, environments, ...params }: TUpdateProjectTemplateDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to update project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findById(id);

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates);

    if (params.name && projectTemplate.name !== params.name) {
      const isConflictingName = Boolean(
        await projectTemplateDAL.findOne({
          name: params.name,
          orgId: projectTemplate.orgId
        })
      );

      if (isConflictingName)
        throw new BadRequestError({
          message: `A project template with the name "${params.name}" already exists.`
        });
    }

    const updatedProjectTemplate = await projectTemplateDAL.updateById(id, {
      ...params,
      roles: roles
        ? JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) })))
        : undefined,
      environments: environments ? JSON.stringify(environments) : undefined
    });

    return $unpackProjectTemplate(updatedProjectTemplate);
  };

  const deleteProjectTemplateById = async (id: string, actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to delete project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findById(id);

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.ProjectTemplates);

    const deletedProjectTemplate = await projectTemplateDAL.deleteById(id);

    return $unpackProjectTemplate(deletedProjectTemplate);
  };

  return {
    listProjectTemplatesByOrg,
    createProjectTemplate,
    updateProjectTemplateById,
    deleteProjectTemplateById,
    findProjectTemplateById,
    findProjectTemplateByName
  };
};
backend/src/ee/services/project-template/project-template-types.ts (new file)
@@ -0,0 +1,28 @@
import { z } from "zod";

import { TProjectEnvironments } from "@app/db/schemas";
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";

export type TProjectTemplateEnvironment = Pick<TProjectEnvironments, "name" | "slug" | "position">;

export type TProjectTemplateRole = {
  slug: string;
  name: string;
  permissions: TProjectPermissionV2Schema[];
};

export type TCreateProjectTemplateDTO = {
  name: string;
  description?: string;
  roles: TProjectTemplateRole[];
  environments: TProjectTemplateEnvironment[];
};

export type TUpdateProjectTemplateDTO = Partial<TCreateProjectTemplateDTO>;

export type TUnpackedPermission = z.infer<typeof UnpackedPermissionSchema>;

export enum InfisicalProjectTemplate {
  Default = "default"
}
@@ -46,7 +46,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
      }
      return rateLimit;
    } catch (err) {
-     logger.error("Error fetching rate limits %o", err);
+     logger.error(err, "Error fetching rate limits");
      return undefined;
    }
  };
@@ -69,12 +69,12 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
        mfaRateLimit: rateLimit.mfaRateLimit
      };

-     logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
+     logger.info(newRateLimitMaxConfiguration, "syncRateLimitConfiguration: rate limit configuration");
      Object.freeze(newRateLimitMaxConfiguration);
      rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
    }
  } catch (error) {
-   logger.error(`Error syncing rate limit configurations: %o`, error);
+   logger.error(error, "Error syncing rate limit configurations");
  }
};
@@ -267,7 +267,8 @@ export const secretApprovalRequestServiceFactory = ({
            : "",
          secretComment: el.secretVersion.encryptedComment
            ? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString()
-           : ""
+           : "",
+         tags: el.secretVersion.tags
        }
      : undefined
  }));
@@ -571,7 +572,7 @@ export const secretApprovalRequestServiceFactory = ({
        reminderNote: el.reminderNote,
        skipMultilineEncoding: el.skipMultilineEncoding,
        key: el.key,
-       tagIds: el?.tags.map(({ id }) => id),
+       tags: el?.tags.map(({ id }) => id),
        ...encryptedValue
      }
    };
@@ -85,7 +85,8 @@ export const secretRotationDbFn = async ({
  password,
  username,
  client,
- variables
+ variables,
+ options
}: TSecretRotationDbFn) => {
  const appCfg = getConfig();

@@ -117,7 +118,8 @@ export const secretRotationDbFn = async ({
      password,
      connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
      ssl,
-     pool: { min: 0, max: 1 }
+     pool: { min: 0, max: 1 },
+     options
    }
  });
  const data = await db.raw(query, variables);
@@ -153,6 +155,14 @@ export const getDbSetQuery = (db: TDbProviderClients, variables: { username: str
      variables: [variables.username]
    };
  }

+ if (db === TDbProviderClients.MsSqlServer) {
+   return {
+     query: `ALTER LOGIN ?? WITH PASSWORD = '${variables.password}'`,
+     variables: [variables.username]
+   };
+ }
+
  // add more based on client
  return {
    query: `ALTER USER ?? IDENTIFIED BY '${variables.password}'`,
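
For reference, a sketch of how these statements run through the knex binding shown earlier (`db.raw(query, variables)`): the `??` placeholder binds an escaped identifier, so only the generated password is interpolated into the statement text. The concrete values below are hypothetical.

const { query, variables } = getDbSetQuery(TDbProviderClients.MsSqlServer, {
  username: "infisical_rotation_user", // bound via ?? as an escaped identifier
  password: newPassword // interpolated into the statement body
});
await db.raw(query, variables); // => ALTER LOGIN [infisical_rotation_user] WITH PASSWORD = '...'
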
@@ -24,4 +24,5 @@ export type TSecretRotationDbFn = {
  query: string;
  variables: unknown[];
  ca?: string;
+ options?: Record<string, unknown>;
};
@@ -94,7 +94,9 @@ export const secretRotationQueueFactory = ({
          // in production this interval is in days; in development it is in seconds
          every: appCfg.NODE_ENV === "development" ? secondsToMillis(interval) : daysToMillisecond(interval),
          immediately: true
-       }
+       },
+       removeOnComplete: true,
+       removeOnFail: true
      }
    );
  };
@@ -114,6 +116,7 @@ export const secretRotationQueueFactory = ({

  queue.start(QueueName.SecretRotation, async (job) => {
    const { rotationId } = job.data;
+   const appCfg = getConfig();
    logger.info(`secretRotationQueue.process: [rotationDocument=${rotationId}]`);
    const secretRotation = await secretRotationDAL.findById(rotationId);
    const rotationProvider = rotationTemplates.find(({ name }) => name === secretRotation?.provider);
@@ -172,6 +175,15 @@ export const secretRotationQueueFactory = ({
          // set a random value for new password
          newCredential.internal.rotated_password = alphaNumericNanoId(32);
          const { admin_username: username, admin_password: password, host, database, port, ca } = newCredential.inputs;
+
+         const options =
+           provider.template.client === TDbProviderClients.MsSqlServer
+             ? ({
+                 encrypt: appCfg.ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT,
+                 cryptoCredentialsDetails: ca ? { ca } : {}
+               } as Record<string, unknown>)
+             : undefined;
+
          const dbFunctionArg = {
            username,
            password,
@@ -179,8 +191,10 @@ export const secretRotationQueueFactory = ({
            database,
            port,
            ca: ca as string,
-           client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client
+           client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client,
+           options
          } as TSecretRotationDbFn;

          // set function
          await secretRotationDbFn({
            ...dbFunctionArg,
@@ -189,12 +203,17 @@ export const secretRotationQueueFactory = ({
            username: newCredential.internal.username as string
          })
        });

        // test function
+       const testQuery =
+         provider.template.client === TDbProviderClients.MsSqlServer ? "SELECT GETDATE()" : "SELECT NOW()";
+
        await secretRotationDbFn({
          ...dbFunctionArg,
-         query: "SELECT NOW()",
+         query: testQuery,
          variables: []
        });

        newCredential.outputs.db_username = newCredential.internal.username;
        newCredential.outputs.db_password = newCredential.internal.rotated_password;
        // clean up
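
A sketch of what the MSSQL branch produces; the option names come straight from the diff and match the tedious driver that knex's mssql dialect wraps, while the CA value is hypothetical:

// Hypothetical result when ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT is set and a CA is provided
const options = {
  encrypt: true, // require TLS on the connection
  cryptoCredentialsDetails: { ca: "-----BEGIN CERTIFICATE-----..." } // trust this CA for the server certificate
};
// secretRotationDbFn then merges this into the knex connection:
//   { ...connection, pool: { min: 0, max: 1 }, options }
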
@@ -1,4 +1,5 @@
import { AWS_IAM_TEMPLATE } from "./aws-iam";
+import { MSSQL_TEMPLATE } from "./mssql";
import { MYSQL_TEMPLATE } from "./mysql";
import { POSTGRES_TEMPLATE } from "./postgres";
import { SENDGRID_TEMPLATE } from "./sendgrid";
@@ -26,6 +27,13 @@ export const rotationTemplates: TSecretRotationProviderTemplate[] = [
    description: "Rotate MySQL@7/MariaDB user credentials",
    template: MYSQL_TEMPLATE
  },
+ {
+   name: "mssql",
+   title: "Microsoft SQL Server",
+   image: "mssqlserver.png",
+   description: "Rotate Microsoft SQL Server user credentials",
+   template: MSSQL_TEMPLATE
+ },
  {
    name: "aws-iam",
    title: "AWS IAM",
Some files were not shown because too many files have changed in this diff.