mirror of
https://github.com/Infisical/infisical.git
synced 2025-03-28 15:29:21 +00:00
Compare commits
1 Commit
fix/remove...sheen/hack
Author | SHA1 | Date
---|---|---
 | 0762d8b172 | 
.env.example
.envrc
attribute-based-access-controls.mdx
overview.mdx
flake.lock
flake.nix
.github/workflows
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
backend
Dockerfile
Dockerfile.dev
main.ts
vitest.unit.config.ts
e2e-test
package-lock.json
package.json
src
@types
db
knexfile.ts
migrations
20250212191958_create-gateway.ts
20250224015833_identity-profile.ts
20250226021631_secret-requests.ts
20250226082254_add-gov-banner-and-consent-fields.ts
20250228022604_increase-secret-reminder-note-max-length.ts
20250303213350_add-folder-description.ts
20250305080145_add-secret-review-comment.ts
20250305131152_add-actor-id-to-secret-versions-v2.ts
20250311105617_add-share-to-anyone-setting-to-organizations.ts
20250314145202_identity-oidc-claim-mapping.ts
20250317101525_add-instance-admin-mi.ts
20250319134021_recursive-folder-index.ts
schemas
ee
routes
v1
dynamic-secret-lease-router.ts
dynamic-secret-router.ts
gateway-router.ts
identity-project-additional-privilege-router.ts
index.ts
kmip-router.ts
saml-router.ts
secret-approval-request-router.ts
secret-router.ts
secret-version-router.ts
snapshot-router.ts
ssh-certificate-router.ts
ssh-certificate-template-router.ts
user-additional-privilege-router.ts
v2
services
access-approval-request
audit-log
dynamic-secret-lease
dynamic-secret
gateway
group
identity-project-additional-privilege-v2
identity-project-additional-privilege
kmip
license
permission
project-user-additional-privilege
saml-config
secret-approval-request
secret-replication
secret-rotation
secret-snapshot
ssh-certificate-template
ssh-certificate
ssh
keystore
lib
api-docs
casl
config
crypto
errors
gateway
ms
telemetry
template
turn
queue
server
lib
plugins
routes
index.ts
sanitizedSchemas.ts
v1
admin-router.ts
app-connection-routers
automated-security-router.ts
certificate-authority-router.ts
certificate-router.ts
certificate-template-router.ts
dashboard-router.ts
identity-aws-iam-auth-router.ts
identity-azure-auth-router.ts
identity-gcp-auth-router.ts
identity-jwt-auth-router.ts
identity-kubernetes-auth-router.ts
identity-oidc-auth-router.ts
identity-router.ts
identity-token-auth-router.ts
identity-universal-auth-router.ts
index.ts
organization-router.ts
password-router.ts
project-membership-router.ts
project-router.ts
secret-folder-router.ts
secret-requests-router.ts
secret-sharing-router.ts
secret-sync-routers
v2
v3
services
app-connection
app-connection-enums.ts
app-connection-fns.ts
app-connection-maps.ts
app-connection-service.ts
app-connection-types.ts
humanitec
auth
automated-security
certificate-authority
certificate-template
external-migration
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-jwt-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration-app-list.ts
integration-auth-service.ts
integration-delete-secret.ts
integration-list.ts
integration-sync-secret.ts
integration
org
project-membership
project
resource-cleanup
secret-folder
secret-import
secret-sharing
secret-sync
aws-parameter-store
aws-secrets-manager
azure-app-configuration
azure-key-vault
databricks
gcp
github
humanitec
humanitec-sync-constants.ts
humanitec-sync-enums.ts
humanitec-sync-fns.ts
humanitec-sync-schemas.ts
humanitec-sync-types.ts
index.ts
secret-sync-enums.ts
secret-sync-fns.ts
secret-sync-maps.ts
secret-sync-queue.ts
secret-sync-schemas.ts
secret-sync-service.ts
secret-sync-types.ts
secret-tag
secret-v2-bridge
secret-v2-bridge-dal.ts
secret-v2-bridge-fns.ts
secret-v2-bridge-service.ts
secret-v2-bridge-types.ts
secret-version-dal.ts
secret
service-token
slack
smtp
super-admin
telemetry
user
webhook
cli
company/handbook
docker-compose.prod.yml
docs
api-reference
endpoints
app-connections/humanitec
secret-syncs/humanitec
overview
cli/commands
documentation
guides
platform
access-controls
abac
images
add-metadata-on-machine-identity-1.png
add-metadata-on-machine-identity-2.png
add-metadata-on-machine-identity-3.png
managing-machine-identity-attributes.mdx
managing-user-metadata.mdx
overview.mdx
admin-panel
audit-logs.mdx
gateways
identities
kms
secret-scanning.mdx
webhooks.mdx
images
app-connections/humanitec
add-humanitec-connection.png
humanitec-add-api-token.png
humanitec-add-user-options.png
humanitec-add-user-role.png
humanitec-add-user.png
humanitec-app-connection-created.png
humanitec-app-connection-modal.png
humanitec-app-connection-option.png
humanitec-applications-tab.png
humanitec-connection.png
humanitec-copy-api-token.png
humanitec-create-api-token.png
humanitec-create-new-user.png
humanitec-service-account-filled.png
humanitec-service-users.png
humanitec-user-added.png
select-humanitec-connection.png
platform
access-controls
admin-panels
audit-logs
gateways
secret-scanning
secret-syncs/humanitec
humanitec-created.png
humanitec-destination.png
humanitec-details.png
humanitec-options.png
humanitec-review.png
humanitec-source.png
select-humanitec-option.png
self-hosting/guides/automated-bootstrapping
integrations
app-connections
cloud
frameworks
platforms/kubernetes
secret-syncs
internals
mint.json
self-hosting/guides
frontend
package-lock.json
package.json
const.ts
routeTree.gen.ts
routes.ts
public
src
components
auth
organization/CreateOrgModal
page-frames
permissions
projects
secret-syncs/forms
SecretSyncDestinationFields
SecretSyncOptionsFields
SecretSyncReviewFields
schemas
secrets/SecretReferenceDetails
utilities/checks/password
v2
Blur
Dropdown
InfisicalSecretInput
Modal
Popoverv2
Select
const
context
helpers
hooks
api
admin
appConnections
auditLogs
auth
automated-security
dashboard
dynamicSecret
gateways
identities
index.tsx
organization
reactQuery.tsx
secretApprovalRequest
secretFolders
secretImports
secretSharing
secretSnapshots
secretSyncs
secrets
subscriptions
types.ts
workspace
utils
layouts
AdminLayout
OrganizationLayout
lib
main.tsxpages
admin
OverviewPage
SignUpPage
auth
PasswordResetPage
RequestNewInvitePage
SignUpInvitePage
SignUpPage
SignUpSsoPage
SignUpSsoPage.tsx
components
VerifyEmailPage
cert-manager/SettingsPage/components
kms/SettingsPage/components
middlewares
organization
AccessManagementPage/components/OrgIdentityTab/components/IdentitySection
AppConnections/AppConnectionsPage/components/AppConnectionForm
AuditLogsPage
AutomatedSecurityPage
Gateways/GatewayListPage
IdentityDetailsByIDPage/components/ViewIdentityAuthModal
RoleByIDPage/components
OrgRoleModifySection.utils.ts
RolePermissionsSection
SecretScanningPage
SecretSharingPage
SecretSharingPage.tsx
ShareSecretSection.tsx
components
AddShareSecretModal.tsx
route.tsx
RequestSecret
AddSecretRequestModal.tsx
RequestSecretForm.tsx
RequestSecretTab.tsx
RequestedSecretsRow.tsx
RequestedSecretsTable.tsx
RevealSecretValueModal.tsx
ShareSecretSection.tsx
ShareSecretsRow.tsx
ShareSecretsTable.tsx
index.tsx
SecretSharingSettingsPage
SecretSharingSettingsPage.tsx
route.tsx
components
SecretSharingAllowShareToAnyone
SecretSharingSettingsGeneralTab
SecretSharingSettingsTabGroup
index.tsx
SettingsPage/components/OrgNameChangeSection
UserDetailsByIDPage/components/UserProjectsSection
project
AccessControlPage/components/ServiceTokenTab/components/ServiceTokenSection
RoleDetailsBySlugPage/components
public
ShareSecretPage/components
ViewSecretRequestByIDPage
secret-manager
IntegrationsDetailsByIDPage/components
IntegrationsListPage/components/SecretSyncsTab/SecretSyncTable
OverviewPage
OverviewPage.tsx
components
CreateSecretForm
SecretOverviewTableRow
SecretSearchInput/components
SelectionPanel
SecretApprovalsPage/components/SecretApprovalRequest/components
SecretDashboardPage
SecretDashboardPage.tsx
components
ActionBar
ActionBar.tsx
CreateDynamicSecretForm
AwsElastiCacheInputForm.tsx
AwsIamInputForm.tsx
AzureEntraIdInputForm.tsx
CassandraInputForm.tsx
CreateDynamicSecretForm.tsx
ElasticSearchInputForm.tsx
LdapInputForm.tsx
MongoAtlasInputForm.tsx
MongoDBInputForm.tsx
RabbitMqInputForm.tsx
RedisInputForm.tsx
SapAseInputForm.tsx
SapHanaInputForm.tsx
SnowflakeInputForm.tsx
SqlDatabaseInputForm.tsx
TotpInputForm.tsx
FolderForm.tsx
DynamicSecretListView/EditDynamicSecretForm
FolderListView
PitDrawer
SecretDropzone
SecretListView
SnapshotView
SecretSyncDetailsByIDPage/components
SettingsPage/components
integrations
BitbucketConfigurePage
DatabricksConfigurePage
WindmillAuthorizePage
WindmillConfigurePage
ssh/SettingsPage/components
user/PersonalSettingsPage/components
helm-charts
infisical-standalone-postgres
secrets-operator
k8-operator
api/v1alpha1
config
controllers/infisicalsecret
kubectl-install
packages
sink
@ -112,11 +112,4 @@ INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
3 .envrc
@ -1,3 +0,0 @@
# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake
@ -32,23 +32,10 @@ jobs:
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env

echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"

docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api

echo "Container status right after creation:"
docker ps -a | grep infisical-api
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@ -56,48 +43,35 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: "1.21.5"
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi

echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
sleep 5
SECONDS=$((SECONDS+5))

docker logs infisical-api

sleep 2
SECONDS=$((SECONDS+2))
done


if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/oasdiff/oasdiff@latest
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api || true
docker rm infisical-api || true
docker stop infisical-api
docker remove infisical-api
@ -26,7 +26,7 @@ jobs:
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

npm-release:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
env:
working-directory: ./npm
needs:
@ -83,7 +83,7 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

goreleaser:
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
@ -103,12 +103,11 @@ jobs:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
8 .github/workflows/run-backend-tests.yml (vendored)
@ -34,10 +34,7 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
- name: Start integration test
run: npm run test:e2e
working-directory: backend
env:
@ -47,5 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down

docker compose -f "docker-compose.dev.yml" down
@ -161,9 +161,6 @@ COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
@ -3,10 +3,13 @@ ARG POSTHOG_API_KEY=posthog-api-key
|
||||
ARG INTERCOM_ID=intercom-id
|
||||
ARG CAPTCHA_SITE_KEY=captcha-site-key
|
||||
|
||||
FROM node:20-slim AS base
|
||||
FROM node:20-alpine AS base
|
||||
|
||||
FROM base AS frontend-dependencies
|
||||
|
||||
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
|
||||
RUN apk add --no-cache libc6-compat
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY frontend/package.json frontend/package-lock.json ./
|
||||
@ -42,8 +45,8 @@ RUN npm run build
|
||||
FROM base AS frontend-runner
|
||||
WORKDIR /app
|
||||
|
||||
RUN groupadd --system --gid 1001 nodejs
|
||||
RUN useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 non-root-user
|
||||
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
|
||||
|
||||
@ -53,23 +56,21 @@ USER non-root-user
|
||||
## BACKEND
|
||||
##
|
||||
FROM base AS backend-build
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install all required dependencies for build
|
||||
RUN apt-get update && apt-get install -y \
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds-bin \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN groupadd --system --gid 1001 nodejs
|
||||
RUN useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
freetds-dev
|
||||
|
||||
COPY backend/package*.json ./
|
||||
RUN npm ci --only-production
|
||||
@ -85,19 +86,18 @@ FROM base AS backend-runner
|
||||
WORKDIR /app
|
||||
|
||||
# Install all required dependencies for runtime
|
||||
RUN apt-get update && apt-get install -y \
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds-bin \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
freetds-dev
|
||||
|
||||
# Configure ODBC
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
COPY backend/package*.json ./
|
||||
RUN npm ci --only-production
|
||||
@ -109,36 +109,34 @@ RUN mkdir frontend-build
|
||||
# Production stage
|
||||
FROM base AS production
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
ca-certificates \
|
||||
bash \
|
||||
curl \
|
||||
git \
|
||||
RUN apk add --upgrade --no-cache ca-certificates
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.31.1 && apk add --no-cache git
|
||||
|
||||
WORKDIR /
|
||||
|
||||
# Install all required runtime dependencies
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds-bin \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev \
|
||||
wget \
|
||||
openssh-client \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install Infisical CLI
|
||||
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
|
||||
&& apt-get update && apt-get install -y infisical=0.31.1 \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /
|
||||
bash \
|
||||
curl \
|
||||
git \
|
||||
openssh
|
||||
|
||||
# Configure ODBC in production
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
# Setup user permissions
|
||||
RUN groupadd --system --gid 1001 nodejs \
|
||||
&& useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
|
||||
# Give non-root-user permission to update SSL certs
|
||||
RUN chown -R non-root-user /etc/ssl/certs
|
||||
@ -156,11 +154,11 @@ ENV INTERCOM_ID=$INTERCOM_ID
|
||||
ARG CAPTCHA_SITE_KEY
|
||||
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
|
||||
|
||||
|
||||
COPY --from=backend-runner /app /backend
|
||||
|
||||
COPY --from=frontend-runner /app ./backend/frontend-build
|
||||
|
||||
ARG INFISICAL_PLATFORM_VERSION
|
||||
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
|
||||
|
||||
ENV PORT 8080
|
||||
ENV HOST=0.0.0.0
|
||||
@ -168,7 +166,6 @@ ENV HTTPS_ENABLED false
|
||||
ENV NODE_ENV production
|
||||
ENV STANDALONE_BUILD true
|
||||
ENV STANDALONE_MODE true
|
||||
|
||||
WORKDIR /backend
|
||||
|
||||
ENV TELEMETRY_ENABLED true
|
||||
|
@ -1,22 +1,23 @@
|
||||
# Build stage
|
||||
FROM node:20-slim AS build
|
||||
FROM node:20-alpine AS build
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Required for pkcs11js
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh-client
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh
|
||||
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apk add --no-cache \
|
||||
unixodbc \
|
||||
freetds-bin \
|
||||
freetds-dev \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
libc-dev
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --only-production
|
||||
@ -25,36 +26,36 @@ COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Production stage
|
||||
FROM node:20-slim
|
||||
FROM node:20-alpine
|
||||
WORKDIR /app
|
||||
|
||||
ENV npm_config_cache /home/node/.npm
|
||||
|
||||
COPY package*.json ./
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++
|
||||
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apk add --no-cache \
|
||||
unixodbc \
|
||||
freetds-bin \
|
||||
freetds-dev \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
libc-dev
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
RUN npm ci --only-production && npm cache clean --force
|
||||
|
||||
COPY --from=build /app .
|
||||
|
||||
# Install Infisical CLI
|
||||
RUN apt-get install -y curl bash && \
|
||||
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
|
||||
apt-get update && apt-get install -y infisical=0.8.1 git
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
||||
CMD node healthcheck.js
|
||||
|
@ -1,4 +1,4 @@
|
||||
FROM node:20-slim
|
||||
FROM node:20-alpine
|
||||
|
||||
# ? Setup a test SoftHSM module. In production a real HSM is used.
|
||||
|
||||
@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
|
||||
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
|
||||
SOFTHSM2_SOURCES=/tmp/softhsm2
|
||||
|
||||
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
libssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh-client \
|
||||
curl \
|
||||
pkg-config
|
||||
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
|
||||
RUN apk --update add \
|
||||
alpine-sdk \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
openssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh
|
||||
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apk add --no-cache \
|
||||
unixodbc \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
freetds-dev \
|
||||
freetds-bin \
|
||||
tdsodbc
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
# Build and install SoftHSM2
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
# build and install SoftHSM2
|
||||
|
||||
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
|
||||
WORKDIR ${SOFTHSM2_SOURCES}
|
||||
|
||||
@ -45,18 +45,16 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
|
||||
WORKDIR /root
|
||||
RUN rm -fr ${SOFTHSM2_SOURCES}
|
||||
|
||||
# Install pkcs11-tool
|
||||
RUN apt-get install -y opensc
|
||||
# install pkcs11-tool
|
||||
RUN apk --update add opensc
|
||||
|
||||
RUN mkdir -p /etc/softhsm2/tokens && \
|
||||
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
|
||||
# ? App setup
|
||||
|
||||
# Install Infisical CLI
|
||||
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
|
||||
apt-get update && \
|
||||
apt-get install -y infisical=0.8.1
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
@ -120,3 +120,4 @@ export default {
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
|
3341 backend/package-lock.json (generated)
File diff suppressed because it is too large
@ -40,7 +40,6 @@
|
||||
"type:check": "tsc --noEmit",
|
||||
"lint:fix": "eslint --fix --ext js,ts ./src",
|
||||
"lint": "eslint 'src/**/*.ts'",
|
||||
"test:unit": "vitest run -c vitest.unit.config.ts",
|
||||
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
|
||||
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
|
||||
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
|
||||
@ -61,17 +60,9 @@
|
||||
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
|
||||
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
|
||||
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
|
||||
"migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
|
||||
"migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
|
||||
"migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
|
||||
"migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
|
||||
"migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
|
||||
"migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
|
||||
"migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
|
||||
"migrate:org": "tsx ./scripts/migrate-organization.ts",
|
||||
"seed:new": "tsx ./scripts/create-seed-file.ts",
|
||||
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
|
||||
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
|
||||
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
|
||||
},
|
||||
"keywords": [],
|
||||
@ -147,18 +138,17 @@
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@google-cloud/kms": "^4.5.0",
|
||||
"@infisical/quic": "^1.0.8",
|
||||
"@node-saml/passport-saml": "^5.0.1",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/auth-app": "^7.1.5",
|
||||
"@octokit/plugin-retry": "^7.1.4",
|
||||
"@octokit/rest": "^21.1.1",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@octopusdeploy/api-client": "^3.4.1",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
|
||||
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.55.0",
|
||||
"@opentelemetry/instrumentation": "^0.55.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.57.2",
|
||||
"@opentelemetry/resources": "^1.28.0",
|
||||
"@opentelemetry/sdk-metrics": "^1.28.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.27.0",
|
||||
@ -179,7 +169,6 @@
|
||||
"cassandra-driver": "^4.7.2",
|
||||
"connect-redis": "^7.1.1",
|
||||
"cron": "^3.1.7",
|
||||
"dd-trace": "^5.40.0",
|
||||
"dotenv": "^16.4.1",
|
||||
"fastify": "^4.28.1",
|
||||
"fastify-plugin": "^4.5.1",
|
||||
@ -188,7 +177,6 @@
|
||||
"handlebars": "^4.7.8",
|
||||
"hdb": "^0.19.10",
|
||||
"ioredis": "^5.3.2",
|
||||
"isomorphic-dompurify": "^2.22.0",
|
||||
"jmespath": "^0.16.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"jsrp": "^0.2.4",
|
||||
@ -204,6 +192,7 @@
|
||||
"nanoid": "^3.3.8",
|
||||
"nodemailer": "^6.9.9",
|
||||
"odbc": "^2.4.9",
|
||||
"openai": "^4.85.4",
|
||||
"openid-client": "^5.6.5",
|
||||
"ora": "^7.0.1",
|
||||
"oracledb": "^6.4.0",
|
||||
|
7 backend/src/@types/fastify-request-context.d.ts (vendored, new file)
@ -0,0 +1,7 @@
|
||||
import "@fastify/request-context";
|
||||
|
||||
declare module "@fastify/request-context" {
|
||||
interface RequestContextData {
|
||||
reqId: string;
|
||||
}
|
||||
}
|
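For context, a minimal sketch (not part of this commit) of how the `RequestContextData` augmentation above is typically consumed. It assumes `@fastify/request-context` is registered as described in that package's documentation; the route and hook below are illustrative only.

```ts
// Illustrative sketch — assumes @fastify/request-context is registered.
import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";

const app = Fastify();
app.register(fastifyRequestContext);

app.addHook("onRequest", async (req) => {
  // "reqId" is type-checked against the RequestContextData declared above
  requestContext.set("reqId", req.id);
});

app.get("/health", async () => ({
  // readable anywhere inside the same request's async call chain
  reqId: requestContext.get("reqId")
}));
```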
11 backend/src/@types/fastify.d.ts (vendored)
@ -13,7 +13,6 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
|
||||
@ -46,6 +45,7 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
|
||||
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
|
||||
import { TAutomatedSecurityServiceFactory } from "@app/services/automated-security/automated-security-service";
|
||||
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
|
||||
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
|
||||
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
|
||||
@ -100,13 +100,6 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
|
||||
declare module "@fastify/request-context" {
|
||||
interface RequestContextData {
|
||||
reqId: string;
|
||||
identityAuthInfo?: {
|
||||
identityId: string;
|
||||
oidc?: {
|
||||
claims: Record<string, string>;
|
||||
};
|
||||
};
|
||||
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
|
||||
}
|
||||
}
|
||||
|
||||
@ -236,7 +229,7 @@ declare module "fastify" {
|
||||
secretSync: TSecretSyncServiceFactory;
|
||||
kmip: TKmipServiceFactory;
|
||||
kmipOperation: TKmipOperationServiceFactory;
|
||||
gateway: TGatewayServiceFactory;
|
||||
automatedSecurity: TAutomatedSecurityServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
|
32 backend/src/@types/knex.d.ts (vendored)
@ -29,6 +29,9 @@ import {
|
||||
TAuthTokenSessionsUpdate,
|
||||
TAuthTokensInsert,
|
||||
TAuthTokensUpdate,
|
||||
TAutomatedSecurityReports,
|
||||
TAutomatedSecurityReportsInsert,
|
||||
TAutomatedSecurityReportsUpdate,
|
||||
TBackupPrivateKey,
|
||||
TBackupPrivateKeyInsert,
|
||||
TBackupPrivateKeyUpdate,
|
||||
@ -68,9 +71,6 @@ import {
|
||||
TExternalKms,
|
||||
TExternalKmsInsert,
|
||||
TExternalKmsUpdate,
|
||||
TGateways,
|
||||
TGatewaysInsert,
|
||||
TGatewaysUpdate,
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate,
|
||||
@ -116,6 +116,9 @@ import {
|
||||
TIdentityOrgMemberships,
|
||||
TIdentityOrgMembershipsInsert,
|
||||
TIdentityOrgMembershipsUpdate,
|
||||
TIdentityProfile,
|
||||
TIdentityProfileInsert,
|
||||
TIdentityProfileUpdate,
|
||||
TIdentityProjectAdditionalPrivilege,
|
||||
TIdentityProjectAdditionalPrivilegeInsert,
|
||||
TIdentityProjectAdditionalPrivilegeUpdate,
|
||||
@ -182,9 +185,6 @@ import {
|
||||
TOrgBots,
|
||||
TOrgBotsInsert,
|
||||
TOrgBotsUpdate,
|
||||
TOrgGatewayConfig,
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate,
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate,
|
||||
@ -206,9 +206,6 @@ import {
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate,
|
||||
TProjectGateways,
|
||||
TProjectGatewaysInsert,
|
||||
TProjectGatewaysUpdate,
|
||||
TProjectKeys,
|
||||
TProjectKeysInsert,
|
||||
TProjectKeysUpdate,
|
||||
@ -939,16 +936,15 @@ declare module "knex/types/tables" {
|
||||
TKmipClientCertificatesInsert,
|
||||
TKmipClientCertificatesUpdate
|
||||
>;
|
||||
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
|
||||
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
|
||||
TProjectGateways,
|
||||
TProjectGatewaysInsert,
|
||||
TProjectGatewaysUpdate
|
||||
[TableName.IdentityProfile]: KnexOriginal.CompositeTableType<
|
||||
TIdentityProfile,
|
||||
TIdentityProfileInsert,
|
||||
TIdentityProfileUpdate
|
||||
>;
|
||||
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
|
||||
TOrgGatewayConfig,
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate
|
||||
[TableName.AutomatedSecurityReports]: KnexOriginal.CompositeTableType<
|
||||
TAutomatedSecurityReports,
|
||||
TAutomatedSecurityReportsInsert,
|
||||
TAutomatedSecurityReportsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
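As an aside, the `CompositeTableType` entries registered in `knex/types/tables` above are what make table access statically typed. A hypothetical helper (not from the diff) illustrating the effect, assuming the `@app/db/schemas` exports shown in this commit:

```ts
// Hypothetical usage sketch — table and type names come from the schemas in
// this commit; the helper itself is illustrative only.
import { Knex } from "knex";
import { TableName, TIdentityProfile, TIdentityProfileInsert } from "@app/db/schemas";

export const insertIdentityProfile = async (
  db: Knex,
  profile: TIdentityProfileInsert
): Promise<TIdentityProfile> => {
  // Because TableName.IdentityProfile is mapped to a CompositeTableType,
  // the insert payload and the returned rows are both statically typed.
  const [row] = await db(TableName.IdentityProfile).insert(profile).returning("*");
  return row;
};
```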
@ -39,7 +39,7 @@ export default {
|
||||
},
|
||||
migrations: {
|
||||
tableName: "infisical_migrations",
|
||||
loadExtensions: [".mjs", ".ts"]
|
||||
loadExtensions: [".mjs"]
|
||||
}
|
||||
},
|
||||
production: {
|
||||
@ -64,7 +64,7 @@ export default {
|
||||
},
|
||||
migrations: {
|
||||
tableName: "infisical_migrations",
|
||||
loadExtensions: [".mjs", ".ts"]
|
||||
loadExtensions: [".mjs"]
|
||||
}
|
||||
}
|
||||
} as Knex.Config;
|
||||
|
@ -1,115 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
|
||||
await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("rootCaKeyAlgorithm").notNullable();
|
||||
|
||||
t.datetime("rootCaIssuedAt").notNullable();
|
||||
t.datetime("rootCaExpiration").notNullable();
|
||||
t.string("rootCaSerialNumber").notNullable();
|
||||
t.binary("encryptedRootCaCertificate").notNullable();
|
||||
t.binary("encryptedRootCaPrivateKey").notNullable();
|
||||
|
||||
t.datetime("clientCaIssuedAt").notNullable();
|
||||
t.datetime("clientCaExpiration").notNullable();
|
||||
t.string("clientCaSerialNumber");
|
||||
t.binary("encryptedClientCaCertificate").notNullable();
|
||||
t.binary("encryptedClientCaPrivateKey").notNullable();
|
||||
|
||||
t.string("clientCertSerialNumber").notNullable();
|
||||
t.string("clientCertKeyAlgorithm").notNullable();
|
||||
t.datetime("clientCertIssuedAt").notNullable();
|
||||
t.datetime("clientCertExpiration").notNullable();
|
||||
t.binary("encryptedClientCertificate").notNullable();
|
||||
t.binary("encryptedClientPrivateKey").notNullable();
|
||||
|
||||
t.datetime("gatewayCaIssuedAt").notNullable();
|
||||
t.datetime("gatewayCaExpiration").notNullable();
|
||||
t.string("gatewayCaSerialNumber").notNullable();
|
||||
t.binary("encryptedGatewayCaCertificate").notNullable();
|
||||
t.binary("encryptedGatewayCaPrivateKey").notNullable();
|
||||
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.unique("orgId");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.Gateway))) {
|
||||
await knex.schema.createTable(TableName.Gateway, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.string("name").notNullable();
|
||||
t.string("serialNumber").notNullable();
|
||||
t.string("keyAlgorithm").notNullable();
|
||||
t.datetime("issuedAt").notNullable();
|
||||
t.datetime("expiration").notNullable();
|
||||
t.datetime("heartbeat");
|
||||
|
||||
t.binary("relayAddress").notNullable();
|
||||
|
||||
t.uuid("orgGatewayRootCaId").notNullable();
|
||||
t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");
|
||||
|
||||
t.uuid("identityId").notNullable();
|
||||
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.Gateway);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
|
||||
await knex.schema.createTable(TableName.ProjectGateway, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
|
||||
t.uuid("gatewayId").notNullable();
|
||||
t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ProjectGateway);
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
// not setting a foreign constraint so that cascade effects are not triggered
|
||||
if (!doesGatewayColExist) {
|
||||
t.uuid("projectGatewayId");
|
||||
t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
if (doesGatewayColExist) t.dropColumn("projectGatewayId");
|
||||
});
|
||||
}
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectGateway);
|
||||
await dropOnUpdateTrigger(knex, TableName.ProjectGateway);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.Gateway);
|
||||
await dropOnUpdateTrigger(knex, TableName.Gateway);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
|
||||
await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
|
||||
}
|
53 backend/src/db/migrations/20250224015833_identity-profile.ts (new file)
@ -0,0 +1,53 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.IdentityProfile))) {
|
||||
await knex.schema.createTable(TableName.IdentityProfile, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.uuid("userId");
|
||||
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
|
||||
t.uuid("identityId");
|
||||
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
|
||||
|
||||
t.text("temporalProfile").notNullable(); // access pattern or frequency
|
||||
t.text("scopeProfile").notNullable(); // scope of usage - are they accessing development environment secrets. which paths? are they doing mainly admin work?
|
||||
t.text("usageProfile").notNullable(); // method of usage
|
||||
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.IdentityProfile);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.AutomatedSecurityReports))) {
|
||||
await knex.schema.createTable(TableName.AutomatedSecurityReports, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.uuid("profileId").notNullable();
|
||||
t.foreign("profileId").references("id").inTable(TableName.IdentityProfile).onDelete("CASCADE");
|
||||
|
||||
t.jsonb("event").notNullable();
|
||||
|
||||
t.string("remarks").notNullable();
|
||||
t.string("severity").notNullable();
|
||||
t.string("status").notNullable();
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.AutomatedSecurityReports);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.IdentityProfile);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.AutomatedSecurityReports);
|
||||
}
|
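A hypothetical query against the two tables this migration creates (not part of the commit), joining through the `profileId` foreign key defined above; the helper name is made up for illustration:

```ts
// Illustrative only: join AutomatedSecurityReports to IdentityProfile via the
// profileId foreign key, filtering by the identity that owns the profile.
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";

export const findReportsForIdentity = (db: Knex, identityId: string) =>
  db(TableName.AutomatedSecurityReports)
    .join(
      TableName.IdentityProfile,
      `${TableName.IdentityProfile}.id`,
      `${TableName.AutomatedSecurityReports}.profileId`
    )
    .where(`${TableName.IdentityProfile}.identityId`, identityId)
    .select(`${TableName.AutomatedSecurityReports}.*`);
```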
@ -1,25 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
|
||||
if (!hasSharingTypeColumn) {
|
||||
table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
|
||||
if (hasSharingTypeColumn) {
|
||||
table.dropColumn("type");
|
||||
}
|
||||
});
|
||||
}
|
@ -1,31 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (!hasAuthConsentContentCol) {
|
||||
t.text("authConsentContent");
|
||||
}
|
||||
if (!hasPageFrameContentCol) {
|
||||
t.text("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (hasAuthConsentContentCol) {
|
||||
t.dropColumn("authConsentContent");
|
||||
}
|
||||
if (hasPageFrameContentCol) {
|
||||
t.dropColumn("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
@ -1,35 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
for await (const tableName of [
|
||||
TableName.SecretV2,
|
||||
TableName.SecretVersionV2,
|
||||
TableName.SecretApprovalRequestSecretV2
|
||||
]) {
|
||||
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
|
||||
|
||||
if (hasReminderNoteCol) {
|
||||
await knex.schema.alterTable(tableName, (t) => {
|
||||
t.string("reminderNote", 1024).alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
for await (const tableName of [
|
||||
TableName.SecretV2,
|
||||
TableName.SecretVersionV2,
|
||||
TableName.SecretApprovalRequestSecretV2
|
||||
]) {
|
||||
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
|
||||
|
||||
if (hasReminderNoteCol) {
|
||||
await knex.schema.alterTable(tableName, (t) => {
|
||||
t.string("reminderNote").alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (!hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.string("description");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropColumn("description");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.string("comment");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.dropColumn("comment");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,45 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
|
||||
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
|
||||
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
|
||||
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
|
||||
if (!hasSecretVersionV2UserActorId) {
|
||||
t.uuid("userActorId");
|
||||
t.foreign("userActorId").references("id").inTable(TableName.Users);
|
||||
}
|
||||
if (!hasSecretVersionV2IdentityActorId) {
|
||||
t.uuid("identityActorId");
|
||||
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
|
||||
}
|
||||
if (!hasSecretVersionV2ActorType) {
|
||||
t.string("actorType");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
|
||||
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
|
||||
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
|
||||
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
|
||||
|
||||
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
|
||||
if (hasSecretVersionV2UserActorId) {
|
||||
t.dropColumn("userActorId");
|
||||
}
|
||||
if (hasSecretVersionV2IdentityActorId) {
|
||||
t.dropColumn("identityActorId");
|
||||
}
|
||||
if (hasSecretVersionV2ActorType) {
|
||||
t.dropColumn("actorType");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -1,32 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
|
||||
TableName.Organization,
|
||||
"allowSecretSharingOutsideOrganization"
|
||||
);
|
||||
|
||||
if (!hasSecretShareToAnyoneCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.Organization)) {
|
||||
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
|
||||
TableName.Organization,
|
||||
"allowSecretSharingOutsideOrganization"
|
||||
);
|
||||
if (hasSecretShareToAnyoneCol) {
|
||||
await knex.schema.alterTable(TableName.Organization, (t) => {
|
||||
t.dropColumn("allowSecretSharingOutsideOrganization");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
|
||||
if (!hasMappingField) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
t.jsonb("claimMetadataMapping");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
|
||||
if (hasMappingField) {
|
||||
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
|
||||
t.dropColumn("claimMetadataMapping");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas/models";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.specificType("adminIdentityIds", "text[]");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
t.dropColumn("adminIdentityIds");
|
||||
});
|
||||
}
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
|
||||
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
|
||||
if (doesParentColumExist && doesNameColumnExist) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.index(["parentId", "name"]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
|
||||
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
|
||||
if (doesParentColumExist && doesNameColumnExist) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropIndex(["parentId", "name"]);
|
||||
});
|
||||
}
|
||||
}
|
25 backend/src/db/schemas/automated-security-reports.ts (new file)
@ -0,0 +1,25 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const AutomatedSecurityReportsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
profileId: z.string().uuid(),
|
||||
event: z.unknown(),
|
||||
remarks: z.string(),
|
||||
severity: z.string(),
|
||||
status: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TAutomatedSecurityReports = z.infer<typeof AutomatedSecurityReportsSchema>;
|
||||
export type TAutomatedSecurityReportsInsert = Omit<z.input<typeof AutomatedSecurityReportsSchema>, TImmutableDBKeys>;
|
||||
export type TAutomatedSecurityReportsUpdate = Partial<
|
||||
Omit<z.input<typeof AutomatedSecurityReportsSchema>, TImmutableDBKeys>
|
||||
>;
|
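For illustration only (not part of the commit): the generated schema can validate a raw database row, and the derived insert type feeds the typed knex layer shown earlier. All example values below are placeholders.

```ts
// Illustrative only — example values are placeholders.
import { AutomatedSecurityReportsSchema, TAutomatedSecurityReportsInsert } from "@app/db/schemas";

const row: unknown = {
  id: "00000000-0000-4000-8000-000000000000",
  profileId: "00000000-0000-4000-8000-000000000001",
  event: { action: "secret-read" },
  remarks: "Unusual access pattern",
  severity: "high",
  status: "open",
  createdAt: new Date(),
  updatedAt: new Date()
};

// Throws if the row does not match the generated shape
const report = AutomatedSecurityReportsSchema.parse(row);

// Insert payloads omit TImmutableDBKeys (the DB-managed columns)
const payload: TAutomatedSecurityReportsInsert = {
  profileId: report.profileId,
  event: report.event,
  remarks: report.remarks,
  severity: report.severity,
  status: report.status
};
```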
@ -26,8 +26,7 @@ export const DynamicSecretsSchema = z.object({
|
||||
statusDetails: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
encryptedInput: zodBuffer,
|
||||
projectGatewayId: z.string().uuid().nullable().optional()
|
||||
encryptedInput: zodBuffer
|
||||
});
|
||||
|
||||
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
|
||||
|
@ -1,29 +0,0 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const GatewaysSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
name: z.string(),
|
||||
serialNumber: z.string(),
|
||||
keyAlgorithm: z.string(),
|
||||
issuedAt: z.date(),
|
||||
expiration: z.date(),
|
||||
heartbeat: z.date().nullable().optional(),
|
||||
relayAddress: zodBuffer,
|
||||
orgGatewayRootCaId: z.string().uuid(),
|
||||
identityId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TGateways = z.infer<typeof GatewaysSchema>;
|
||||
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
|
||||
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;
|
@@ -26,8 +26,7 @@ export const IdentityOidcAuthsSchema = z.object({
   boundSubject: z.string().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  encryptedCaCertificate: zodBuffer.nullable().optional(),
-  claimMetadataMapping: z.unknown().nullable().optional()
+  encryptedCaCertificate: zodBuffer.nullable().optional()
 });

 export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
backend/src/db/schemas/identity-profile.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const IdentityProfileSchema = z.object({
  id: z.string().uuid(),
  userId: z.string().uuid().nullable().optional(),
  identityId: z.string().uuid().nullable().optional(),
  temporalProfile: z.string(),
  scopeProfile: z.string(),
  usageProfile: z.string(),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TIdentityProfile = z.infer<typeof IdentityProfileSchema>;
export type TIdentityProfileInsert = Omit<z.input<typeof IdentityProfileSchema>, TImmutableDBKeys>;
export type TIdentityProfileUpdate = Partial<Omit<z.input<typeof IdentityProfileSchema>, TImmutableDBKeys>>;
@@ -7,6 +7,7 @@ export * from "./audit-log-streams";
 export * from "./audit-logs";
 export * from "./auth-token-sessions";
 export * from "./auth-tokens";
+export * from "./automated-security-reports";
 export * from "./backup-private-key";
 export * from "./certificate-authorities";
 export * from "./certificate-authority-certs";
@@ -20,7 +21,6 @@ export * from "./certificates";
 export * from "./dynamic-secret-leases";
 export * from "./dynamic-secrets";
 export * from "./external-kms";
-export * from "./gateways";
 export * from "./git-app-install-sessions";
 export * from "./git-app-org";
 export * from "./group-project-membership-roles";
@@ -36,6 +36,7 @@ export * from "./identity-kubernetes-auths";
 export * from "./identity-metadata";
 export * from "./identity-oidc-auths";
 export * from "./identity-org-memberships";
+export * from "./identity-profile";
 export * from "./identity-project-additional-privilege";
 export * from "./identity-project-membership-role";
 export * from "./identity-project-memberships";
@@ -58,7 +59,6 @@ export * from "./ldap-group-maps";
 export * from "./models";
 export * from "./oidc-configs";
 export * from "./org-bots";
-export * from "./org-gateway-config";
 export * from "./org-memberships";
 export * from "./org-roles";
 export * from "./organizations";
@@ -67,7 +67,6 @@ export * from "./pki-collection-items";
 export * from "./pki-collections";
 export * from "./project-bots";
 export * from "./project-environments";
-export * from "./project-gateways";
 export * from "./project-keys";
 export * from "./project-memberships";
 export * from "./project-roles";
@@ -80,6 +80,8 @@ export enum TableName {
   IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
   // used by both identity and users
   IdentityMetadata = "identity_metadata",
+  IdentityProfile = "identity_profile",
+  AutomatedSecurityReports = "automated_security_reports",
   ResourceMetadata = "resource_metadata",
   ScimToken = "scim_tokens",
   AccessApprovalPolicy = "access_approval_policies",
@@ -113,10 +115,6 @@ export enum TableName {
   SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
   SnapshotSecretV2 = "secret_snapshot_secrets_v2",
   ProjectSplitBackfillIds = "project_split_backfill_ids",
-  // Gateway
-  OrgGatewayConfig = "org_gateway_config",
-  Gateway = "gateways",
-  ProjectGateway = "project_gateways",
   // junction tables with tags
   SecretV2JnTag = "secret_v2_tag_junction",
   JnSecretTag = "secret_tag_junction",
@@ -1,43 +0,0 @@ (file removed)
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const OrgGatewayConfigSchema = z.object({
  id: z.string().uuid(),
  rootCaKeyAlgorithm: z.string(),
  rootCaIssuedAt: z.date(),
  rootCaExpiration: z.date(),
  rootCaSerialNumber: z.string(),
  encryptedRootCaCertificate: zodBuffer,
  encryptedRootCaPrivateKey: zodBuffer,
  clientCaIssuedAt: z.date(),
  clientCaExpiration: z.date(),
  clientCaSerialNumber: z.string().nullable().optional(),
  encryptedClientCaCertificate: zodBuffer,
  encryptedClientCaPrivateKey: zodBuffer,
  clientCertSerialNumber: z.string(),
  clientCertKeyAlgorithm: z.string(),
  clientCertIssuedAt: z.date(),
  clientCertExpiration: z.date(),
  encryptedClientCertificate: zodBuffer,
  encryptedClientPrivateKey: zodBuffer,
  gatewayCaIssuedAt: z.date(),
  gatewayCaExpiration: z.date(),
  gatewayCaSerialNumber: z.string(),
  encryptedGatewayCaCertificate: zodBuffer,
  encryptedGatewayCaPrivateKey: zodBuffer,
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;
@@ -22,8 +22,7 @@ export const OrganizationsSchema = z.object({
   kmsEncryptedDataKey: zodBuffer.nullable().optional(),
   defaultMembershipRole: z.string().default("member"),
   enforceMfa: z.boolean().default(false),
-  selectedMfaMethod: z.string().nullable().optional(),
-  allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
+  selectedMfaMethod: z.string().nullable().optional()
 });

 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
@@ -1,20 +0,0 @@ (file removed)
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const ProjectGatewaysSchema = z.object({
  id: z.string().uuid(),
  projectId: z.string(),
  gatewayId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;
@@ -13,8 +13,7 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
   requestId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  reviewerUserId: z.string().uuid(),
-  comment: z.string().nullable().optional()
+  reviewerUserId: z.string().uuid()
 });

 export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
@@ -15,8 +15,7 @@ export const SecretFoldersSchema = z.object({
   updatedAt: z.date(),
   envId: z.string().uuid(),
   parentId: z.string().uuid().nullable().optional(),
-  isReserved: z.boolean().default(false).nullable().optional(),
-  description: z.string().nullable().optional()
+  isReserved: z.boolean().default(false).nullable().optional()
 });

 export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
@@ -26,8 +26,7 @@ export const SecretSharingSchema = z.object({
   lastViewedAt: z.date().nullable().optional(),
   password: z.string().nullable().optional(),
   encryptedSecret: zodBuffer.nullable().optional(),
-  identifier: z.string().nullable().optional(),
-  type: z.string().default("share")
+  identifier: z.string().nullable().optional()
 });

 export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
@@ -25,10 +25,7 @@ export const SecretVersionsV2Schema = z.object({
   folderId: z.string().uuid(),
   userId: z.string().uuid().nullable().optional(),
   createdAt: z.date(),
-  updatedAt: z.date(),
-  userActorId: z.string().uuid().nullable().optional(),
-  identityActorId: z.string().uuid().nullable().optional(),
-  actorType: z.string().nullable().optional()
+  updatedAt: z.date()
 });

 export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;
@@ -23,10 +23,7 @@ export const SuperAdminSchema = z.object({
   defaultAuthOrgId: z.string().uuid().nullable().optional(),
   enabledLoginMethods: z.string().array().nullable().optional(),
   encryptedSlackClientId: zodBuffer.nullable().optional(),
-  encryptedSlackClientSecret: zodBuffer.nullable().optional(),
-  authConsentContent: z.string().nullable().optional(),
-  pageFrameContent: z.string().nullable().optional(),
-  adminIdentityIds: z.string().array().nullable().optional()
+  encryptedSlackClientSecret: zodBuffer.nullable().optional()
 });

 export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -1,10 +1,10 @@
+import ms from "ms";
 import { z } from "zod";

 import { DynamicSecretLeasesSchema } from "@app/db/schemas";
 import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
 import { daysToMillisecond } from "@app/lib/dates";
 import { removeTrailingSlash } from "@app/lib/fn";
-import { ms } from "@app/lib/ms";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
@@ -1,3 +1,4 @@
+import ms from "ms";
 import { z } from "zod";

 import { DynamicSecretLeasesSchema } from "@app/db/schemas";
@@ -5,7 +6,6 @@ import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/pro
 import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
 import { daysToMillisecond } from "@app/lib/dates";
 import { removeTrailingSlash } from "@app/lib/fn";
-import { ms } from "@app/lib/ms";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -1,265 +0,0 @@ (file removed)
import { z } from "zod";

import { GatewaysSchema } from "@app/db/schemas";
import { isValidIp } from "@app/lib/ip";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const SanitizedGatewaySchema = GatewaysSchema.pick({
  id: true,
  identityId: true,
  name: true,
  createdAt: true,
  updatedAt: true,
  issuedAt: true,
  serialNumber: true,
  heartbeat: true
});

const isValidRelayAddress = (relayAddress: string) => {
  const [ip, port] = relayAddress.split(":");
  return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
};

export const registerGatewayRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "POST",
    url: "/register-identity",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      response: {
        200: z.object({
          turnServerUsername: z.string(),
          turnServerPassword: z.string(),
          turnServerRealm: z.string(),
          turnServerAddress: z.string(),
          infisicalStaticIp: z.string().optional()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const relayDetails = await server.services.gateway.getGatewayRelayDetails(
        req.permission.id,
        req.permission.orgId,
        req.permission.authMethod
      );
      return relayDetails;
    }
  });

  server.route({
    method: "POST",
    url: "/exchange-cert",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
      }),
      response: {
        200: z.object({
          serialNumber: z.string(),
          privateKey: z.string(),
          certificate: z.string(),
          certificateChain: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const gatewayCertificates = await server.services.gateway.exchangeAllocatedRelayAddress({
        identityOrg: req.permission.orgId,
        identityId: req.permission.id,
        relayAddress: req.body.relayAddress,
        identityOrgAuthMethod: req.permission.authMethod
      });
      return gatewayCertificates;
    }
  });

  server.route({
    method: "POST",
    url: "/heartbeat",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      response: {
        200: z.object({
          message: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      await server.services.gateway.heartbeat({
        orgPermission: req.permission
      });
      return { message: "Successfully registered heartbeat" };
    }
  });

  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      querystring: z.object({
        projectId: z.string().optional()
      }),
      response: {
        200: z.object({
          gateways: SanitizedGatewaySchema.extend({
            identity: z.object({
              name: z.string(),
              id: z.string()
            }),
            projects: z
              .object({
                name: z.string(),
                id: z.string(),
                slug: z.string()
              })
              .array()
          }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
    handler: async (req) => {
      const gateways = await server.services.gateway.listGateways({
        orgPermission: req.permission
      });
      return { gateways };
    }
  });

  server.route({
    method: "GET",
    url: "/projects/:projectId",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        projectId: z.string()
      }),
      response: {
        200: z.object({
          gateways: SanitizedGatewaySchema.extend({
            identity: z.object({
              name: z.string(),
              id: z.string()
            }),
            projectGatewayId: z.string()
          }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
    handler: async (req) => {
      const gateways = await server.services.gateway.getProjectGateways({
        projectId: req.params.projectId,
        projectPermission: req.permission
      });
      return { gateways };
    }
  });

  server.route({
    method: "GET",
    url: "/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        id: z.string()
      }),
      response: {
        200: z.object({
          gateway: SanitizedGatewaySchema.extend({
            identity: z.object({
              name: z.string(),
              id: z.string()
            })
          })
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
    handler: async (req) => {
      const gateway = await server.services.gateway.getGatewayById({
        orgPermission: req.permission,
        id: req.params.id
      });
      return { gateway };
    }
  });

  server.route({
    method: "PATCH",
    url: "/:id",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        id: z.string()
      }),
      body: z.object({
        name: slugSchema({ field: "name" }).optional(),
        projectIds: z.string().array().optional()
      }),
      response: {
        200: z.object({
          gateway: SanitizedGatewaySchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
    handler: async (req) => {
      const gateway = await server.services.gateway.updateGatewayById({
        orgPermission: req.permission,
        id: req.params.id,
        name: req.body.name,
        projectIds: req.body.projectIds
      });
      return { gateway };
    }
  });

  server.route({
    method: "DELETE",
    url: "/:id",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        id: z.string()
      }),
      response: {
        200: z.object({
          gateway: SanitizedGatewaySchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
    handler: async (req) => {
      const gateway = await server.services.gateway.deleteGatewayById({
        orgPermission: req.permission,
        id: req.params.id
      });
      return { gateway };
    }
  });
};
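A hedged aside, not part of the diff: the isValidRelayAddress helper in the removed router accepts an "ip:port" string only when the port falls in 40000-65535, assuming isValidIp accepts dotted-quad IPv4 strings. Illustrative calls:

// Illustrative only; mirrors the validator defined in the removed file.
isValidRelayAddress("192.0.2.10:45000"); // true - port inside the allowed relay range
isValidRelayAddress("192.0.2.10:8080");  // false - port below 40000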
@@ -1,11 +1,11 @@
 import slugify from "@sindresorhus/slugify";
+import ms from "ms";
 import { z } from "zod";

 import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
 import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
 import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
 import { UnauthorizedError } from "@app/lib/errors";
-import { ms } from "@app/lib/ms";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
@@ -7,7 +7,6 @@ import { registerCaCrlRouter } from "./certificate-authority-crl-router";
 import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
 import { registerDynamicSecretRouter } from "./dynamic-secret-router";
 import { registerExternalKmsRouter } from "./external-kms-router";
-import { registerGatewayRouter } from "./gateway-router";
 import { registerGroupRouter } from "./group-router";
 import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
 import { registerKmipRouter } from "./kmip-router";
@@ -68,8 +67,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
     { prefix: "/dynamic-secrets" }
   );

-  await server.register(registerGatewayRouter, { prefix: "/gateways" });
-
   await server.register(
     async (pkiRouter) => {
       await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
@@ -1,10 +1,10 @@
+import ms from "ms";
 import { z } from "zod";

 import { KmipClientsSchema } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
 import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
 import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
-import { ms } from "@app/lib/ms";
 import { OrderByDirection } from "@app/lib/types";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -25,7 +25,7 @@ type TSAMLConfig = {
   callbackUrl: string;
   entryPoint: string;
   issuer: string;
-  idpCert: string;
+  cert: string;
   audience: string;
   wantAuthnResponseSigned?: boolean;
   wantAssertionsSigned?: boolean;
@@ -72,7 +72,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
       callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${ssoConfig.id}`,
       entryPoint: ssoConfig.entryPoint,
       issuer: ssoConfig.issuer,
-      idpCert: ssoConfig.cert,
+      cert: ssoConfig.cert,
       audience: appCfg.SITE_URL || ""
     };
     if (ssoConfig.authProvider === SamlProviders.JUMPCLOUD_SAML) {
@@ -302,21 +302,15 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
       }
     },
     handler: async (req) => {
-      const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
-      const { permission } = req;
-
-      return server.services.saml.createSamlCfg({
-        isActive,
-        authProvider,
-        issuer,
-        entryPoint,
-        idpCert: cert,
-        actor: permission.type,
-        actorId: permission.id,
-        actorAuthMethod: permission.authMethod,
-        actorOrgId: permission.orgId,
-        orgId: req.body.organizationId
+      const saml = await server.services.saml.createSamlCfg({
+        actor: req.permission.type,
+        actorId: req.permission.id,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        orgId: req.body.organizationId,
+        ...req.body
       });
+      return saml;
     }
   });

@@ -343,21 +337,15 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
       }
     },
     handler: async (req) => {
-      const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
-      const { permission } = req;
-
-      return server.services.saml.updateSamlCfg({
-        isActive,
-        authProvider,
-        issuer,
-        entryPoint,
-        idpCert: cert,
-        actor: permission.type,
-        actorId: permission.id,
-        actorAuthMethod: permission.authMethod,
-        actorOrgId: permission.orgId,
-        orgId: req.body.organizationId
+      const saml = await server.services.saml.updateSamlCfg({
+        actor: req.permission.type,
+        actorId: req.permission.id,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        orgId: req.body.organizationId,
+        ...req.body
       });
+      return saml;
     }
   });
 };

@@ -1,11 +1,16 @@
 import { z } from "zod";

-import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
+import {
+  SecretApprovalRequestsReviewersSchema,
+  SecretApprovalRequestsSchema,
+  SecretTagsSchema,
+  UsersSchema
+} from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
 import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
+import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
 import { AuthMode } from "@app/services/auth/auth-type";
 import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";

@@ -154,8 +159,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
         id: z.string()
       }),
       body: z.object({
-        status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
-        comment: z.string().optional()
+        status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
       }),
       response: {
         200: z.object({
@@ -171,25 +175,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
         actorAuthMethod: req.permission.authMethod,
         actorOrgId: req.permission.orgId,
         approvalId: req.params.id,
-        status: req.body.status,
-        comment: req.body.comment
+        status: req.body.status
       });
-
-      await server.services.auditLog.createAuditLog({
-        ...req.auditLogInfo,
-        orgId: req.permission.orgId,
-        projectId: review.projectId,
-        event: {
-          type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
-          metadata: {
-            secretApprovalRequestId: review.requestId,
-            reviewedBy: review.reviewerUserId,
-            status: review.status as ApprovalStatus,
-            comment: review.comment || ""
-          }
-        }
-      });
-
       return { review };
     }
   });
@@ -245,6 +232,15 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
     }
   });

+  const tagSchema = SecretTagsSchema.pick({
+    id: true,
+    slug: true,
+    name: true,
+    color: true
+  })
+    .array()
+    .optional();
+
   server.route({
     method: "GET",
     url: "/:id",
@@ -272,13 +268,13 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
             environment: z.string(),
             statusChangedByUser: approvalRequestUser.optional(),
             committerUser: approvalRequestUser,
-            reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
+            reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
             secretPath: z.string(),
             commits: secretRawSchema
               .omit({ _id: true, environment: true, workspace: true, type: true, version: true })
              .extend({
                 op: z.string(),
-                tags: SanitizedTagSchema.array().optional(),
+                tags: tagSchema,
                 secretMetadata: ResourceMetadataSchema.nullish(),
                 secret: z
                   .object({
@@ -297,7 +293,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
                     secretKey: z.string(),
                     secretValue: z.string().optional(),
                     secretComment: z.string().optional(),
-                    tags: SanitizedTagSchema.array().optional(),
+                    tags: tagSchema,
                     secretMetadata: ResourceMetadataSchema.nullish()
                   })
                   .optional()
@@ -1,6 +1,6 @@
 import z from "zod";

-import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
+import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
 import { RAW_SECRETS } from "@app/lib/api-docs";
 import { removeTrailingSlash } from "@app/lib/fn";
 import { readLimit } from "@app/server/config/rateLimiter";
@@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";

 const AccessListEntrySchema = z
   .object({
-    allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
+    allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
     id: z.string(),
     membershipId: z.string(),
     name: z.string()
@@ -22,11 +22,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
       }),
       response: {
         200: z.object({
-          secretVersions: secretRawSchema
-            .extend({
-              secretValueHidden: z.boolean()
-            })
-            .array()
+          secretVersions: secretRawSchema.array()
         })
       }
     },
@@ -41,7 +37,6 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
         offset: req.query.offset,
         secretId: req.params.secretId
       });
-
       return { secretVersions };
     }
   });
@@ -1,10 +1,10 @@
 import { z } from "zod";

-import { SecretSnapshotsSchema } from "@app/db/schemas";
+import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
 import { PROJECTS } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
-import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
+import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
 import { AuthMode } from "@app/services/auth/auth-type";

 export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
@@ -31,9 +31,13 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
             secretVersions: secretRawSchema
               .omit({ _id: true, environment: true, workspace: true, type: true })
               .extend({
-                secretValueHidden: z.boolean(),
                 secretId: z.string(),
-                tags: SanitizedTagSchema.array()
+                tags: SecretTagsSchema.pick({
+                  id: true,
+                  slug: true,
+                  name: true,
+                  color: true
+                }).array()
               })
               .array(),
             folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
@@ -52,7 +56,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
         actorOrgId: req.permission.orgId,
         id: req.params.secretSnapshotId
       });
-
       return { secretSnapshot };
     }
   });
@@ -1,9 +1,9 @@
+import ms from "ms";
 import { z } from "zod";

 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
 import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
 import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
-import { ms } from "@app/lib/ms";
 import { writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -1,4 +1,5 @@
 import slugify from "@sindresorhus/slugify";
+import ms from "ms";
 import { z } from "zod";

 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
@@ -9,7 +10,6 @@ import {
   isValidUserPattern
 } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
 import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
-import { ms } from "@app/lib/ms";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -1,11 +1,10 @@
 import slugify from "@sindresorhus/slugify";
+import ms from "ms";
 import { z } from "zod";

-import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
 import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
 import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
 import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
-import { ms } from "@app/lib/ms";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
@@ -24,9 +23,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
       body: z.object({
         projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
         slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
-        permissions: ProjectPermissionV2Schema.array()
-          .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
-          .refine(checkForInvalidPermissionCombination),
+        permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
         type: z.discriminatedUnion("isTemporary", [
           z.object({
             isTemporary: z.literal(false)
@@ -84,8 +81,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
         slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
         permissions: ProjectPermissionV2Schema.array()
           .optional()
-          .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions)
-          .refine(checkForInvalidPermissionCombination),
+          .describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
         type: z.discriminatedUnion("isTemporary", [
           z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
           z.object({
@@ -1,11 +1,10 @@
 import slugify from "@sindresorhus/slugify";
+import ms from "ms";
 import { z } from "zod";

 import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
-import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
 import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
 import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
-import { ms } from "@app/lib/ms";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { slugSchema } from "@app/server/lib/schemas";
@@ -31,9 +30,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
         identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
         projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
         slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
-        permissions: ProjectPermissionV2Schema.array()
-          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission)
-          .refine(checkForInvalidPermissionCombination),
+        permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
         type: z.discriminatedUnion("isTemporary", [
           z.object({
             isTemporary: z.literal(false)
@@ -97,8 +94,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
         slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
         permissions: ProjectPermissionV2Schema.array()
           .optional()
-          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission)
-          .refine(checkForInvalidPermissionCombination),
+          .describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
         type: z.discriminatedUnion("isTemporary", [
           z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
           z.object({
@@ -2,7 +2,6 @@ import { packRules } from "@casl/ability/extra";
 import { z } from "zod";

 import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
-import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
 import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
 import { PROJECT_ROLE } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -38,9 +37,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
           .describe(PROJECT_ROLE.CREATE.slug),
         name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
         description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
-        permissions: ProjectPermissionV2Schema.array()
-          .describe(PROJECT_ROLE.CREATE.permissions)
-          .refine(checkForInvalidPermissionCombination)
+        permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
       }),
       response: {
         200: z.object({
@@ -95,10 +92,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
           .describe(PROJECT_ROLE.UPDATE.slug),
         name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
         description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
-        permissions: ProjectPermissionV2Schema.array()
-          .describe(PROJECT_ROLE.UPDATE.permissions)
-          .optional()
-          .superRefine(checkForInvalidPermissionCombination)
+        permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
       }),
       response: {
         200: z.object({
@@ -1,10 +1,9 @@
 import slugify from "@sindresorhus/slugify";
-import msFn from "ms";
+import ms from "ms";

 import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
-import { ms } from "@app/lib/ms";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -247,7 +246,7 @@ export const accessApprovalRequestServiceFactory = ({
       requesterEmail: requestedByUser.email,
       isTemporary,
       ...(isTemporary && {
-        expiresIn: msFn(ms(temporaryRange || ""), { long: true })
+        expiresIn: ms(ms(temporaryRange || ""), { long: true })
       }),
       secretPath,
       environment: envSlug,
@@ -1,10 +1,8 @@
 import { ForbiddenError } from "@casl/ability";
-import { requestContext } from "@fastify/request-context";

 import { ActionProjectType } from "@app/db/schemas";
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
-import { ActorType } from "@app/services/auth/auth-type";

 import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
 import { TPermissionServiceFactory } from "../permission/permission-service";
@@ -83,12 +81,8 @@ export const auditLogServiceFactory = ({
       if (!data.projectId && !data.orgId)
         throw new BadRequestError({ message: "Must specify either project id or org id" });
     }
-    const el = { ...data };
-    if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
-      const permissionMetadata = requestContext.get("identityPermissionMetadata");
-      el.actor.metadata.permission = permissionMetadata;
-    }
-    return auditLogQueue.pushToLog(el);
+
+    return auditLogQueue.pushToLog(data);
   };

   return {
@@ -22,7 +22,6 @@ import {
 } from "@app/services/secret-sync/secret-sync-types";

 import { KmipPermission } from "../kmip/kmip-enum";
-import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";

 export type TListProjectAuditLogDTO = {
   filter: {
@@ -166,7 +165,6 @@ export enum EventType {
   SECRET_APPROVAL_REQUEST = "secret-approval-request",
   SECRET_APPROVAL_CLOSED = "secret-approval-closed",
   SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
-  SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
   SIGN_SSH_KEY = "sign-ssh-key",
   ISSUE_SSH_CREDS = "issue-ssh-creds",
   CREATE_SSH_CA = "create-ssh-certificate-authority",
@@ -252,7 +250,6 @@ export enum EventType {
   UPDATE_APP_CONNECTION = "update-app-connection",
   DELETE_APP_CONNECTION = "delete-app-connection",
   CREATE_SHARED_SECRET = "create-shared-secret",
-  CREATE_SECRET_REQUEST = "create-secret-request",
   DELETE_SHARED_SECRET = "delete-shared-secret",
   READ_SHARED_SECRET = "read-shared-secret",
   GET_SECRET_SYNCS = "get-secret-syncs",
@@ -290,7 +287,6 @@ interface UserActorMetadata {
   userId: string;
   email?: string | null;
   username: string;
-  permission?: Record<string, unknown>;
 }

 interface ServiceActorMetadata {
@@ -301,7 +297,6 @@ interface ServiceActorMetadata {
 interface IdentityActorMetadata {
   identityId: string;
   name: string;
-  permission?: Record<string, unknown>;
 }

 interface ScimClientActorMetadata {}
@@ -980,7 +975,6 @@ interface AddIdentityOidcAuthEvent {
     boundIssuer: string;
     boundAudiences: string;
     boundClaims: Record<string, string>;
-    claimMetadataMapping: Record<string, string>;
     boundSubject: string;
     accessTokenTTL: number;
     accessTokenMaxTTL: number;
@@ -1005,7 +999,6 @@ interface UpdateIdentityOidcAuthEvent {
     boundIssuer?: string;
     boundAudiences?: string;
     boundClaims?: Record<string, string>;
-    claimMetadataMapping?: Record<string, string>;
     boundSubject?: string;
     accessTokenTTL?: number;
     accessTokenMaxTTL?: number;
@@ -1148,7 +1141,6 @@ interface CreateFolderEvent {
     folderId: string;
     folderName: string;
     folderPath: string;
-    description?: string;
   };
 }

@@ -1320,16 +1312,6 @@ interface SecretApprovalRequest {
   };
 }

-interface SecretApprovalRequestReview {
-  type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
-  metadata: {
-    secretApprovalRequestId: string;
-    reviewedBy: string;
-    status: ApprovalStatus;
-    comment: string;
-  };
-}
-
 interface SignSshKey {
   type: EventType.SIGN_SSH_KEY;
   metadata: {
@@ -2038,15 +2020,6 @@ interface CreateSharedSecretEvent {
   };
 }

-interface CreateSecretRequestEvent {
-  type: EventType.CREATE_SECRET_REQUEST;
-  metadata: {
-    id: string;
-    accessType: string;
-    name?: string;
-  };
-}
-
 interface DeleteSharedSecretEvent {
   type: EventType.DELETE_SHARED_SECRET;
   metadata: {
@@ -2497,6 +2470,4 @@ export type Event =
   | KmipOperationActivateEvent
   | KmipOperationRevokeEvent
   | KmipOperationLocateEvent
-  | KmipOperationRegisterEvent
-  | CreateSecretRequestEvent
-  | SecretApprovalRequestReview;
+  | KmipOperationRegisterEvent;
@@ -1,4 +1,5 @@
 import { ForbiddenError, subject } from "@casl/ability";
+import ms from "ms";

 import { ActionProjectType } from "@app/db/schemas";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@@ -10,7 +11,6 @@ import {
 import { getConfig } from "@app/lib/config/env";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
-import { ms } from "@app/lib/ms";
 import { TKmsServiceFactory } from "@app/services/kms/kms-service";
 import { KmsDataKey } from "@app/services/kms/kms-types";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -1,31 +1,20 @@ (old lines shown first within each change; the shorter variant replaces the longer one)
import crypto from "node:crypto";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = (host: string) => {
  const appCfg = getConfig();
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
  // no need for validation when it's dev
  if (appCfg.NODE_ENV === "development") return;

  if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });

  if (
    appCfg.isCloud &&
    !isGateway &&
    // localhost
    // internal ips
    (host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
    (host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new BadRequestError({ message: "Invalid db host" });

  if (
    host === "localhost" ||
    host === "127.0.0.1" ||
    (dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
  ) {
  if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
    throw new BadRequestError({ message: "Invalid db host" });
  }
};
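A hedged note on the longer variant above, not part of the diff: crypto.timingSafeEqual throws when the two buffers differ in length, which is why that code guards on dbHost?.length === host.length before comparing. A minimal standalone sketch of the same pattern, with illustrative names:

import crypto from "node:crypto";

// Length guard first, constant-time comparison second.
const equalsDbHost = (host: string, dbHost?: string) =>
  dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost ?? ""), Buffer.from(host));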
@@ -16,7 +16,6 @@ import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-fold

 import { TDynamicSecretLeaseDALFactory } from "../dynamic-secret-lease/dynamic-secret-lease-dal";
 import { TDynamicSecretLeaseQueueServiceFactory } from "../dynamic-secret-lease/dynamic-secret-lease-queue";
-import { TProjectGatewayDALFactory } from "../gateway/project-gateway-dal";
 import { TDynamicSecretDALFactory } from "./dynamic-secret-dal";
 import {
   DynamicSecretStatus,
@@ -45,7 +44,6 @@ type TDynamicSecretServiceFactoryDep = {
   projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
   permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
   kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
-  projectGatewayDAL: Pick<TProjectGatewayDALFactory, "findOne">;
 };

 export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@@ -59,8 +57,7 @@ export const dynamicSecretServiceFactory = ({
   permissionService,
   dynamicSecretQueueService,
   projectDAL,
-  kmsService,
-  projectGatewayDAL
+  kmsService
 }: TDynamicSecretServiceFactoryDep) => {
   const create = async ({
     path,
@@ -111,18 +108,6 @@ export const dynamicSecretServiceFactory = ({
     const selectedProvider = dynamicSecretProviders[provider.type];
     const inputs = await selectedProvider.validateProviderInputs(provider.inputs);

-    let selectedGatewayId: string | null = null;
-    if (inputs && typeof inputs === "object" && "projectGatewayId" in inputs && inputs.projectGatewayId) {
-      const projectGatewayId = inputs.projectGatewayId as string;
-
-      const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
-      if (!projectGateway)
-        throw new NotFoundError({
-          message: `Project gateway with ${projectGatewayId} not found`
-        });
-      selectedGatewayId = projectGateway.id;
-    }
-
     const isConnected = await selectedProvider.validateConnection(provider.inputs);
     if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });

@@ -138,8 +123,7 @@ export const dynamicSecretServiceFactory = ({
       maxTTL,
       defaultTTL,
       folderId: folder.id,
-      name,
-      projectGatewayId: selectedGatewayId
+      name
     });
     return dynamicSecretCfg;
   };
@@ -211,23 +195,6 @@ export const dynamicSecretServiceFactory = ({
     const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
     const updatedInput = await selectedProvider.validateProviderInputs(newInput);

-    let selectedGatewayId: string | null = null;
-    if (
-      updatedInput &&
-      typeof updatedInput === "object" &&
-      "projectGatewayId" in updatedInput &&
-      updatedInput?.projectGatewayId
-    ) {
-      const projectGatewayId = updatedInput.projectGatewayId as string;
-
-      const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
-      if (!projectGateway)
-        throw new NotFoundError({
-          message: `Project gateway with ${projectGatewayId} not found`
-        });
-      selectedGatewayId = projectGateway.id;
-    }
-
     const isConnected = await selectedProvider.validateConnection(newInput);
     if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });

@@ -237,8 +204,7 @@ export const dynamicSecretServiceFactory = ({
       defaultTTL,
       name: newName ?? name,
       status: null,
-      statusDetails: null,
-      projectGatewayId: selectedGatewayId
+      statusDetails: null
     });

    return updatedDynamicCfg;
@@ -1,6 +1,5 @@
 import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";

-import { TGatewayServiceFactory } from "../../gateway/gateway-service";
 import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
 import { AwsIamProvider } from "./aws-iam";
 import { AzureEntraIDProvider } from "./azure-entra-id";
@@ -17,14 +16,8 @@ import { SapHanaProvider } from "./sap-hana";
 import { SqlDatabaseProvider } from "./sql-database";
 import { TotpProvider } from "./totp";

-type TBuildDynamicSecretProviderDTO = {
-  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
-};
-
-export const buildDynamicSecretProviders = ({
-  gatewayService
-}: TBuildDynamicSecretProviderDTO): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
-  [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider({ gatewayService }),
+export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
+  [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
   [DynamicSecretProviders.Cassandra]: CassandraProvider(),
   [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
   [DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
@@ -1,16 +1,5 @@
 import { z } from "zod";

-export type PasswordRequirements = {
-  length: number;
-  required: {
-    lowercase: number;
-    uppercase: number;
-    digits: number;
-    symbols: number;
-  };
-  allowedSymbols?: string;
-};
-
 export enum SqlProviders {
   Postgres = "postgres",
   MySQL = "mysql2",
@@ -111,33 +100,10 @@ export const DynamicSecretSqlDBSchema = z.object({
   database: z.string().trim(),
   username: z.string().trim(),
   password: z.string().trim(),
-  passwordRequirements: z
-    .object({
-      length: z.number().min(1).max(250),
-      required: z
-        .object({
-          lowercase: z.number().min(0),
-          uppercase: z.number().min(0),
-          digits: z.number().min(0),
-          symbols: z.number().min(0)
-        })
-        .refine((data) => {
-          const total = Object.values(data).reduce((sum, count) => sum + count, 0);
-          return total <= 250;
-        }, "Sum of required characters cannot exceed 250"),
-      allowedSymbols: z.string().optional()
-    })
-    .refine((data) => {
-      const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
-      return total <= data.length;
-    }, "Sum of required characters cannot exceed the total length")
-    .optional()
-    .describe("Password generation requirements"),
   creationStatement: z.string().trim(),
   revocationStatement: z.string().trim(),
   renewStatement: z.string().trim().optional(),
-  ca: z.string().optional(),
-  projectGatewayId: z.string().nullable().optional()
+  ca: z.string().optional()
 });

 export const DynamicSecretCassandraSchema = z.object({
@ -1,106 +1,21 @@
|
||||
import { randomInt } from "crypto";
|
||||
import handlebars from "handlebars";
|
||||
import knex from "knex";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { withGatewayProxy } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
|
||||
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
|
||||
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
|
||||
|
||||
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
|
||||
|
||||
const DEFAULT_PASSWORD_REQUIREMENTS = {
|
||||
length: 48,
|
||||
required: {
|
||||
lowercase: 1,
|
||||
uppercase: 1,
|
||||
digits: 1,
|
||||
symbols: 0
|
||||
},
|
||||
allowedSymbols: "-_.~!*"
|
||||
};
|
||||
const generatePassword = (provider: SqlProviders) => {
|
||||
// oracle has limit of 48 password length
|
||||
const size = provider === SqlProviders.Oracle ? 30 : 48;
|
||||
|
||||
const ORACLE_PASSWORD_REQUIREMENTS = {
|
||||
...DEFAULT_PASSWORD_REQUIREMENTS,
|
||||
length: 30
|
||||
};
|
||||
|
||||
const generatePassword = (provider: SqlProviders, requirements?: PasswordRequirements) => {
|
||||
const defaultReqs = provider === SqlProviders.Oracle ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
|
||||
const finalReqs = requirements || defaultReqs;
|
||||
|
||||
  try {
    const { length, required, allowedSymbols } = finalReqs;

    const chars = {
      lowercase: "abcdefghijklmnopqrstuvwxyz",
      uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
      digits: "0123456789",
      symbols: allowedSymbols || "-_.~!*"
    };

    const parts: string[] = [];

    if (required.lowercase > 0) {
      parts.push(
        ...Array(required.lowercase)
          .fill(0)
          .map(() => chars.lowercase[randomInt(chars.lowercase.length)])
      );
    }

    if (required.uppercase > 0) {
      parts.push(
        ...Array(required.uppercase)
          .fill(0)
          .map(() => chars.uppercase[randomInt(chars.uppercase.length)])
      );
    }

    if (required.digits > 0) {
      parts.push(
        ...Array(required.digits)
          .fill(0)
          .map(() => chars.digits[randomInt(chars.digits.length)])
      );
    }

    if (required.symbols > 0) {
      parts.push(
        ...Array(required.symbols)
          .fill(0)
          .map(() => chars.symbols[randomInt(chars.symbols.length)])
      );
    }

    const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
    const remainingLength = Math.max(length - requiredTotal, 0);

    const allowedChars = Object.entries(chars)
      .filter(([key]) => required[key as keyof typeof required] > 0)
      .map(([, value]) => value)
      .join("");

    parts.push(
      ...Array(remainingLength)
        .fill(0)
        .map(() => allowedChars[randomInt(allowedChars.length)])
    );

    // shuffle the array to mix up the characters
    for (let i = parts.length - 1; i > 0; i -= 1) {
      const j = randomInt(i + 1);
      [parts[i], parts[j]] = [parts[j], parts[i]];
    }

    return parts.join("");
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : "Unknown error";
    throw new Error(`Failed to generate password: ${message}`);
  }

  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
  return customAlphabet(charset, 48)(size);
};
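For reference, a minimal standalone sketch of the same technique the requirement-aware generator above uses: satisfy each required character class first, fill the remainder from the combined pool, then apply a Fisher-Yates shuffle driven by node:crypto's randomInt. This is illustrative only; the pool contents and the default length are assumptions, not the provider's configuration.

import { randomInt } from "node:crypto";

const sketchGenerate = (length = 48) => {
  const pools = {
    lowercase: "abcdefghijklmnopqrstuvwxyz",
    uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
    digits: "0123456789"
  };
  // one guaranteed character from each required class
  const parts = Object.values(pools).map((pool) => pool[randomInt(pool.length)]);
  // fill the rest from the union of the pools
  const all = Object.values(pools).join("");
  while (parts.length < length) parts.push(all[randomInt(all.length)]);
  // Fisher-Yates shuffle so the guaranteed characters are not clustered at the front
  for (let i = parts.length - 1; i > 0; i -= 1) {
    const j = randomInt(i + 1);
    [parts[i], parts[j]] = [parts[j], parts[i]];
  }
  return parts.join("");
};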
|
||||
|
||||
const generateUsername = (provider: SqlProviders) => {
|
||||
@ -110,14 +25,10 @@ const generateUsername = (provider: SqlProviders) => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
type TSqlDatabaseProviderDTO = {
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
|
||||
};
|
||||
|
||||
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
|
||||
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
|
||||
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
|
||||
verifyHostInputValidity(providerInputs.host);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
@ -134,6 +45,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
user: providerInputs.username,
|
||||
password: providerInputs.password,
|
||||
ssl,
|
||||
pool: { min: 0, max: 1 },
|
||||
// @ts-expect-error this is because of knexjs type signature issue. This is directly passed to driver
|
||||
// https://github.com/knex/knex/blob/b6507a7129d2b9fafebf5f831494431e64c6a8a0/lib/dialects/mssql/index.js#L66
|
||||
// https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19
|
||||
@ -149,112 +61,61 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
return db;
|
||||
};
|
||||
|
||||
  const gatewayProxyWrapper = async (
    providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>,
    gatewayCallback: (host: string, port: number) => Promise<void>
  ) => {
    const relayDetails = await gatewayService.fnGetGatewayClientTls(providerInputs.projectGatewayId as string);
    const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
    await withGatewayProxy(
      async (port) => {
        await gatewayCallback("localhost", port);
      },
      {
        targetHost: providerInputs.host,
        targetPort: providerInputs.port,
        relayHost,
        relayPort: Number(relayPort),
        identityId: relayDetails.identityId,
        orgId: relayDetails.orgId,
        tlsOptions: {
          ca: relayDetails.certChain,
          cert: relayDetails.certificate,
          key: relayDetails.privateKey.toString()
        }
      }
    );
  };
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
let isConnected = false;
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
const db = await $getClient(providerInputs);
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
|
||||
isConnected = await db.raw(testStatement).then(() => true);
|
||||
await db.destroy();
|
||||
};
|
||||
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
const isConnected = await db.raw(testStatement).then(() => true);
|
||||
await db.destroy();
|
||||
return isConnected;
|
||||
};
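The connection check above boils down to opening a single-connection pool and issuing a dialect-aware probe query (Oracle requires a FROM clause, hence SELECT 1 FROM DUAL). A minimal standalone sketch with knex, using placeholder Postgres connection values that are assumptions for illustration:

import knex from "knex";

const probeConnection = async () => {
  const db = knex({
    client: "pg",
    connection: { host: "127.0.0.1", port: 5432, user: "postgres", password: "postgres", database: "postgres" },
    pool: { min: 0, max: 1 }
  });
  try {
    // use "SELECT 1 FROM DUAL" when the client is Oracle
    await db.raw("SELECT 1");
    return true;
  } finally {
    await db.destroy();
  }
};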
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername(providerInputs.client);
|
||||
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
try {
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const password = generatePassword(providerInputs.client);
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
});
|
||||
await db.destroy();
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const { database } = providerInputs;
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
try {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
});
|
||||
|
||||
await db.destroy();
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
@ -262,35 +123,28 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { database } = providerInputs;
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
try {
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { database } = providerInputs;
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
});
|
||||
}
|
||||
|
||||
await db.destroy();
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
|
@ -1,86 +0,0 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { GatewaysSchema, TableName, TGateways } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import {
|
||||
buildFindFilter,
|
||||
ormify,
|
||||
selectAllTableCols,
|
||||
sqlNestRelationships,
|
||||
TFindFilter,
|
||||
TFindOpt
|
||||
} from "@app/lib/knex";
|
||||
|
||||
export type TGatewayDALFactory = ReturnType<typeof gatewayDALFactory>;
|
||||
|
||||
export const gatewayDALFactory = (db: TDbClient) => {
|
||||
const orm = ormify(db, TableName.Gateway);
|
||||
|
||||
const find = async (filter: TFindFilter<TGateways>, { offset, limit, sort, tx }: TFindOpt<TGateways> = {}) => {
|
||||
try {
|
||||
const query = (tx || db)(TableName.Gateway)
|
||||
// eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||
.where(buildFindFilter(filter))
|
||||
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
|
||||
.leftJoin(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
|
||||
.leftJoin(TableName.Project, `${TableName.Project}.id`, `${TableName.ProjectGateway}.projectId`)
|
||||
.select(selectAllTableCols(TableName.Gateway))
|
||||
.select(
|
||||
db.ref("name").withSchema(TableName.Identity).as("identityName"),
|
||||
db.ref("name").withSchema(TableName.Project).as("projectName"),
|
||||
db.ref("slug").withSchema(TableName.Project).as("projectSlug"),
|
||||
db.ref("id").withSchema(TableName.Project).as("projectId")
|
||||
);
|
||||
if (limit) void query.limit(limit);
|
||||
if (offset) void query.offset(offset);
|
||||
if (sort) {
|
||||
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
|
||||
}
|
||||
|
||||
const docs = await query;
|
||||
return sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "id",
|
||||
parentMapper: (data) => ({
|
||||
...GatewaysSchema.parse(data),
|
||||
identity: { id: data.identityId, name: data.identityName }
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "projectId",
|
||||
label: "projects" as const,
|
||||
mapper: ({ projectId, projectName, projectSlug }) => ({
|
||||
id: projectId,
|
||||
name: projectName,
|
||||
slug: projectSlug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: `${TableName.Gateway}: Find` });
|
||||
}
|
||||
};
|
||||
|
||||
const findByProjectId = async (projectId: string, tx?: Knex) => {
|
||||
try {
|
||||
const query = (tx || db)(TableName.Gateway)
|
||||
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
|
||||
.join(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
|
||||
.select(selectAllTableCols(TableName.Gateway))
|
||||
.select(
|
||||
db.ref("name").withSchema(TableName.Identity).as("identityName"),
|
||||
db.ref("id").withSchema(TableName.ProjectGateway).as("projectGatewayId")
|
||||
)
|
||||
.where({ [`${TableName.ProjectGateway}.projectId` as "projectId"]: projectId });
|
||||
|
||||
const docs = await query;
|
||||
return docs.map((el) => ({ ...el, identity: { id: el.identityId, name: el.identityName } }));
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: `${TableName.Gateway}: Find by project id` });
|
||||
}
|
||||
};
|
||||
|
||||
return { ...orm, find, findByProjectId };
|
||||
};
|
@ -1,652 +0,0 @@
|
||||
import crypto from "node:crypto";
|
||||
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { KeyStorePrefixes, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { pingGatewayAndVerify } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { getTurnCredentials } from "@app/lib/turn/credentials";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
|
||||
import {
|
||||
createSerialNumber,
|
||||
keyAlgorithmToAlgCfg
|
||||
} from "@app/services/certificate-authority/certificate-authority-fns";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { TGatewayDALFactory } from "./gateway-dal";
|
||||
import {
|
||||
TExchangeAllocatedRelayAddressDTO,
|
||||
TGetGatewayByIdDTO,
|
||||
TGetProjectGatewayByIdDTO,
|
||||
THeartBeatDTO,
|
||||
TListGatewaysDTO,
|
||||
TUpdateGatewayByIdDTO
|
||||
} from "./gateway-types";
|
||||
import { TOrgGatewayConfigDALFactory } from "./org-gateway-config-dal";
|
||||
import { TProjectGatewayDALFactory } from "./project-gateway-dal";
|
||||
|
||||
type TGatewayServiceFactoryDep = {
|
||||
gatewayDAL: TGatewayDALFactory;
|
||||
projectGatewayDAL: TProjectGatewayDALFactory;
|
||||
orgGatewayConfigDAL: Pick<TOrgGatewayConfigDALFactory, "findOne" | "create" | "transaction" | "findById">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "onPremFeatures" | "getPlan">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "decryptWithRootKey">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getProjectPermission">;
|
||||
keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry">;
|
||||
};
|
||||
|
||||
export type TGatewayServiceFactory = ReturnType<typeof gatewayServiceFactory>;
|
||||
const TURN_SERVER_CREDENTIALS_SCHEMA = z.object({
|
||||
username: z.string(),
|
||||
password: z.string()
|
||||
});
|
||||
|
||||
export const gatewayServiceFactory = ({
|
||||
gatewayDAL,
|
||||
licenseService,
|
||||
kmsService,
|
||||
permissionService,
|
||||
orgGatewayConfigDAL,
|
||||
keyStore,
|
||||
projectGatewayDAL
|
||||
}: TGatewayServiceFactoryDep) => {
|
||||
const $validateOrgAccessToGateway = async (orgId: string, actorId: string, actorAuthMethod: ActorAuthMethod) => {
|
||||
// if (!licenseService.onPremFeatures.gateway) {
|
||||
// throw new BadRequestError({
|
||||
// message:
|
||||
// "Gateway handshake failed due to instance plan restrictions. Please upgrade your instance to Infisical's Enterprise plan."
|
||||
// });
|
||||
// }
|
||||
const orgLicensePlan = await licenseService.getPlan(orgId);
|
||||
if (!orgLicensePlan.gateway) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Gateway handshake failed due to organization plan restrictions. Please upgrade your instance to Infisical's Enterprise plan."
|
||||
});
|
||||
}
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
ActorType.IDENTITY,
|
||||
actorId,
|
||||
orgId,
|
||||
actorAuthMethod,
|
||||
orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.CreateGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
};
|
||||
|
||||
const getGatewayRelayDetails = async (actorId: string, actorOrgId: string, actorAuthMethod: ActorAuthMethod) => {
|
||||
const TURN_CRED_EXPIRY = 10 * 60; // 10 minutes
|
||||
|
||||
const envCfg = getConfig();
|
||||
await $validateOrgAccessToGateway(actorOrgId, actorId, actorAuthMethod);
|
||||
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
if (!envCfg.GATEWAY_RELAY_AUTH_SECRET || !envCfg.GATEWAY_RELAY_ADDRESS || !envCfg.GATEWAY_RELAY_REALM) {
|
||||
throw new BadRequestError({
|
||||
message: "Gateway handshake failed due to missing instance configuration."
|
||||
});
|
||||
}
|
||||
|
||||
let turnServerUsername = "";
|
||||
let turnServerPassword = "";
|
||||
// cache the credentials in Redis for TURN_CRED_EXPIRY (10 minutes) to avoid regenerating them on every request
|
||||
const previousCredential = await keyStore.getItem(KeyStorePrefixes.GatewayIdentityCredential(actorId));
|
||||
if (previousCredential) {
|
||||
const el = await TURN_SERVER_CREDENTIALS_SCHEMA.parseAsync(
|
||||
JSON.parse(decryptor({ cipherTextBlob: Buffer.from(previousCredential, "hex") }).toString())
|
||||
);
|
||||
turnServerUsername = el.username;
|
||||
turnServerPassword = el.password;
|
||||
} else {
|
||||
const el = getTurnCredentials(actorId, envCfg.GATEWAY_RELAY_AUTH_SECRET);
|
||||
await keyStore.setItemWithExpiry(
|
||||
KeyStorePrefixes.GatewayIdentityCredential(actorId),
|
||||
TURN_CRED_EXPIRY,
|
||||
encryptor({
|
||||
plainText: Buffer.from(JSON.stringify({ username: el.username, password: el.password }))
|
||||
}).cipherTextBlob.toString("hex")
|
||||
);
|
||||
turnServerUsername = el.username;
|
||||
turnServerPassword = el.password;
|
||||
}
|
||||
|
||||
return {
|
||||
turnServerUsername,
|
||||
turnServerPassword,
|
||||
turnServerRealm: envCfg.GATEWAY_RELAY_REALM,
|
||||
turnServerAddress: envCfg.GATEWAY_RELAY_ADDRESS,
|
||||
infisicalStaticIp: envCfg.GATEWAY_INFISICAL_STATIC_IP_ADDRESS
|
||||
};
|
||||
};
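getTurnCredentials is imported from @app/lib/turn/credentials and is not part of this diff. As a hedged sketch of the general idea, relays that follow the coturn REST-API convention derive short-lived credentials from a shared secret (GATEWAY_RELAY_AUTH_SECRET here) roughly like this; the exact scheme Infisical uses may differ:

import crypto from "node:crypto";

const sketchTurnCredentials = (identityId: string, sharedSecret: string, ttlSeconds = 600) => {
  // username encodes an expiry timestamp; password is an HMAC over the username with the shared secret
  const expiry = Math.floor(Date.now() / 1000) + ttlSeconds;
  const username = `${expiry}:${identityId}`;
  const password = crypto.createHmac("sha1", sharedSecret).update(username).digest("base64");
  return { username, password };
};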
|
||||
|
||||
const exchangeAllocatedRelayAddress = async ({
|
||||
identityId,
|
||||
identityOrg,
|
||||
relayAddress,
|
||||
identityOrgAuthMethod
|
||||
}: TExchangeAllocatedRelayAddressDTO) => {
|
||||
await $validateOrgAccessToGateway(identityOrg, identityId, identityOrgAuthMethod);
|
||||
const { encryptor: orgKmsEncryptor, decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityOrg
|
||||
});
|
||||
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.transaction(async (tx) => {
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.OrgGatewayRootCaInit(identityOrg)]);
|
||||
const existingGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: identityOrg });
|
||||
if (existingGatewayConfig) return existingGatewayConfig;
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
|
||||
// generate root CA
|
||||
const rootCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const rootCaSerialNumber = createSerialNumber();
|
||||
const rootCaSkObj = crypto.KeyObject.from(rootCaKeys.privateKey);
|
||||
const rootCaIssuedAt = new Date();
|
||||
const rootCaKeyAlgorithm = CertKeyAlgorithm.RSA_2048;
|
||||
const rootCaExpiration = new Date(new Date().setFullYear(2045));
|
||||
const rootCaCert = await x509.X509CertificateGenerator.createSelfSigned({
|
||||
name: `O=${identityOrg},CN=Infisical Gateway Root CA`,
|
||||
serialNumber: rootCaSerialNumber,
|
||||
notBefore: rootCaIssuedAt,
|
||||
notAfter: rootCaExpiration,
|
||||
signingAlgorithm: alg,
|
||||
keys: rootCaKeys,
|
||||
extensions: [
|
||||
// eslint-disable-next-line no-bitwise
|
||||
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
|
||||
await x509.SubjectKeyIdentifierExtension.create(rootCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
// generate client ca
|
||||
const clientCaSerialNumber = createSerialNumber();
|
||||
const clientCaIssuedAt = new Date();
|
||||
const clientCaExpiration = new Date(new Date().setFullYear(2045));
|
||||
const clientCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const clientCaSkObj = crypto.KeyObject.from(clientCaKeys.privateKey);
|
||||
|
||||
const clientCaCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: clientCaSerialNumber,
|
||||
subject: `O=${identityOrg},CN=Client Intermediate CA`,
|
||||
issuer: rootCaCert.subject,
|
||||
notBefore: clientCaIssuedAt,
|
||||
notAfter: clientCaExpiration,
|
||||
signingKey: rootCaKeys.privateKey,
|
||||
publicKey: clientCaKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags.keyCertSign |
|
||||
x509.KeyUsageFlags.cRLSign |
|
||||
x509.KeyUsageFlags.digitalSignature |
|
||||
x509.KeyUsageFlags.keyEncipherment,
|
||||
true
|
||||
),
|
||||
new x509.BasicConstraintsExtension(true, 0, true),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(clientCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
const clientKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const clientCertSerialNumber = createSerialNumber();
|
||||
const clientCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: clientCertSerialNumber,
|
||||
subject: `O=${identityOrg},OU=gateway-client,CN=cloud`,
|
||||
issuer: clientCaCert.subject,
|
||||
notAfter: clientCaExpiration,
|
||||
notBefore: clientCaIssuedAt,
|
||||
signingKey: clientCaKeys.privateKey,
|
||||
publicKey: clientKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.BasicConstraintsExtension(false),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(clientCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(clientKeys.publicKey),
|
||||
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] |
|
||||
x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT] |
|
||||
x509.KeyUsageFlags[CertKeyUsage.KEY_AGREEMENT],
|
||||
true
|
||||
),
|
||||
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.CLIENT_AUTH]], true)
|
||||
]
|
||||
});
|
||||
const clientSkObj = crypto.KeyObject.from(clientKeys.privateKey);
|
||||
|
||||
// generate gateway ca
|
||||
const gatewayCaSerialNumber = createSerialNumber();
|
||||
const gatewayCaIssuedAt = new Date();
|
||||
const gatewayCaExpiration = new Date(new Date().setFullYear(2045));
|
||||
const gatewayCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const gatewayCaSkObj = crypto.KeyObject.from(gatewayCaKeys.privateKey);
|
||||
const gatewayCaCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: gatewayCaSerialNumber,
|
||||
subject: `O=${identityOrg},CN=Gateway CA`,
|
||||
issuer: rootCaCert.subject,
|
||||
notBefore: gatewayCaIssuedAt,
|
||||
notAfter: gatewayCaExpiration,
|
||||
signingKey: rootCaKeys.privateKey,
|
||||
publicKey: gatewayCaKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags.keyCertSign |
|
||||
x509.KeyUsageFlags.cRLSign |
|
||||
x509.KeyUsageFlags.digitalSignature |
|
||||
x509.KeyUsageFlags.keyEncipherment,
|
||||
true
|
||||
),
|
||||
new x509.BasicConstraintsExtension(true, 0, true),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(gatewayCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
return orgGatewayConfigDAL.create({
|
||||
orgId: identityOrg,
|
||||
rootCaIssuedAt,
|
||||
rootCaExpiration,
|
||||
rootCaSerialNumber,
|
||||
rootCaKeyAlgorithm,
|
||||
encryptedRootCaPrivateKey: orgKmsEncryptor({
|
||||
plainText: rootCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedRootCaCertificate: orgKmsEncryptor({ plainText: Buffer.from(rootCaCert.rawData) }).cipherTextBlob,
|
||||
|
||||
clientCaIssuedAt,
|
||||
clientCaExpiration,
|
||||
clientCaSerialNumber,
|
||||
encryptedClientCaPrivateKey: orgKmsEncryptor({
|
||||
plainText: clientCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedClientCaCertificate: orgKmsEncryptor({
|
||||
plainText: Buffer.from(clientCaCert.rawData)
|
||||
}).cipherTextBlob,
|
||||
|
||||
clientCertIssuedAt: clientCaIssuedAt,
|
||||
clientCertExpiration: clientCaExpiration,
|
||||
clientCertKeyAlgorithm: CertKeyAlgorithm.RSA_2048,
|
||||
clientCertSerialNumber,
|
||||
encryptedClientPrivateKey: orgKmsEncryptor({
|
||||
plainText: clientSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedClientCertificate: orgKmsEncryptor({
|
||||
plainText: Buffer.from(clientCert.rawData)
|
||||
}).cipherTextBlob,
|
||||
|
||||
gatewayCaIssuedAt,
|
||||
gatewayCaExpiration,
|
||||
gatewayCaSerialNumber,
|
||||
encryptedGatewayCaPrivateKey: orgKmsEncryptor({
|
||||
plainText: gatewayCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedGatewayCaCertificate: orgKmsEncryptor({
|
||||
plainText: Buffer.from(gatewayCaCert.rawData)
|
||||
}).cipherTextBlob
|
||||
});
|
||||
});
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
|
||||
})
|
||||
);
|
||||
const clientCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedClientCaCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const gatewayCaAlg = keyAlgorithmToAlgCfg(orgGatewayConfig.rootCaKeyAlgorithm as CertKeyAlgorithm);
|
||||
const gatewayCaSkObj = crypto.createPrivateKey({
|
||||
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedGatewayCaPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
});
|
||||
const gatewayCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const gatewayCaPrivateKey = await crypto.subtle.importKey(
|
||||
"pkcs8",
|
||||
gatewayCaSkObj.export({ format: "der", type: "pkcs8" }),
|
||||
gatewayCaAlg,
|
||||
true,
|
||||
["sign"]
|
||||
);
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
|
||||
const gatewayKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const certIssuedAt = new Date();
|
||||
// gateway certificates are short-lived (about one month), so this exchange needs to be re-run periodically
|
||||
const certExpireAt = new Date(new Date().setMonth(new Date().getMonth() + 1));
|
||||
|
||||
const extensions: x509.Extension[] = [
|
||||
new x509.BasicConstraintsExtension(false),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(gatewayCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(gatewayKeys.publicKey),
|
||||
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] | x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT],
|
||||
true
|
||||
),
|
||||
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.SERVER_AUTH]], true),
|
||||
// san
|
||||
new x509.SubjectAlternativeNameExtension([{ type: "ip", value: relayAddress.split(":")[0] }], false)
|
||||
];
|
||||
|
||||
const serialNumber = createSerialNumber();
|
||||
const privateKey = crypto.KeyObject.from(gatewayKeys.privateKey);
|
||||
const gatewayCertificate = await x509.X509CertificateGenerator.create({
|
||||
serialNumber,
|
||||
subject: `CN=${identityId},O=${identityOrg},OU=Gateway`,
|
||||
issuer: gatewayCaCert.subject,
|
||||
notBefore: certIssuedAt,
|
||||
notAfter: certExpireAt,
|
||||
signingKey: gatewayCaPrivateKey,
|
||||
publicKey: gatewayKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions
|
||||
});
|
||||
|
||||
const appCfg = getConfig();
|
||||
// just for local development
|
||||
const formatedRelayAddress =
|
||||
appCfg.NODE_ENV === "development" ? relayAddress.replace("127.0.0.1", "host.docker.internal") : relayAddress;
|
||||
|
||||
await gatewayDAL.transaction(async (tx) => {
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.OrgGatewayCertExchange(identityOrg)]);
|
||||
const existingGateway = await gatewayDAL.findOne({ identityId, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
|
||||
if (existingGateway) {
|
||||
return gatewayDAL.updateById(existingGateway.id, {
|
||||
keyAlgorithm: CertKeyAlgorithm.RSA_2048,
|
||||
issuedAt: certIssuedAt,
|
||||
expiration: certExpireAt,
|
||||
serialNumber,
|
||||
relayAddress: orgKmsEncryptor({
|
||||
plainText: Buffer.from(formatedRelayAddress)
|
||||
}).cipherTextBlob
|
||||
});
|
||||
}
|
||||
|
||||
return gatewayDAL.create({
|
||||
keyAlgorithm: CertKeyAlgorithm.RSA_2048,
|
||||
issuedAt: certIssuedAt,
|
||||
expiration: certExpireAt,
|
||||
serialNumber,
|
||||
relayAddress: orgKmsEncryptor({
|
||||
plainText: Buffer.from(formatedRelayAddress)
|
||||
}).cipherTextBlob,
|
||||
identityId,
|
||||
orgGatewayRootCaId: orgGatewayConfig.id,
|
||||
name: `gateway-${alphaNumericNanoId(6).toLowerCase()}`
|
||||
});
|
||||
});
|
||||
|
||||
const gatewayCertificateChain = `${clientCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim();
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
privateKey: privateKey.export({ format: "pem", type: "pkcs8" }) as string,
|
||||
certificate: gatewayCertificate.toString("pem"),
|
||||
certificateChain: gatewayCertificateChain
|
||||
};
|
||||
};
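The exchange above hands back a leaf certificate together with a PEM chain. As a small illustrative check (not part of this code), node's built-in X509Certificate can confirm that each certificate in such a chain was issued and signed by the next one up; the PEM inputs below are placeholders:

import { X509Certificate } from "node:crypto";

const sketchVerifyChain = (leafPem: string, issuingCaPem: string, rootCaPem: string) => {
  const leaf = new X509Certificate(leafPem);
  const issuingCa = new X509Certificate(issuingCaPem);
  const rootCa = new X509Certificate(rootCaPem);
  // leaf must be issued and signed by the intermediate, which in turn chains to the root
  return (
    leaf.checkIssued(issuingCa) &&
    leaf.verify(issuingCa.publicKey) &&
    issuingCa.checkIssued(rootCa) &&
    issuingCa.verify(rootCa.publicKey)
  );
};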
|
||||
|
||||
const heartbeat = async ({ orgPermission }: THeartBeatDTO) => {
|
||||
await $validateOrgAccessToGateway(orgPermission.orgId, orgPermission.id, orgPermission.authMethod);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Identity with ID ${orgPermission.id} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.find({ identityId: orgPermission.id, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${orgPermission.id} not found.` });
|
||||
|
||||
const { decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: orgGatewayConfig.orgId
|
||||
});
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
|
||||
})
|
||||
);
|
||||
const gatewayCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
|
||||
})
|
||||
);
|
||||
const clientCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedClientCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const privateKey = crypto
|
||||
.createPrivateKey({
|
||||
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedClientPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
})
|
||||
.export({ type: "pkcs8", format: "pem" });
|
||||
|
||||
const relayAddress = orgKmsDecryptor({ cipherTextBlob: gateway.relayAddress }).toString();
|
||||
const [relayHost, relayPort] = relayAddress.split(":");
|
||||
|
||||
await pingGatewayAndVerify({
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
tlsOptions: {
|
||||
key: privateKey.toString(),
|
||||
ca: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
|
||||
cert: clientCert.toString("pem")
|
||||
},
|
||||
identityId: orgPermission.id,
|
||||
orgId: orgPermission.orgId
|
||||
});
|
||||
|
||||
await gatewayDAL.updateById(gateway.id, { heartbeat: new Date() });
|
||||
};
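pingGatewayAndVerify comes from @app/lib/gateway and is not shown in this diff. Conceptually, the heartbeat needs to open a mutual-TLS connection to the relay using the client key and certificate plus the gateway and root CA bundle decrypted above. A hedged sketch of such a probe with node:tls; the option names mirror the tlsOptions object, everything else is an assumption:

import tls from "node:tls";

const sketchMtlsPing = (host: string, port: number, opts: { key: string; cert: string; ca: string }) =>
  new Promise<void>((resolve, reject) => {
    const socket = tls.connect(
      { host, port, key: opts.key, cert: opts.cert, ca: opts.ca, rejectUnauthorized: true },
      () => {
        // handshake succeeded, so the relay is reachable with these credentials
        socket.end();
        resolve();
      }
    );
    socket.on("error", reject);
    socket.setTimeout(10_000, () => {
      socket.destroy();
      reject(new Error("gateway heartbeat timed out"));
    });
  });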
|
||||
|
||||
const listGateways = async ({ orgPermission }: TListGatewaysDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.ListGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) return [];
|
||||
|
||||
const gateways = await gatewayDAL.find({
|
||||
orgGatewayRootCaId: orgGatewayConfig.id
|
||||
});
|
||||
return gateways;
|
||||
};
|
||||
|
||||
const getGatewayById = async ({ orgPermission, id }: TGetGatewayByIdDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.ListGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.find({ id, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
return gateway;
|
||||
};
|
||||
|
||||
const updateGatewayById = async ({ orgPermission, id, name, projectIds }: TUpdateGatewayByIdDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.EditGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.update({ id, orgGatewayRootCaId: orgGatewayConfig.id }, { name });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
if (projectIds) {
|
||||
await projectGatewayDAL.transaction(async (tx) => {
|
||||
await projectGatewayDAL.delete({ gatewayId: gateway.id }, tx);
|
||||
await projectGatewayDAL.insertMany(
|
||||
projectIds.map((el) => ({ gatewayId: gateway.id, projectId: el })),
|
||||
tx
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
return gateway;
|
||||
};
|
||||
|
||||
const deleteGatewayById = async ({ orgPermission, id }: TGetGatewayByIdDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.DeleteGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.delete({ id, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
return gateway;
|
||||
};
|
||||
|
||||
const getProjectGateways = async ({ projectId, projectPermission }: TGetProjectGatewayByIdDTO) => {
|
||||
await permissionService.getProjectPermission({
|
||||
projectId,
|
||||
actor: projectPermission.type,
|
||||
actorId: projectPermission.id,
|
||||
actorOrgId: projectPermission.orgId,
|
||||
actorAuthMethod: projectPermission.authMethod,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
|
||||
const gateways = await gatewayDAL.findByProjectId(projectId);
|
||||
return gateways;
|
||||
};
|
||||
|
||||
// This has no permission check and is called directly by dynamic secrets;
|
||||
// assumes permission check is already done
|
||||
const fnGetGatewayClientTls = async (projectGatewayId: string) => {
|
||||
const projectGateway = await projectGatewayDAL.findById(projectGatewayId);
|
||||
if (!projectGateway) throw new NotFoundError({ message: `Project gateway with ID ${projectGatewayId} not found.` });
|
||||
|
||||
const { gatewayId } = projectGateway;
|
||||
const gateway = await gatewayDAL.findById(gatewayId);
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${gatewayId} not found.` });
|
||||
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findById(gateway.orgGatewayRootCaId);
|
||||
const { decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: orgGatewayConfig.orgId
|
||||
});
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
|
||||
})
|
||||
);
|
||||
const gatewayCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
|
||||
})
|
||||
);
|
||||
const clientCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedClientCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const clientSkObj = crypto.createPrivateKey({
|
||||
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedClientPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
});
|
||||
|
||||
return {
|
||||
relayAddress: orgKmsDecryptor({ cipherTextBlob: gateway.relayAddress }).toString(),
|
||||
privateKey: clientSkObj.export({ type: "pkcs8", format: "pem" }),
|
||||
certificate: clientCert.toString("pem"),
|
||||
certChain: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
|
||||
identityId: gateway.identityId,
|
||||
orgId: orgGatewayConfig.orgId
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
getGatewayRelayDetails,
|
||||
exchangeAllocatedRelayAddress,
|
||||
listGateways,
|
||||
getGatewayById,
|
||||
updateGatewayById,
|
||||
deleteGatewayById,
|
||||
getProjectGateways,
|
||||
fnGetGatewayClientTls,
|
||||
heartbeat
|
||||
};
|
||||
};
|
@ -1,39 +0,0 @@
|
||||
import { OrgServiceActor } from "@app/lib/types";
|
||||
import { ActorAuthMethod } from "@app/services/auth/auth-type";
|
||||
|
||||
export type TExchangeAllocatedRelayAddressDTO = {
|
||||
identityId: string;
|
||||
identityOrg: string;
|
||||
identityOrgAuthMethod: ActorAuthMethod;
|
||||
relayAddress: string;
|
||||
};
|
||||
|
||||
export type TListGatewaysDTO = {
|
||||
orgPermission: OrgServiceActor;
|
||||
};
|
||||
|
||||
export type TGetGatewayByIdDTO = {
|
||||
id: string;
|
||||
orgPermission: OrgServiceActor;
|
||||
};
|
||||
|
||||
export type TUpdateGatewayByIdDTO = {
|
||||
id: string;
|
||||
name?: string;
|
||||
projectIds?: string[];
|
||||
orgPermission: OrgServiceActor;
|
||||
};
|
||||
|
||||
export type TDeleteGatewayByIdDTO = {
|
||||
id: string;
|
||||
orgPermission: OrgServiceActor;
|
||||
};
|
||||
|
||||
export type TGetProjectGatewayByIdDTO = {
|
||||
projectId: string;
|
||||
projectPermission: OrgServiceActor;
|
||||
};
|
||||
|
||||
export type THeartBeatDTO = {
|
||||
orgPermission: OrgServiceActor;
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { ormify } from "@app/lib/knex";
|
||||
|
||||
export type TOrgGatewayConfigDALFactory = ReturnType<typeof orgGatewayConfigDALFactory>;
|
||||
|
||||
export const orgGatewayConfigDALFactory = (db: TDbClient) => {
|
||||
const orm = ormify(db, TableName.OrgGatewayConfig);
|
||||
return orm;
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { ormify } from "@app/lib/knex";
|
||||
|
||||
export type TProjectGatewayDALFactory = ReturnType<typeof projectGatewayDALFactory>;
|
||||
|
||||
export const projectGatewayDALFactory = (db: TDbClient) => {
|
||||
const orm = ormify(db, TableName.ProjectGateway);
|
||||
return orm;
|
||||
};
|
@ -3,7 +3,7 @@ import slugify from "@sindresorhus/slugify";
|
||||
|
||||
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
|
||||
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
@ -87,14 +87,9 @@ export const groupServiceFactory = ({
|
||||
actorOrgId
|
||||
);
|
||||
const isCustomRole = Boolean(customRole);
|
||||
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to create a more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
|
||||
|
||||
const group = await groupDAL.transaction(async (tx) => {
|
||||
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
|
||||
@ -161,13 +156,9 @@ export const groupServiceFactory = ({
|
||||
);
|
||||
|
||||
const isCustomRole = Boolean(customOrgRole);
|
||||
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update a more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
|
||||
if (!hasRequiredNewRolePermission)
|
||||
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
|
||||
if (isCustomRole) customRole = customOrgRole;
|
||||
}
|
||||
|
||||
@ -338,13 +329,9 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to add user to more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPrivileges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
|
||||
|
||||
const user = await userDAL.findOne({ username });
|
||||
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
|
||||
@ -409,13 +396,9 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to delete user from more privileged group",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPrivileges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
|
||||
|
||||
const user = await userDAL.findOne({ username });
|
||||
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
|
||||
|
@ -1,10 +1,10 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
@ -79,13 +79,9 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -165,13 +161,9 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
|
||||
if (data?.slug) {
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
@ -247,13 +239,9 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
|
||||
const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id);
|
||||
return {
|
||||
|
@ -1,10 +1,10 @@
|
||||
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
|
||||
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
@ -88,13 +88,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
|
||||
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -176,13 +172,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
|
||||
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -276,13 +268,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to edit more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to edit more privileged identity" });
|
||||
|
||||
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
|
@ -1,11 +1,11 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
import crypto, { KeyObject } from "crypto";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
|
||||
import { isValidHostname, isValidIp } from "@app/lib/ip";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
|
||||
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
|
||||
import {
|
||||
|
@ -25,8 +25,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
|
||||
customRateLimits: false,
|
||||
customAlerts: false,
|
||||
secretAccessInsights: false,
|
||||
auditLogs: false,
|
||||
auditLogsRetentionDays: 0,
|
||||
auditLogs: true,
|
||||
auditLogsRetentionDays: 3,
|
||||
auditLogStreams: false,
|
||||
auditLogStreamLimit: 3,
|
||||
samlSSO: false,
|
||||
@ -51,8 +51,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
|
||||
pkiEst: false,
|
||||
enforceMfa: false,
|
||||
projectTemplates: false,
|
||||
kmip: false,
|
||||
gateway: false
|
||||
kmip: false
|
||||
});
|
||||
|
||||
export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
|
||||
|
@ -50,7 +50,7 @@ export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
|
||||
const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login";
|
||||
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login";
|
||||
|
||||
const LICENSE_SERVER_CLOUD_PLAN_TTL = 5 * 60; // 5 mins
|
||||
const LICENSE_SERVER_CLOUD_PLAN_TTL = 30; // 30 second
|
||||
const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`;
|
||||
|
||||
export const licenseServiceFactory = ({
|
||||
@ -142,10 +142,7 @@ export const licenseServiceFactory = ({
|
||||
try {
|
||||
if (instanceType === InstanceType.Cloud) {
|
||||
const cachedPlan = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
|
||||
if (cachedPlan) {
|
||||
logger.info(`getPlan: plan fetched from cache [orgId=${orgId}] [projectId=${projectId}]`);
|
||||
return JSON.parse(cachedPlan) as TFeatureSet;
|
||||
}
|
||||
if (cachedPlan) return JSON.parse(cachedPlan) as TFeatureSet;
|
||||
|
||||
const org = await orgDAL.findOrgById(orgId);
|
||||
if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
|
||||
@ -173,8 +170,6 @@ export const licenseServiceFactory = ({
|
||||
JSON.stringify(onPremFeatures)
|
||||
);
|
||||
return onPremFeatures;
|
||||
} finally {
|
||||
logger.info(`getPlan: Process done for [orgId=${orgId}] [projectId=${projectId}]`);
|
||||
}
|
||||
return onPremFeatures;
|
||||
};
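The cloud-plan lookup above is a cache-aside read whose TTL this change drops from five minutes to thirty seconds. A minimal, self-contained sketch of the pattern; the key-store interface matches the getItem/setItemWithExpiry shape used elsewhere in this diff, while fetchPlan is a hypothetical stand-in for the license-server call:

type TPlanKeyStore = {
  getItem: (key: string) => Promise<string | null>;
  setItemWithExpiry: (key: string, expiryInSeconds: number, value: string) => Promise<unknown>;
};

const getPlanCached = async (
  keyStore: TPlanKeyStore,
  orgId: string,
  fetchPlan: (orgId: string) => Promise<Record<string, unknown>>
) => {
  const cacheKey = `infisical-cloud-plan-${orgId}`;
  const ttlSeconds = 30; // matches the TTL this change introduces

  const cached = await keyStore.getItem(cacheKey);
  if (cached) return JSON.parse(cached) as Record<string, unknown>;

  const plan = await fetchPlan(orgId);
  await keyStore.setItemWithExpiry(cacheKey, ttlSeconds, JSON.stringify(plan));
  return plan;
};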
|
||||
|
@ -69,7 +69,6 @@ export type TFeatureSet = {
|
||||
enforceMfa: boolean;
|
||||
projectTemplates: false;
|
||||
kmip: false;
|
||||
gateway: false;
|
||||
};
|
||||
|
||||
export type TOrgPlansTableDTO = {
|
||||
|
@ -32,18 +32,6 @@ export enum OrgPermissionAdminConsoleAction {
|
||||
AccessAllProjects = "access-all-projects"
|
||||
}
|
||||
|
||||
export enum OrgPermissionSecretShareAction {
|
||||
ManageSettings = "manage-settings"
|
||||
}
|
||||
|
||||
export enum OrgPermissionGatewayActions {
|
||||
// is there a better word for this. This mean can an identity be a gateway
|
||||
CreateGateways = "create-gateways",
|
||||
ListGateways = "list-gateways",
|
||||
EditGateways = "edit-gateways",
|
||||
DeleteGateways = "delete-gateways"
|
||||
}
|
||||
|
||||
export enum OrgPermissionSubjects {
|
||||
Workspace = "workspace",
|
||||
Role = "role",
|
||||
@ -62,9 +50,7 @@ export enum OrgPermissionSubjects {
|
||||
AuditLogs = "audit-logs",
|
||||
ProjectTemplates = "project-templates",
|
||||
AppConnections = "app-connections",
|
||||
Kmip = "kmip",
|
||||
Gateway = "gateway",
|
||||
SecretShare = "secret-share"
|
||||
Kmip = "kmip"
|
||||
}
|
||||
|
||||
export type AppConnectionSubjectFields = {
|
||||
@ -87,7 +73,6 @@ export type OrgPermissionSet =
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
|
||||
| [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway]
|
||||
| [
|
||||
OrgPermissionAppConnectionActions,
|
||||
(
|
||||
@ -96,8 +81,7 @@ export type OrgPermissionSet =
|
||||
)
|
||||
]
|
||||
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole]
|
||||
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip]
|
||||
| [OrgPermissionSecretShareAction, OrgPermissionSubjects.SecretShare];
|
||||
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip];
|
||||
|
||||
const AppConnectionConditionSchema = z
|
||||
.object({
|
||||
@ -191,23 +175,11 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.SecretShare).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionSecretShareAction).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.Kmip).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionKmipActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(OrgPermissionSubjects.Gateway).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionGatewayActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
})
|
||||
]);
|
||||
|
||||
@ -292,11 +264,6 @@ const buildAdminPermission = () => {
|
||||
can(OrgPermissionAppConnectionActions.Delete, OrgPermissionSubjects.AppConnections);
|
||||
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
|
||||
|
||||
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
|
||||
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
|
||||
can(OrgPermissionGatewayActions.EditGateways, OrgPermissionSubjects.Gateway);
|
||||
can(OrgPermissionGatewayActions.DeleteGateways, OrgPermissionSubjects.Gateway);
|
||||
|
||||
can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);
|
||||
|
||||
can(OrgPermissionKmipActions.Setup, OrgPermissionSubjects.Kmip);
|
||||
@ -304,8 +271,6 @@ const buildAdminPermission = () => {
|
||||
// the proxy assignment is temporary in order to prevent "more privilege" error during role assignment to MI
|
||||
can(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
can(OrgPermissionSecretShareAction.ManageSettings, OrgPermissionSubjects.SecretShare);
|
||||
|
||||
return rules;
|
||||
};
|
||||
|
||||
@ -335,8 +300,6 @@ const buildMemberPermission = () => {
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
|
||||
|
||||
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
|
||||
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
|
||||
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
|
||||
|
||||
return rules;
|
||||
};
|
||||
|
@ -1,109 +1,7 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/ability";
|
||||
import { z } from "zod";
|
||||
|
||||
import { TOrganizations } from "@app/db/schemas";
|
||||
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
|
||||
|
||||
import {
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSet,
|
||||
ProjectPermissionSub,
|
||||
ProjectPermissionV2Schema,
|
||||
SecretSubjectFields
|
||||
} from "./project-permission";
|
||||
|
||||
export function throwIfMissingSecretReadValueOrDescribePermission(
|
||||
permission: MongoAbility<ProjectPermissionSet> | PureAbility,
|
||||
action: Extract<
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSecretActions.ReadValue | ProjectPermissionSecretActions.DescribeSecret
|
||||
>,
|
||||
subjectFields?: SecretSubjectFields
|
||||
) {
|
||||
try {
|
||||
if (subjectFields) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, subjectFields)
|
||||
);
|
||||
} else {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
}
|
||||
} catch {
|
||||
if (subjectFields) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(action, subject(ProjectPermissionSub.Secrets, subjectFields));
|
||||
} else {
|
||||
ForbiddenError.from(permission).throwUnlessCan(action, ProjectPermissionSub.Secrets);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function hasSecretReadValueOrDescribePermission(
|
||||
permission: MongoAbility<ProjectPermissionSet>,
|
||||
action: Extract<
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSecretActions.DescribeSecret | ProjectPermissionSecretActions.ReadValue
|
||||
>,
|
||||
subjectFields?: SecretSubjectFields
|
||||
) {
|
||||
let canNewPermission = false;
|
||||
let canOldPermission = false;
|
||||
|
||||
if (subjectFields) {
|
||||
canNewPermission = permission.can(action, subject(ProjectPermissionSub.Secrets, subjectFields));
|
||||
canOldPermission = permission.can(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, subjectFields)
|
||||
);
|
||||
} else {
|
||||
canNewPermission = permission.can(action, ProjectPermissionSub.Secrets);
|
||||
canOldPermission = permission.can(
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
}
|
||||
|
||||
return canNewPermission || canOldPermission;
|
||||
}
|
||||
|
||||
const OptionalArrayPermissionSchema = ProjectPermissionV2Schema.array().optional();
|
||||
export function checkForInvalidPermissionCombination(permissions: z.infer<typeof OptionalArrayPermissionSchema>) {
|
||||
if (!permissions) return;
|
||||
|
||||
for (const permission of permissions) {
|
||||
if (permission.subject === ProjectPermissionSub.Secrets) {
|
||||
if (permission.action.includes(ProjectPermissionSecretActions.DescribeAndReadValue)) {
|
||||
const hasReadValue = permission.action.includes(ProjectPermissionSecretActions.ReadValue);
|
||||
const hasDescribeSecret = permission.action.includes(ProjectPermissionSecretActions.DescribeSecret);
|
||||
|
||||
// eslint-disable-next-line no-continue
|
||||
if (!hasReadValue && !hasDescribeSecret) continue;
|
||||
|
||||
const hasBothDescribeAndReadValue = hasReadValue && hasDescribeSecret;
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `You have selected Read, and ${
|
||||
hasBothDescribeAndReadValue
|
||||
? "both Read Value and Describe Secret"
|
||||
: hasReadValue
|
||||
? "Read Value"
|
||||
: hasDescribeSecret
|
||||
? "Describe Secret"
|
||||
: ""
|
||||
}. You cannot select Read Value or Describe Secret if you have selected Read. The Read permission is a legacy action which has been replaced by Describe Secret and Read Value.`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
|
||||
if (!actorAuthMethod) return false;
|
||||
|
||||
@ -131,12 +29,12 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
|
||||
}
|
||||
}
|
||||
|
||||
const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) => {
|
||||
const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
|
||||
const handler = {
|
||||
get(target: Record<string, string>, prop: string) {
|
||||
if (!Object.hasOwn(target, prop)) {
|
||||
if (!(prop in target)) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
target[prop] = `{{${key}.${prop}}}`; // Add missing key as an "own" property
|
||||
target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
|
||||
}
|
||||
return target[prop];
|
||||
}
|
||||
@ -145,4 +43,4 @@ const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) =
|
||||
return new Proxy(obj, handler);
|
||||
};
|
||||
|
||||
export { escapeHandlebarsMissingDict, isAuthMethodSaml, validateOrgSSO };
|
||||
export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
|
||||
|
@ -1,6 +1,5 @@
|
||||
import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";
|
||||
import { PackRule, unpackRules } from "@casl/ability/extra";
|
||||
import { requestContext } from "@fastify/request-context";
|
||||
import { MongoQuery } from "@ucast/mongo2js";
|
||||
import handlebars from "handlebars";
|
||||
|
||||
@ -23,7 +22,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok
|
||||
|
||||
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
|
||||
import { TPermissionDALFactory } from "./permission-dal";
|
||||
import { escapeHandlebarsMissingDict, validateOrgSSO } from "./permission-fns";
|
||||
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
|
||||
import {
|
||||
TBuildOrgPermissionDTO,
|
||||
TBuildProjectPermissionDTO,
|
||||
@ -244,13 +243,13 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const unescapedMetadata = objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
)
|
||||
);
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
|
||||
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata });
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
@ -318,26 +317,20 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const unescapedIdentityAuthInfo = requestContext.get("identityAuthInfo");
|
||||
const unescapedMetadata = objectify(
|
||||
identityProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
objectify(
|
||||
identityProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
)
|
||||
);
|
||||
const identityAuthInfo =
|
||||
unescapedIdentityAuthInfo?.identityId === identityId && unescapedIdentityAuthInfo
|
||||
? escapeHandlebarsMissingDict(unescapedIdentityAuthInfo as never, "identity.auth")
|
||||
: {};
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
|
||||
|
||||
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata, auth: unescapedIdentityAuthInfo });
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
id: identityProjectPermission.identityId,
|
||||
username: identityProjectPermission.username,
|
||||
metadata: metadataKeyValuePair,
|
||||
auth: identityAuthInfo
|
||||
metadata: metadataKeyValuePair
|
||||
}
|
||||
},
|
||||
{ data: false }
|
||||
@ -431,13 +424,12 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
objectify(
|
||||
userProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
),
|
||||
"identity.metadata"
|
||||
)
|
||||
);
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
@ -477,14 +469,14 @@ export const permissionServiceFactory = ({
|
||||
|
||||
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
|
||||
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingDict(
|
||||
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
|
||||
objectify(
|
||||
identityProjectPermission.metadata,
|
||||
(i) => i.key,
|
||||
(i) => i.value
|
||||
),
|
||||
"identity.metadata"
|
||||
)
|
||||
);
|
||||
|
||||
const interpolateRules = templatedRules(
|
||||
{
|
||||
identity: {
|
||||
|
@ -5,6 +5,22 @@ import { PermissionConditionOperators } from "@app/lib/casl";
|
||||
|
||||
export const PermissionConditionSchema = {
|
||||
[PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
|
||||
[PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(),
|
||||
[PermissionConditionOperators.$REGEX]: z
|
||||
.string()
|
||||
.min(1)
|
||||
.refine(
|
||||
(el) => {
|
||||
try {
|
||||
// eslint-disable-next-line no-new
|
||||
new RegExp(el);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
{ message: "Invalid regex pattern" }
|
||||
),
|
||||
[PermissionConditionOperators.$EQ]: z.string().min(1),
|
||||
[PermissionConditionOperators.$NEQ]: z.string().min(1),
|
||||
[PermissionConditionOperators.$GLOB]: z
|
||||
|
@ -17,15 +17,6 @@ export enum ProjectPermissionActions {
|
||||
Delete = "delete"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionSecretActions {
|
||||
DescribeAndReadValue = "read",
|
||||
DescribeSecret = "describeSecret",
|
||||
ReadValue = "readValue",
|
||||
Create = "create",
|
||||
Edit = "edit",
|
||||
Delete = "delete"
|
||||
}
|
||||
|
||||
export enum ProjectPermissionCmekActions {
|
||||
Read = "read",
|
||||
Create = "create",
|
||||
@ -124,7 +115,7 @@ export type IdentityManagementSubjectFields = {
|
||||
|
||||
export type ProjectPermissionSet =
|
||||
| [
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SecretSubjectFields)
|
||||
]
|
||||
| [
|
||||
@ -438,7 +429,6 @@ const GeneralPermissionSchema = [
|
||||
})
|
||||
];
|
||||
|
||||
// Do not update this schema anymore, as it's kept purely for backwards compatability. Update V2 schema only.
|
||||
export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
|
||||
@ -470,7 +460,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
|
||||
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
),
|
||||
conditions: SecretConditionV2Schema.describe(
|
||||
@ -527,6 +517,7 @@ const buildAdminPermissionRules = () => {
|
||||
|
||||
// Admins get full access to everything
|
||||
[
|
||||
ProjectPermissionSub.Secrets,
|
||||
ProjectPermissionSub.SecretFolders,
|
||||
ProjectPermissionSub.SecretImports,
|
||||
ProjectPermissionSub.SecretApproval,
|
||||
@ -559,22 +550,10 @@ const buildAdminPermissionRules = () => {
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Delete
|
||||
],
|
||||
el
|
||||
el as ProjectPermissionSub
|
||||
);
|
||||
});
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionSecretActions.Delete
|
||||
],
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
@ -634,12 +613,10 @@ const buildMemberPermissionRules = () => {
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretActions.DescribeAndReadValue,
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Delete
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Delete
|
||||
],
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
@ -811,9 +788,7 @@ export const projectMemberPermissions = buildMemberPermissionRules();
|
||||
const buildViewerPermissionRules = () => {
|
||||
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
|
||||
|
||||
can(ProjectPermissionSecretActions.DescribeAndReadValue, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionSecretActions.ReadValue, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
|
||||
can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
|
||||
@ -862,6 +837,7 @@ export const buildServiceTokenProjectPermission = (
|
||||
(subject) => {
|
||||
if (canWrite) {
|
||||
can(ProjectPermissionActions.Edit, subject, {
|
||||
// TODO: @Akhi
|
||||
// @ts-expect-error type
|
||||
secretPath: { $glob: secretPath },
|
||||
environment
|
||||
@ -940,17 +916,7 @@ export const backfillPermissionV1SchemaToV2Schema = (
|
||||
subject: ProjectPermissionSub.SecretImports as const
|
||||
}));
|
||||
|
||||
const secretPolicies = secretSubjects.map(({ subject, ...el }) => ({
|
||||
subject: ProjectPermissionSub.Secrets as const,
|
||||
...el,
|
||||
action:
|
||||
el.action.includes(ProjectPermissionActions.Read) && !el.action.includes(ProjectPermissionSecretActions.ReadValue)
|
||||
? el.action.concat(ProjectPermissionSecretActions.ReadValue)
|
||||
: el.action
|
||||
}));
|
||||
|
||||
const secretFolderPolicies = secretSubjects
|
||||
|
||||
.map(({ subject, ...el }) => ({
|
||||
...el,
|
||||
// read permission is not needed anymore
|
||||
@ -992,7 +958,6 @@ export const backfillPermissionV1SchemaToV2Schema = (
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
secretImportPolicies,
|
||||
secretPolicies,
|
||||
dynamicSecretPolicies,
|
||||
hasReadOnlyFolder.length ? [] : secretFolderPolicies
|
||||
);
|
||||
|
@ -1,10 +1,10 @@
|
||||
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
|
||||
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
@ -76,13 +76,9 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged user",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
|
||||
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -167,13 +163,9 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
|
||||
// we need to validate that the privilege given is not higher than the assigning users permission
|
||||
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
|
||||
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
|
||||
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
|
||||
if (!permissionBoundary.isValid)
|
||||
throw new ForbiddenRequestError({
|
||||
name: "PermissionBoundaryError",
|
||||
message: "Failed to update more privileged identity",
|
||||
details: { missingPermissions: permissionBoundary.missingPermissions }
|
||||
});
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
|
||||
|
||||
if (dto?.slug) {
|
||||
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
|
||||
|
@ -63,7 +63,7 @@ export const samlConfigServiceFactory = ({
|
||||
kmsService
|
||||
}: TSamlConfigServiceFactoryDep) => {
|
||||
const createSamlCfg = async ({
|
||||
idpCert,
|
||||
cert,
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
@ -93,9 +93,9 @@ export const samlConfigServiceFactory = ({
|
||||
orgId,
|
||||
authProvider,
|
||||
isActive,
|
||||
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob,
|
||||
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
|
||||
encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
|
||||
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob
|
||||
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
|
||||
});
|
||||
|
||||
return samlConfig;
|
||||
@ -106,7 +106,7 @@ export const samlConfigServiceFactory = ({
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
idpCert,
|
||||
cert,
|
||||
actorId,
|
||||
issuer,
|
||||
isActive,
|
||||
@ -136,8 +136,8 @@ export const samlConfigServiceFactory = ({
|
||||
updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
if (idpCert !== undefined) {
|
||||
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob;
|
||||
if (cert !== undefined) {
|
||||
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
|
||||
}
|
||||
|
||||
const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);
|
||||
|
@ -15,7 +15,7 @@ export type TCreateSamlCfgDTO = {
|
||||
isActive: boolean;
|
||||
entryPoint: string;
|
||||
issuer: string;
|
||||
idpCert: string;
|
||||
cert: string;
|
||||
} & TOrgPermission;
|
||||
|
||||
export type TUpdateSamlCfgDTO = Partial<{
|
||||
@ -23,7 +23,7 @@ export type TUpdateSamlCfgDTO = Partial<{
|
||||
isActive: boolean;
|
||||
entryPoint: string;
|
||||
issuer: string;
|
||||
idpCert: string;
|
||||
cert: string;
|
||||
}> &
|
||||
TOrgPermission;
|
||||
|
||||
|
@ -100,7 +100,6 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
|
||||
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
|
||||
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
|
||||
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
|
||||
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
|
||||
@ -163,10 +162,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
reviewerEmail: email,
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName,
|
||||
reviewerComment: comment
|
||||
}) =>
|
||||
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
|
||||
reviewerFirstName: firstName
|
||||
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
|
||||
},
|
||||
{
|
||||
key: "approverUserId",
|
||||
|
@ -6,7 +6,6 @@ import {
|
||||
SecretEncryptionAlgo,
|
||||
SecretKeyEncoding,
|
||||
SecretType,
|
||||
TableName,
|
||||
TSecretApprovalRequestsSecretsInsert,
|
||||
TSecretApprovalRequestsSecretsV2Insert
|
||||
} from "@app/db/schemas";
|
||||
@ -58,9 +57,8 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
|
||||
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
|
||||
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
|
||||
@ -90,12 +88,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
secretDAL: TSecretDALFactory;
|
||||
secretTagDAL: Pick<
|
||||
TSecretTagDALFactory,
|
||||
| "findManyTagsById"
|
||||
| "saveTagsToSecret"
|
||||
| "deleteTagsManySecret"
|
||||
| "saveTagsToSecretV2"
|
||||
| "deleteTagsToSecretV2"
|
||||
| "find"
|
||||
"findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "saveTagsToSecretV2" | "deleteTagsToSecretV2"
|
||||
>;
|
||||
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
|
||||
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
|
||||
@ -113,7 +106,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
|
||||
secretV2BridgeDAL: Pick<
|
||||
TSecretV2BridgeDALFactory,
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany"
|
||||
>;
|
||||
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
|
||||
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
|
||||
@ -327,7 +320,6 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
approvalId,
|
||||
actor,
|
||||
status,
|
||||
comment,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
@ -380,18 +372,15 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
return secretApprovalRequestReviewerDAL.create(
|
||||
{
|
||||
status,
|
||||
comment,
|
||||
requestId: secretApprovalRequest.id,
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
|
||||
});
|
||||
|
||||
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
|
||||
return reviewStatus;
|
||||
};
|
||||
|
||||
const updateApprovalStatus = async ({
|
||||
@ -510,7 +499,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
if (!hasMinApproval && !isSoftEnforcement)
|
||||
throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
|
||||
|
||||
const { botKey, shouldUseSecretV2Bridge, project } = await projectBotService.getBotKey(projectId);
|
||||
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
|
||||
let mergeStatus;
|
||||
if (shouldUseSecretV2Bridge) {
|
||||
// this cycle if for bridged secrets
|
||||
@ -868,6 +857,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
|
||||
if (isSoftEnforcement) {
|
||||
const cfg = getConfig();
|
||||
const project = await projectDAL.findProjectById(projectId);
|
||||
const env = await projectEnvDAL.findOne({ id: policy.envId });
|
||||
const requestedByUser = await userDAL.findOne({ id: actorId });
|
||||
const approverUsers = await userDAL.find({
|
||||
@ -919,11 +909,10 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
|
||||
environment,
|
||||
secretPath
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
await projectDAL.checkProjectUpgradeStatus(projectId);
|
||||
|
||||
@ -1008,7 +997,6 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
: keyName2BlindIndex[secretName];
|
||||
// add tags
|
||||
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
|
||||
|
||||
return {
|
||||
...latestSecretVersions[secretId],
|
||||
...el,
|
||||
@ -1164,8 +1152,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
environment: env.name,
|
||||
secretPath,
|
||||
projectId,
|
||||
requestId: secretApprovalRequest.id,
|
||||
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretName) ?? []))]
|
||||
requestId: secretApprovalRequest.id
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -1307,7 +1294,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
secretMetadata
|
||||
}) => {
|
||||
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
|
||||
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
|
||||
if (tagIds?.length) commitTagIds[secretKey] = tagIds;
|
||||
return {
|
||||
...latestSecretVersions[secretId],
|
||||
secretMetadata,
|
||||
@ -1336,48 +1323,17 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
// deleted secrets
|
||||
const deletedSecrets = data[SecretOperations.Delete];
|
||||
if (deletedSecrets && deletedSecrets.length) {
|
||||
const secretsToDeleteInDB = await secretV2BridgeDAL.find({
|
||||
const secretsToDeleteInDB = await secretV2BridgeDAL.findBySecretKeys(
|
||||
folderId,
|
||||
$complex: {
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "or",
|
||||
value: deletedSecrets.map((el) => ({
|
||||
operator: "and",
|
||||
value: [
|
||||
{
|
||||
operator: "eq",
|
||||
field: `${TableName.SecretV2}.key` as "key",
|
||||
value: el.secretKey
|
||||
},
|
||||
{
|
||||
operator: "eq",
|
||||
field: "type",
|
||||
value: SecretType.Shared
|
||||
}
|
||||
]
|
||||
}))
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
deletedSecrets.map((el) => ({
|
||||
key: el.secretKey,
|
||||
type: SecretType.Shared
|
||||
}))
|
||||
);
|
||||
if (secretsToDeleteInDB.length !== deletedSecrets.length)
|
||||
throw new NotFoundError({
|
||||
message: `Secret does not exist: ${secretsToDeleteInDB.map((el) => el.key).join(",")}`
|
||||
});
|
||||
secretsToDeleteInDB.forEach((el) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretActions.Delete,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath,
|
||||
secretName: el.key,
|
||||
secretTags: el.tags?.map((i) => i.slug)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
const secretsGroupedByKey = groupBy(secretsToDeleteInDB, (i) => i.key);
|
||||
const deletedSecretIds = deletedSecrets.map((el) => secretsGroupedByKey[el.secretKey][0].id);
|
||||
const latestSecretVersions = await secretVersionV2BridgeDAL.findLatestVersionMany(folderId, deletedSecretIds);
|
||||
@ -1403,9 +1359,9 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
const tagsGroupById = groupBy(tags, (i) => i.id);
|
||||
|
||||
commits.forEach((commit) => {
|
||||
let action = ProjectPermissionSecretActions.Create;
|
||||
if (commit.op === SecretOperations.Update) action = ProjectPermissionSecretActions.Edit;
|
||||
if (commit.op === SecretOperations.Delete) return; // we do the validation on top
|
||||
let action = ProjectPermissionActions.Create;
|
||||
if (commit.op === SecretOperations.Update) action = ProjectPermissionActions.Edit;
|
||||
if (commit.op === SecretOperations.Delete) action = ProjectPermissionActions.Delete;
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
action,
|
||||
@ -1496,8 +1452,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
environment: env.name,
|
||||
secretPath,
|
||||
projectId,
|
||||
requestId: secretApprovalRequest.id,
|
||||
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretKey) ?? []))]
|
||||
requestId: secretApprovalRequest.id
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -80,7 +80,6 @@ export type TStatusChangeDTO = {
|
||||
export type TReviewRequestDTO = {
|
||||
approvalId: string;
|
||||
status: ApprovalStatus;
|
||||
comment?: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TApprovalRequestCountDTO = TProjectPermission;
|
||||
|
@ -265,7 +265,6 @@ export const secretReplicationServiceFactory = ({
|
||||
folderDAL,
|
||||
secretImportDAL,
|
||||
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
|
||||
viewSecretValue: true,
|
||||
hasSecretAccess: () => true
|
||||
});
|
||||
// secrets that gets replicated across imports
|
||||
|
@ -13,7 +13,6 @@ import { NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
@ -333,7 +332,6 @@ export const secretRotationQueueFactory = ({
|
||||
await secretVersionV2BridgeDAL.insertMany(
|
||||
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
|
||||
...el,
|
||||
actorType: ActorType.PLATFORM,
|
||||
secretId: id
|
||||
})),
|
||||
tx
|
||||
|
@ -15,11 +15,7 @@ import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "../permission/project-permission";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TSecretRotationDALFactory } from "./secret-rotation-dal";
|
||||
import { TSecretRotationQueueFactory } from "./secret-rotation-queue";
|
||||
import { TSecretRotationEncData } from "./secret-rotation-queue/secret-rotation-queue-types";
|
||||
@ -110,7 +106,7 @@ export const secretRotationServiceFactory = ({
|
||||
});
|
||||
}
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
|
@ -1,18 +1,16 @@
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
|
||||
// akhilmhdh: I did this, quite strange bug with eslint. Everything do have a type stil has this error
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
|
||||
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
import { InternalServerError, NotFoundError } from "@app/lib/errors";
|
||||
import { groupBy } from "@app/lib/fn";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
|
||||
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
|
||||
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "@app/services/secret/secret-fns";
|
||||
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
|
||||
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
@ -23,16 +21,8 @@ import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secre
|
||||
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import {
|
||||
hasSecretReadValueOrDescribePermission,
|
||||
throwIfMissingSecretReadValueOrDescribePermission
|
||||
} from "../permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "../permission/project-permission";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import {
|
||||
TGetSnapshotDataDTO,
|
||||
TProjectSnapshotCountDTO,
|
||||
@ -106,10 +96,10 @@ export const secretSnapshotServiceFactory = ({
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
|
||||
environment,
|
||||
secretPath: path
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder) {
|
||||
@ -143,10 +133,10 @@ export const secretSnapshotServiceFactory = ({
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
|
||||
environment,
|
||||
secretPath: path
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
if (!folder)
|
||||
@ -171,7 +161,6 @@ export const secretSnapshotServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
|
||||
const shouldUseBridge = snapshot.projectVersion === 3;
|
||||
let snapshotDetails;
|
||||
if (shouldUseBridge) {
|
||||
@ -180,112 +169,68 @@ export const secretSnapshotServiceFactory = ({
|
||||
projectId: snapshot.projectId
|
||||
});
|
||||
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotV2DataById(id);
|
||||
|
||||
const fullFolderPath = await getFullFolderPath({
|
||||
folderDAL,
|
||||
folderId: encryptedSnapshotDetails.folderId,
|
||||
envId: encryptedSnapshotDetails.environment.id
|
||||
});
|
||||
|
||||
snapshotDetails = {
|
||||
...encryptedSnapshotDetails,
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
|
||||
const canReadValue = hasSecretReadValueOrDescribePermission(
|
||||
permission,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
{
|
||||
environment: encryptedSnapshotDetails.environment.slug,
|
||||
secretPath: fullFolderPath,
|
||||
secretName: el.key,
|
||||
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
|
||||
}
|
||||
);
|
||||
|
||||
let secretValue = "";
|
||||
if (canReadValue) {
|
||||
secretValue = el.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
|
||||
: "";
|
||||
} else {
|
||||
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
|
||||
}
|
||||
|
||||
return {
|
||||
...el,
|
||||
secretKey: el.key,
|
||||
secretValueHidden: !canReadValue,
|
||||
secretValue,
|
||||
secretComment: el.encryptedComment
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
|
||||
: ""
|
||||
};
|
||||
})
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
|
||||
...el,
|
||||
secretKey: el.key,
|
||||
secretValue: el.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
|
||||
: "",
|
||||
secretComment: el.encryptedComment
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
|
||||
: ""
|
||||
}))
|
||||
};
|
||||
} else {
|
||||
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotDataById(id);
|
||||
|
||||
const fullFolderPath = await getFullFolderPath({
|
||||
folderDAL,
|
||||
folderId: encryptedSnapshotDetails.folderId,
|
||||
envId: encryptedSnapshotDetails.environment.id
|
||||
});
|
||||
|
||||
const { botKey } = await projectBotService.getBotKey(snapshot.projectId);
|
||||
if (!botKey)
|
||||
throw new NotFoundError({ message: `Project bot key not found for project with ID '${snapshot.projectId}'` });
|
||||
snapshotDetails = {
|
||||
...encryptedSnapshotDetails,
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
|
||||
...el,
|
||||
secretKey: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretKeyCiphertext,
|
||||
iv: el.secretKeyIV,
|
||||
tag: el.secretKeyTag,
|
||||
key: botKey
|
||||
});
|
||||
|
||||
const canReadValue = hasSecretReadValueOrDescribePermission(
|
||||
permission,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
{
|
||||
environment: encryptedSnapshotDetails.environment.slug,
|
||||
secretPath: fullFolderPath,
|
||||
secretName: secretKey,
|
||||
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
|
||||
}
|
||||
);
|
||||
|
||||
let secretValue = "";
|
||||
|
||||
if (canReadValue) {
|
||||
secretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag,
|
||||
key: botKey
|
||||
});
|
||||
} else {
|
||||
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
|
||||
}
|
||||
|
||||
return {
|
||||
...el,
|
||||
secretKey,
|
||||
secretValueHidden: !canReadValue,
|
||||
secretValue,
|
||||
secretComment:
|
||||
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
|
||||
? decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretCommentCiphertext,
|
||||
iv: el.secretCommentIV,
|
||||
tag: el.secretCommentTag,
|
||||
key: botKey
|
||||
})
|
||||
: ""
|
||||
};
|
||||
})
|
||||
}),
|
||||
secretValue: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag,
|
||||
key: botKey
|
||||
}),
|
||||
secretComment:
|
||||
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
|
||||
? decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretCommentCiphertext,
|
||||
iv: el.secretCommentIV,
|
||||
tag: el.secretCommentTag,
|
||||
key: botKey
|
||||
})
|
||||
: ""
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
const fullFolderPath = await getFullFolderPath({
|
||||
folderDAL,
|
||||
folderId: snapshotDetails.folderId,
|
||||
envId: snapshotDetails.environment.id
|
||||
});
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: snapshotDetails.environment.slug,
|
||||
secretPath: fullFolderPath
|
||||
})
|
||||
);
|
||||
|
||||
return snapshotDetails;
|
||||
};
|
||||
|
||||
@ -425,21 +370,7 @@ export const secretSnapshotServiceFactory = ({
|
||||
const secrets = await secretV2BridgeDAL.insertMany(
|
||||
rollbackSnaps.flatMap(({ secretVersions, folderId }) =>
|
||||
secretVersions.map(
|
||||
({
|
||||
latestSecretVersion,
|
||||
version,
|
||||
updatedAt,
|
||||
createdAt,
|
||||
secretId,
|
||||
envId,
|
||||
id,
|
||||
tags,
|
||||
// exclude the bottom fields from the secret - they are for versioning only.
|
||||
userActorId,
|
||||
identityActorId,
|
||||
actorType,
|
||||
...el
|
||||
}) => ({
|
||||
({ latestSecretVersion, version, updatedAt, createdAt, secretId, envId, id, tags, ...el }) => ({
|
||||
...el,
|
||||
id: secretId,
|
||||
version: deletedTopLevelSecsGroupById[secretId] ? latestSecretVersion + 1 : latestSecretVersion,
|
||||
@ -470,18 +401,8 @@ export const secretSnapshotServiceFactory = ({
|
||||
})),
|
||||
tx
|
||||
);
|
||||
const userActorId = actor === ActorType.USER ? actorId : undefined;
|
||||
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
|
||||
const actorType = actor || ActorType.PLATFORM;
|
||||
|
||||
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
|
||||
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({
|
||||
...el,
|
||||
secretId: id,
|
||||
userActorId,
|
||||
identityActorId,
|
||||
actorType
|
||||
})),
|
||||
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({ ...el, secretId: id })),
|
||||
tx
|
||||
);
|
||||
await secretVersionV2TagBridgeDAL.insertMany(
|
||||
|
@ -1,10 +1,10 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
|
||||
import { TSshCertificateAuthorityDALFactory } from "../ssh/ssh-certificate-authority-dal";
|
||||
import { TSshCertificateTemplateDALFactory } from "./ssh-certificate-template-dal";
|
||||
|
@ -6,6 +6,7 @@ export const sanitizedSshCertificate = SshCertificatesSchema.pick({
|
||||
sshCertificateTemplateId: true,
|
||||
serialNumber: true,
|
||||
certType: true,
|
||||
publicKey: true,
|
||||
principals: true,
|
||||
keyId: true,
|
||||
notBefore: true,
|
||||
|
@ -1,13 +1,13 @@
|
||||
import { execFile } from "child_process";
|
||||
import crypto from "crypto";
|
||||
import { promises as fs } from "fs";
|
||||
import ms from "ms";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import { promisify } from "util";
|
||||
|
||||
import { TSshCertificateTemplates } from "@app/db/schemas";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { ms } from "@app/lib/ms";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
|
||||
import {
|
||||
|
@ -1,15 +1,12 @@
|
||||
import { Redis } from "ioredis";
|
||||
|
||||
import { pgAdvisoryLockHashText } from "@app/lib/crypto/hashtext";
|
||||
import { Redlock, Settings } from "@app/lib/red-lock";
|
||||
|
||||
export const PgSqlLock = {
|
||||
BootUpMigration: 2023,
|
||||
SuperAdminInit: 2024,
|
||||
KmsRootKeyInit: 2025,
|
||||
OrgGatewayRootCaInit: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-root-ca:${orgId}`),
|
||||
OrgGatewayCertExchange: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-cert-exchange:${orgId}`)
|
||||
} as const;
|
||||
export enum PgSqlLock {
|
||||
BootUpMigration = 2023,
|
||||
SuperAdminInit = 2024,
|
||||
KmsRootKeyInit = 2025
|
||||
}
|
||||
|
||||
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
|
||||
|
||||
@ -36,8 +33,7 @@ export const KeyStorePrefixes = {
|
||||
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,
|
||||
IdentityAccessTokenStatusUpdate: (identityAccessTokenId: string) =>
|
||||
`identity-access-token-status:${identityAccessTokenId}`,
|
||||
ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`,
|
||||
GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`
|
||||
ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`
|
||||
};
|
||||
|
||||
export const KeyStoreTtls = {
|
||||
|
@ -329,7 +329,6 @@ export const OIDC_AUTH = {
|
||||
boundIssuer: "The unique identifier of the identity provider issuing the JWT.",
|
||||
boundAudiences: "The list of intended recipients.",
|
||||
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
|
||||
claimMetadataMapping: "The attributes that should be present in the permission metadata from the JWT.",
|
||||
boundSubject: "The expected principal that is the subject of the JWT.",
|
||||
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The lifetime for an access token in seconds.",
|
||||
@ -343,7 +342,6 @@ export const OIDC_AUTH = {
|
||||
boundIssuer: "The new unique identifier of the identity provider issuing the JWT.",
|
||||
boundAudiences: "The new list of intended recipients.",
|
||||
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
|
||||
claimMetadataMapping: "The new attributes that should be present in the permission metadata from the JWT.",
|
||||
boundSubject: "The new expected principal that is the subject of the JWT.",
|
||||
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
|
||||
accessTokenTTL: "The new lifetime for an access token in seconds.",
|
||||
@ -461,8 +459,7 @@ export const PROJECTS = {
|
||||
workspaceId: "The ID of the project to update.",
|
||||
name: "The new name of the project.",
|
||||
projectDescription: "An optional description label for the project.",
|
||||
autoCapitalization: "Disable or enable auto-capitalization for the project.",
|
||||
slug: "An optional slug for the project. (must be unique within the organization)"
|
||||
autoCapitalization: "Disable or enable auto-capitalization for the project."
|
||||
},
|
||||
GET_KEY: {
|
||||
workspaceId: "The ID of the project to get the key from."
|
||||
@ -641,8 +638,7 @@ export const FOLDERS = {
|
||||
environment: "The slug of the environment to create the folder in.",
|
||||
name: "The name of the folder to create.",
|
||||
path: "The path of the folder to create.",
|
||||
directory: "The directory of the folder to create. (Deprecated in favor of path)",
|
||||
description: "An optional description label for the folder."
|
||||
directory: "The directory of the folder to create. (Deprecated in favor of path)"
|
||||
},
|
||||
UPDATE: {
|
||||
folderId: "The ID of the folder to update.",
|
||||
@ -651,8 +647,7 @@ export const FOLDERS = {
|
||||
path: "The path of the folder to update.",
|
||||
directory: "The new directory of the folder to update. (Deprecated in favor of path)",
|
||||
projectSlug: "The slug of the project where the folder is located.",
|
||||
workspaceId: "The ID of the project where the folder is located.",
|
||||
description: "An optional description label for the folder."
|
||||
workspaceId: "The ID of the project where the folder is located."
|
||||
},
|
||||
DELETE: {
|
||||
folderIdOrName: "The ID or name of the folder to delete.",
|
||||
@ -669,7 +664,6 @@ export const SECRETS = {
|
||||
secretPath: "The path of the secret to attach tags to.",
|
||||
type: "The type of the secret to attach tags to. (shared/personal)",
|
||||
environment: "The slug of the environment where the secret is located",
|
||||
viewSecretValue: "Whether or not to retrieve the secret value.",
|
||||
projectSlug: "The slug of the project where the secret is located.",
|
||||
tagSlugs: "An array of existing tag slugs to attach to the secret."
|
||||
},
|
||||
@ -693,7 +687,6 @@ export const RAW_SECRETS = {
|
||||
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
|
||||
environment: "The slug of the environment to list secrets from.",
|
||||
secretPath: "The secret path to list secrets from.",
|
||||
viewSecretValue: "Whether or not to retrieve the secret value.",
|
||||
includeImports: "Weather to include imported secrets or not.",
|
||||
tagSlugs: "The comma separated tag slugs to filter secrets.",
|
||||
metadataFilter:
|
||||
@ -722,7 +715,6 @@ export const RAW_SECRETS = {
|
||||
secretPath: "The path of the secret to get.",
|
||||
version: "The version of the secret to get.",
|
||||
type: "The type of the secret to get.",
|
||||
viewSecretValue: "Whether or not to retrieve the secret value.",
|
||||
includeImports: "Weather to include imported secrets or not."
|
||||
},
|
||||
UPDATE: {
|
||||
@ -1727,8 +1719,7 @@ export const SecretSyncs = {
|
||||
SYNC_OPTIONS: (destination: SecretSync) => {
|
||||
const destinationName = SECRET_SYNC_NAME_MAP[destination];
|
||||
return {
|
||||
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`,
|
||||
disableSecretDeletion: `Enable this flag to prevent removal of secrets from the ${destinationName} destination when syncing.`
|
||||
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`
|
||||
};
|
||||
},
|
||||
ADDITIONAL_SYNC_OPTIONS: {
|
||||
@ -1774,12 +1765,6 @@ export const SecretSyncs = {
|
||||
},
|
||||
DATABRICKS: {
|
||||
scope: "The Databricks secret scope that secrets should be synced to."
|
||||
},
|
||||
HUMANITEC: {
|
||||
app: "The ID of the Humanitec app to sync secrets to.",
|
||||
org: "The ID of the Humanitec org to sync secrets to.",
|
||||
env: "The ID of the Humanitec environment to sync secrets to.",
|
||||
scope: "The Humanitec scope that secrets should be synced to."
|
||||
}
|
||||
}
|
||||
};
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user