Mirror of https://github.com/Infisical/infisical.git (synced 2025-07-28 02:53:22 +00:00)

Compare commits: revert-260...doc/add-gi (230 commits)
Commits (SHA1):

bf97294dad 4ba3899861 2d2ad0724f e90efb7fc8 17d5e4bdab f22a5580a6
0e946f73bd 7b8551f883 3b1ce86ee6 c649661133 70e44d04ef 0dddd58be1
148f522c58 d4c911a28f 603fcd8ab5 a1474145ae 7c055f71f7 14884cd6b0
98fd146e85 1d3dca11e7 22f8a3daa7 395b3d9e05 1041e136fb 21024b0d72
00e68dc0bf 5e068cd8a0 abdf8f46a3 1cf046f6b3 0fda6d6f4d 8d4115925c
d0b3c6b66a a1685af119 8d4a06e9e4 6dbe3c8793 a3ec1a27de 472f02e8b1
3989646b80 472f5eb8b4 f5b039f939 b7b3d07e9f 891a1ea2b9 a807f0cf6c
cfc0b2fb8d f096a567de 65d642113d 92e7e90c21 f9f6ec0a8d d9621b0b17
d80a70731d bd99b4e356 7db0bd7daa 8bc538af93 8ef078872e d5f718c6ad
5f93016d22 f220246eb4 829b399cda f91f9c9487 f0d19e4701 7eeff6c406
132c3080bb bf09fa33fa a87e7b792c e8ca020903 a603938488 cff7981fe0
b39d5c6682 829ae7d3c0 19c26c680c dd1f1d07cc 027b200b1a c3f8c55672
75aeef3897 e761e65322 c97fe77aec 370ed45abb 3e16d7e160 6bf4b4a380
61f786e8d8 26064e3a08 9b246166a1 9dedaa6779 8eab7d2f01 4e796e7e41
c6fa647825 496cebb08f 33db6df7f2 88d25e97e9 4ad9fa1ad1 1642fb42d8
3983c2bc4a 34d87ca30f 12b6f27151 ea426e8b2d 4d567f0b08 6548372e3b
77af640c4c 90f85152bc cfa8770bdc be8562824d 6956d14e2e 4f1fe8a9fa
b0031b71e0 bae7c6c3d7 7503876ca0 36b5a3dc90 dfe36f346f b1b61842c6
f9ca9b51b2 e8b33f27fc 7e7e6ade5c 4010817916 eea367c3bc 860ebb73a9
56567ee7c9 1cd17a451c 6b7bc2a3c4 cb52568ebd 9d30fb3870 161ac5e097
bb5b585cf6 fa94191c40 6a5eabc411 c956a0f91f df7b55606e 5f14b27f41
02b2395276 402fa2b0e0 3725241f52 10b457a695 3912e2082d 7dd6eac20a
5664e1ff26 a27a428329 b196251c19 b18d8d542f 3c287600ab 759d11ff21
2bd817765c 7aa9c5dd00 b693c035ce c65a991943 3a3811cb3c 332ca61f5d
64f43e59d0 ccaf4c00af e3ba1c59bf ce0bc191d8 489ccb8e15 ae8f695b6f
19357d4bd7 776d0a0fe1 85dec28667 21ea7dd317 57e214ef50 1986fe9617
1309f30af9 89a4fc91ca af0ec2400d 770e73e40b 39fdeabdea 25c26f2cde
1ca8b9ba08 14d9fe01e0 216810f289 f530b78eb8 c3809ed22b 9f85d8bba1
1056645ee3 5e9914b738 1ea52e6a80 20da697de8 16abf48081 e73ae485bc
621f73e223 93e69bd34e e382135384 f2a554b5fd df5bdf3773 8401048daf
335a87d856 1add9dd965 df46daf93d f82f7ae8d0 8536a1c987 b3cf43b46d
9d4dbb63ae 9c6f23fba6 babe483ca9 38ede687cd 5f465c4832 a0618086b0
9a9bb4ca43 b68ddfae1b 7646670378 d18be0f74c ec96db3503 7245aaa9ec
d32f69e052 726477e3d7 a4ca996a1b 303312fe91 f3f2879d6d d0f3d96b3e
70d2a21fbc 418ae42d94 273c6b3842 6be8d5d2a7 9eb7640755 741138c4bd
bed620aad0 2ddf75d2e6 02d9dbb987 0ed333c2b2 3841394eb7 b1ba770a71
3552119c7d 7a46725523 3bc39c6cec b5b1e57fe7 1a5f66fe46 ac0cb6d96f
f71f894de8 66d2cc8947 e034aa381a d6ffd4fa5f 1c32dd5d8a c183ef2b4f
b6955d0e9b f4ba441ec3
@@ -78,3 +78,5 @@ PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
 
 SSL_CLIENT_CERTIFICATE_HEADER_KEY=
+
+ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
@@ -7,12 +7,12 @@ permissions:
 
 jobs:
   infisical-tests:
-    name: Run tests before deployment
+    name: Integration tests
     # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
     uses: ./.github/workflows/run-backend-tests.yml
 
   infisical-image:
-    name: Build backend image
+    name: Build
    runs-on: ubuntu-latest
    needs: [infisical-tests]
    steps:
@@ -102,10 +102,10 @@ jobs:
           task-definition: ${{ steps.render-web-container.outputs.task-definition }}
           service: infisical-core-gamma-stage
           cluster: infisical-gamma-stage
-          wait-for-service-stability: false
+          wait-for-service-stability: true
 
-  production-postgres-deployment:
-    name: Deploy to production
+  production-us:
+    name: US production deploy
     runs-on: ubuntu-latest
     needs: [gamma-deployment]
     environment:
@@ -159,3 +159,54 @@ jobs:
           service: infisical-core-platform
           cluster: infisical-core-platform
           wait-for-service-stability: true
+
+  production-eu:
+    name: EU production deploy
+    runs-on: ubuntu-latest
+    needs: [production-us]
+    environment:
+      name: production-eu
+    steps:
+      - uses: twingate/github-action@v1
+        with:
+          service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          audience: sts.amazonaws.com
+          aws-region: eu-central-1
+          role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: "20"
+      - name: Change directory to backend and install dependencies
+        env:
+          DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
+        run: |
+          cd backend
+          npm install
+          npm run migration:latest
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: Download task definition
+        run: |
+          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
+      - name: Render Amazon ECS task definition
+        id: render-web-container
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: task-definition.json
+          container-name: infisical-core-platform
+          image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
+          environment-variables: "LOG_LEVEL=info"
+      - name: Deploy to Amazon ECS service
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
+          service: infisical-core-platform
+          cluster: infisical-core-platform
+          wait-for-service-stability: true
@@ -9,6 +9,7 @@ jobs:
     name: Run tests before deployment
     # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
     uses: ./.github/workflows/run-backend-tests.yml
+
   infisical-standalone:
     name: Build infisical standalone image postgres
     runs-on: ubuntu-latest
@@ -60,3 +61,55 @@ jobs:
         build-args: |
           POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
           INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
+
+  infisical-fips-standalone:
+    name: Build infisical standalone image postgres
+    runs-on: ubuntu-latest
+    needs: [infisical-tests]
+    steps:
+      - name: Extract version from tag
+        id: extract_version
+        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
+      - name: ☁️ Checkout source
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: 📦 Install dependencies to test all dependencies
+        run: npm ci --only-production
+        working-directory: backend
+      - name: version output
+        run: |
+          echo "Output Value: ${{ steps.version.outputs.major }}"
+          echo "Output Value: ${{ steps.version.outputs.minor }}"
+          echo "Output Value: ${{ steps.version.outputs.patch }}"
+          echo "Output Value: ${{ steps.version.outputs.version }}"
+          echo "Output Value: ${{ steps.version.outputs.version_type }}"
+          echo "Output Value: ${{ steps.version.outputs.increment }}"
+      - name: Save commit hashes for tag
+        id: commit
+        uses: pr-mpt/actions-commit-hash@v2
+      - name: 🔧 Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: 🐋 Login to Docker Hub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Set up Depot CLI
+        uses: depot/setup-action@v1
+      - name: 📦 Build backend and export to Docker
+        uses: depot/build-push-action@v1
+        with:
+          project: 64mmf0n610
+          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
+          push: true
+          context: .
+          tags: |
+            infisical/infisical-fips:latest-postgres
+            infisical/infisical-fips:${{ steps.commit.outputs.short }}
+            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
+          platforms: linux/amd64,linux/arm64
+          file: Dockerfile.fips.standalone-infisical
+          build-args: |
+            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
+            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
@@ -6,3 +6,4 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
 docs/self-hosting/configuration/envars.mdx:generic-api-key:106
 frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
 docs/mint.json:generic-api-key:651
+backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
Dockerfile.fips.standalone-infisical (new file, 167 lines)
@@ -0,0 +1,167 @@
+ARG POSTHOG_HOST=https://app.posthog.com
+ARG POSTHOG_API_KEY=posthog-api-key
+ARG INTERCOM_ID=intercom-id
+ARG CAPTCHA_SITE_KEY=captcha-site-key
+
+FROM node:20-slim AS base
+
+FROM base AS frontend-dependencies
+WORKDIR /app
+
+COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
+
+# Install dependencies
+RUN npm ci --only-production --ignore-scripts
+
+# Rebuild the source code only when needed
+FROM base AS frontend-builder
+WORKDIR /app
+
+# Copy dependencies
+COPY --from=frontend-dependencies /app/node_modules ./node_modules
+# Copy all files
+COPY /frontend .
+
+ENV NODE_ENV production
+ENV NEXT_PUBLIC_ENV production
+ARG POSTHOG_HOST
+ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
+ARG POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
+ARG INTERCOM_ID
+ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
+ARG INFISICAL_PLATFORM_VERSION
+ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
+ARG CAPTCHA_SITE_KEY
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
+
+# Build
+RUN npm run build
+
+# Production image
+FROM base AS frontend-runner
+WORKDIR /app
+
+RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
+
+RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
+VOLUME /app/.next/cache/images
+
+COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
+COPY --from=frontend-builder /app/public ./public
+RUN chown non-root-user:nodejs ./public/data
+
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
+COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
+
+USER non-root-user
+
+ENV NEXT_TELEMETRY_DISABLED 1
+
+##
+## BACKEND
+##
+FROM base AS backend-build
+
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
+RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
+
+WORKDIR /app
+
+# Required for pkcs11js
+RUN apt-get update && apt-get install -y \
+    python3 \
+    make \
+    g++ \
+    && rm -rf /var/lib/apt/lists/*
+
+COPY backend/package*.json ./
+RUN npm ci --only-production
+
+COPY /backend .
+COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
+RUN npm i -D tsconfig-paths
+RUN npm run build
+
+# Production stage
+FROM base AS backend-runner
+
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
+WORKDIR /app
+
+# Required for pkcs11js
+RUN apt-get update && apt-get install -y \
+    python3 \
+    make \
+    g++ \
+    && rm -rf /var/lib/apt/lists/*
+
+COPY backend/package*.json ./
+RUN npm ci --only-production
+
+COPY --from=backend-build /app .
+
+RUN mkdir frontend-build
+
+# Production stage
+FROM base AS production
+
+# Install necessary packages
+RUN apt-get update && apt-get install -y \
+    ca-certificates \
+    curl \
+    git \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Infisical CLI
+RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
+    && apt-get update && apt-get install -y infisical=0.31.1 \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
+
+# Give non-root-user permission to update SSL certs
+RUN chown -R non-root-user /etc/ssl/certs
+RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
+RUN chmod -R u+rwx /etc/ssl/certs
+RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
+RUN chown non-root-user /usr/sbin/update-ca-certificates
+RUN chmod u+rx /usr/sbin/update-ca-certificates
+
+## set pre baked keys
+ARG POSTHOG_API_KEY
+ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
+    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
+ARG INTERCOM_ID=intercom-id
+ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
+    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
+ARG CAPTCHA_SITE_KEY
+ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
+    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
+
+WORKDIR /
+
+COPY --from=backend-runner /app /backend
+
+COPY --from=frontend-runner /app ./backend/frontend-build
+
+ENV PORT 8080
+ENV HOST=0.0.0.0
+ENV HTTPS_ENABLED false
+ENV NODE_ENV production
+ENV STANDALONE_BUILD true
+ENV STANDALONE_MODE true
+ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+
+WORKDIR /backend
+
+ENV TELEMETRY_ENABLED true
+
+EXPOSE 8080
+EXPOSE 443
+
+USER non-root-user
+
+CMD ["./standalone-entrypoint.sh"]
@@ -72,6 +72,9 @@ RUN addgroup --system --gid 1001 nodejs \
 
 WORKDIR /app
 
+# Required for pkcs11js
+RUN apk add --no-cache python3 make g++
+
 COPY backend/package*.json ./
 RUN npm ci --only-production
 
|
|||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Required for pkcs11js
|
||||||
|
RUN apk add --no-cache python3 make g++
|
||||||
|
|
||||||
COPY backend/package*.json ./
|
COPY backend/package*.json ./
|
||||||
RUN npm ci --only-production
|
RUN npm ci --only-production
|
||||||
|
|
||||||
|
@@ -3,6 +3,12 @@ FROM node:20-alpine AS build
 
 WORKDIR /app
 
+# Required for pkcs11js
+RUN apk --update add \
+    python3 \
+    make \
+    g++
+
 COPY package*.json ./
 RUN npm ci --only-production
 
@@ -11,12 +17,17 @@ RUN npm run build
 
 # Production stage
 FROM node:20-alpine
 
 WORKDIR /app
 
 ENV npm_config_cache /home/node/.npm
 
 COPY package*.json ./
 
+RUN apk --update add \
+    python3 \
+    make \
+    g++
+
 RUN npm ci --only-production && npm cache clean --force
 
 COPY --from=build /app .
@@ -1,5 +1,44 @@
 FROM node:20-alpine
+
+# ? Setup a test SoftHSM module. In production a real HSM is used.
+
+ARG SOFTHSM2_VERSION=2.5.0
+
+ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
+    SOFTHSM2_SOURCES=/tmp/softhsm2
+
+# install build dependencies including python3
+RUN apk --update add \
+    alpine-sdk \
+    autoconf \
+    automake \
+    git \
+    libtool \
+    openssl-dev \
+    python3 \
+    make \
+    g++
+
+# build and install SoftHSM2
+RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
+WORKDIR ${SOFTHSM2_SOURCES}
+
+RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
+    && sh autogen.sh \
+    && ./configure --prefix=/usr/local --disable-gost \
+    && make \
+    && make install
+
+WORKDIR /root
+RUN rm -fr ${SOFTHSM2_SOURCES}
+
+# install pkcs11-tool
+RUN apk --update add opensc
+
+RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
+
+# ? App setup
+
 RUN apk add --no-cache bash curl && curl -1sLf \
     'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
     && apk add infisical=0.8.1 && apk add --no-cache git
@@ -34,7 +34,7 @@ describe("Identity v1", async () => {
   test("Create identity", async () => {
     const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
     expect(newIdentity.name).toBe("mac1");
-    expect(newIdentity.authMethod).toBeNull();
+    expect(newIdentity.authMethods).toEqual([]);
 
     await deleteIdentity(newIdentity.id);
   });
@@ -42,7 +42,7 @@ describe("Identity v1", async () => {
   test("Update identity", async () => {
     const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
     expect(newIdentity.name).toBe("mac1");
-    expect(newIdentity.authMethod).toBeNull();
+    expect(newIdentity.authMethods).toEqual([]);
 
     const updatedIdentity = await testServer.inject({
       method: "PATCH",
@@ -118,9 +118,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
         value: "stage-value"
       });
 
-      // wait for 5 second for replication to finish
+      // wait for 10 second for replication to finish
       await new Promise((resolve) => {
-        setTimeout(resolve, 5000); // time to breathe for db
+        setTimeout(resolve, 10000); // time to breathe for db
       });
 
       const secret = await getSecretByNameV2({
@@ -173,9 +173,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
         value: "prod-value"
      });
 
-      // wait for 5 second for replication to finish
+      // wait for 10 second for replication to finish
       await new Promise((resolve) => {
-        setTimeout(resolve, 5000); // time to breathe for db
+        setTimeout(resolve, 10000); // time to breathe for db
       });
 
       const secret = await getSecretByNameV2({
@@ -343,9 +343,9 @@ describe.each([{ path: "/" }, { path: "/deep" }])(
         value: "prod-value"
       });
 
-      // wait for 5 second for replication to finish
+      // wait for 10 second for replication to finish
       await new Promise((resolve) => {
-        setTimeout(resolve, 5000); // time to breathe for db
+        setTimeout(resolve, 10000); // time to breathe for db
       });
 
       const secret = await getSecretByNameV2({
@@ -16,6 +16,7 @@ import { initDbConnection } from "@app/db";
 import { queueServiceFactory } from "@app/queue";
 import { keyStoreFactory } from "@app/keystore/keystore";
 import { Redis } from "ioredis";
+import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
 
 dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
 export default {
@@ -54,7 +55,12 @@ export default {
     const smtp = mockSmtpServer();
     const queue = queueServiceFactory(cfg.REDIS_URL);
     const keyStore = keyStoreFactory(cfg.REDIS_URL);
-    const server = await main({ db, smtp, logger, queue, keyStore });
+
+    const hsmModule = initializeHsmModule();
+    hsmModule.initialize();
+
+    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
+
     // @ts-expect-error type
     globalThis.testServer = server;
     // @ts-expect-error type
backend/package-lock.json (generated, 2785 lines changed; diff suppressed because it is too large)
@@ -44,7 +44,7 @@
     "test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
     "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
     "generate:component": "tsx ./scripts/create-backend-file.ts",
-    "generate:schema": "tsx ./scripts/generate-schema-types.ts",
+    "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
     "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
     "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
     "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
@@ -84,6 +84,7 @@
     "@types/passport-google-oauth20": "^2.0.14",
     "@types/pg": "^8.10.9",
     "@types/picomatch": "^2.3.3",
+    "@types/pkcs11js": "^1.0.4",
     "@types/prompt-sync": "^4.2.3",
     "@types/resolve": "^1.20.6",
     "@types/safe-regex": "^1.1.6",
@@ -156,11 +157,12 @@
     "connect-redis": "^7.1.1",
     "cron": "^3.1.7",
     "dotenv": "^16.4.1",
-    "fastify": "^4.26.0",
+    "fastify": "^4.28.1",
     "fastify-plugin": "^4.5.1",
     "google-auth-library": "^9.9.0",
     "googleapis": "^137.1.0",
     "handlebars": "^4.7.8",
+    "hdb": "^0.19.10",
     "ioredis": "^5.3.2",
     "jmespath": "^0.16.0",
     "jsonwebtoken": "^9.0.2",
@@ -187,6 +189,7 @@
     "pg-query-stream": "^4.5.3",
     "picomatch": "^3.0.1",
     "pino": "^8.16.2",
+    "pkcs11js": "^2.1.6",
     "pkijs": "^3.2.4",
     "posthog-node": "^3.6.2",
     "probot": "^13.3.8",
@@ -195,6 +198,7 @@
     "scim2-parse-filter": "^0.2.10",
     "sjcl": "^1.0.8",
     "smee-client": "^2.0.0",
+    "snowflake-sdk": "^1.14.0",
     "tedious": "^18.2.1",
     "tweetnacl": "^1.0.3",
     "tweetnacl-util": "^0.15.1",
backend/src/@types/fastify.d.ts (vendored, 4 lines changed)
@@ -18,6 +18,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
 import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
 import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
 import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
 import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
@@ -43,6 +44,7 @@ import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
 import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
 import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
 import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
+import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
 import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
 import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
 import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
@@ -183,12 +185,14 @@ declare module "fastify" {
       rateLimit: TRateLimitServiceFactory;
       userEngagement: TUserEngagementServiceFactory;
       externalKms: TExternalKmsServiceFactory;
+      hsm: THsmServiceFactory;
       orgAdmin: TOrgAdminServiceFactory;
       slack: TSlackServiceFactory;
       workflowIntegration: TWorkflowIntegrationServiceFactory;
       cmek: TCmekServiceFactory;
       migration: TExternalMigrationServiceFactory;
       externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
+      projectTemplate: TProjectTemplateServiceFactory;
     };
     // this is exclusive use for middlewares in which we need to inject data
     // everywhere else access using service layer
backend/src/@types/hdb.d.ts (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
+declare module "hdb" {
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
+  function createClient(options): any;
+}
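The hdb package ships without TypeScript types, so this ambient declaration is a minimal shim that lets the backend import it. A hypothetical usage sketch follows; the connection options and query are illustrative only and are not part of this change:

import { createClient } from "hdb";

// Illustrative options; real values would come from the integration's configuration.
const client = createClient({ host: "localhost", port: 30015, user: "SYSTEM", password: "example" });
client.connect((err: Error | null) => {
  if (err) throw err;
  client.exec("SELECT 1 FROM DUMMY", (execErr: Error | null, rows: unknown) => {
    console.log(execErr ?? rows);
    client.disconnect();
  });
});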
backend/src/@types/knex.d.ts (vendored, 8 lines changed)
@@ -200,6 +200,9 @@ import {
   TProjectSlackConfigsInsert,
   TProjectSlackConfigsUpdate,
   TProjectsUpdate,
+  TProjectTemplates,
+  TProjectTemplatesInsert,
+  TProjectTemplatesUpdate,
   TProjectUserAdditionalPrivilege,
   TProjectUserAdditionalPrivilegeInsert,
   TProjectUserAdditionalPrivilegeUpdate,
@@ -818,5 +821,10 @@ declare module "knex/types/tables" {
       TExternalGroupOrgRoleMappingsInsert,
       TExternalGroupOrgRoleMappingsUpdate
     >;
+    [TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
+      TProjectTemplates,
+      TProjectTemplatesInsert,
+      TProjectTemplatesUpdate
+    >;
   }
 }
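For context: registering the table with KnexOriginal.CompositeTableType is what makes plain knex calls against TableName.ProjectTemplates type-checked. A minimal hypothetical sketch (the helper name is illustrative and not part of the diff):

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Hypothetical helper; relies only on the table registration shown above for typing.
export const listTemplatesForOrg = (db: Knex, orgId: string) =>
  db(TableName.ProjectTemplates).where({ orgId }).select("*");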
@@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise<void> {
     t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
     t.string("integration").notNullable();
     t.string("teamId"); // vercel-specific
-    t.string("url"); // for self hosted
+    t.string("url"); // for self-hosted
     t.string("namespace"); // hashicorp specific
     t.string("accountId"); // netlify
     t.text("refreshCiphertext");
@@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise<void> {
   await knex.schema.createTable(TableName.Integration, (t) => {
     t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
     t.boolean("isActive").notNullable();
-    t.string("url"); // self hosted
+    t.string("url"); // self-hosted
     t.string("app"); // name of app in provider
     t.string("appId");
     t.string("targetEnvironment");
@@ -0,0 +1,76 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+const BATCH_SIZE = 30_000;
+
+export async function up(knex: Knex): Promise<void> {
+  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
+
+  if (!hasAuthMethodColumnAccessToken) {
+    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
+      t.string("authMethod").nullable();
+    });
+
+    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+    let totalUpdated = 0;
+
+    do {
+      const batchIds = nullableAccessTokens.map((token) => token.id);
+
+      // ! Update the auth method column in batches for the current batch
+      // eslint-disable-next-line no-await-in-loop
+      await knex(TableName.IdentityAccessToken)
+        .whereIn("id", batchIds)
+        .update({
+          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+          // @ts-ignore because generate schema happens after this
+          authMethod: knex(TableName.Identity)
+            .select("authMethod")
+            .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
+            .whereNotNull("authMethod")
+            .first()
+        });
+
+      // eslint-disable-next-line no-await-in-loop
+      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+
+      totalUpdated += batchIds.length;
+      console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
+    } while (nullableAccessTokens.length > 0);
+
+    // ! We delete all access tokens where the identity has no auth method set!
+    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
+    await knex(TableName.IdentityAccessToken)
+      .whereNotExists((queryBuilder) => {
+        void queryBuilder
+          .select("id")
+          .from(TableName.Identity)
+          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
+          .whereNotNull("authMethod");
+      })
+      .delete();
+
+    // Finally we set the authMethod to notNullable after populating the column.
+    // This will fail if the data is not populated correctly, so it's safe.
+    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
+      t.string("authMethod").notNullable().alter();
+    });
+  }
+
+  // ! We aren't dropping the authMethod column from the Identity itself, because we wan't to be able to easily rollback for the time being.
+}
+
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+export async function down(knex: Knex): Promise<void> {
+  const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
+
+  if (hasAuthMethodColumnAccessToken) {
+    await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
+      t.dropColumn("authMethod");
+    });
+  }
+}
+
+const config = { transaction: false };
+export { config };
@@ -0,0 +1,28 @@
+import { Knex } from "knex";
+
+import { TableName } from "@app/db/schemas";
+import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasTable(TableName.ProjectTemplates))) {
+    await knex.schema.createTable(TableName.ProjectTemplates, (t) => {
+      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      t.string("name", 32).notNullable();
+      t.string("description").nullable();
+      t.jsonb("roles").notNullable();
+      t.jsonb("environments").notNullable();
+      t.uuid("orgId").notNullable().references("id").inTable(TableName.Organization).onDelete("CASCADE");
+      t.timestamps(true, true, true);
+    });
+
+    await createOnUpdateTrigger(knex, TableName.ProjectTemplates);
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasTable(TableName.ProjectTemplates)) {
+    await dropOnUpdateTrigger(knex, TableName.ProjectTemplates);
+
+    await knex.schema.dropTable(TableName.ProjectTemplates);
+  }
+}
@@ -0,0 +1,35 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
+    TableName.CertificateTemplateEstConfig,
+    "disableBootstrapCertValidation"
+  );
+
+  const hasCaChainCol = await knex.schema.hasColumn(TableName.CertificateTemplateEstConfig, "encryptedCaChain");
+
+  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
+    if (!hasDisableBootstrapCertValidationCol) {
+      t.boolean("disableBootstrapCertValidation").defaultTo(false).notNullable();
+    }
+
+    if (hasCaChainCol) {
+      t.binary("encryptedCaChain").nullable().alter();
+    }
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
+    TableName.CertificateTemplateEstConfig,
+    "disableBootstrapCertValidation"
+  );
+
+  await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
+    if (hasDisableBootstrapCertValidationCol) {
+      t.dropColumn("disableBootstrapCertValidation");
+    }
+  });
+}
backend/src/db/migrations/20241111175154_kms-root-cfg-hsm.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
+  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
+
+  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
+    if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE");
+    if (!hasTimestampsCol) t.timestamps(true, true, true);
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
+  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
+
+  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
+    if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy");
+    if (hasTimestampsCol) t.dropTimestamps(true);
+  });
+}
@@ -12,11 +12,12 @@ import { TImmutableDBKeys } from "./models";
 export const CertificateTemplateEstConfigsSchema = z.object({
   id: z.string().uuid(),
   certificateTemplateId: z.string().uuid(),
-  encryptedCaChain: zodBuffer,
+  encryptedCaChain: zodBuffer.nullable().optional(),
   hashedPassphrase: z.string(),
   isEnabled: z.boolean(),
   createdAt: z.date(),
-  updatedAt: z.date()
+  updatedAt: z.date(),
+  disableBootstrapCertValidation: z.boolean().default(false)
 });
 
 export type TCertificateTemplateEstConfigs = z.infer<typeof CertificateTemplateEstConfigsSchema>;
@@ -20,7 +20,8 @@ export const IdentityAccessTokensSchema = z.object({
   identityId: z.string().uuid(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  name: z.string().nullable().optional()
+  name: z.string().nullable().optional(),
+  authMethod: z.string()
 });
 
 export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;
@@ -64,6 +64,7 @@ export * from "./project-keys";
 export * from "./project-memberships";
 export * from "./project-roles";
 export * from "./project-slack-configs";
+export * from "./project-templates";
 export * from "./project-user-additional-privilege";
 export * from "./project-user-membership-roles";
 export * from "./projects";
@@ -11,7 +11,10 @@ import { TImmutableDBKeys } from "./models";
 
 export const KmsRootConfigSchema = z.object({
   id: z.string().uuid(),
-  encryptedRootKey: zodBuffer
+  encryptedRootKey: zodBuffer,
+  encryptionStrategy: z.string(),
+  createdAt: z.date(),
+  updatedAt: z.date()
 });
 
 export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
@@ -41,6 +41,7 @@ export enum TableName {
   ProjectUserAdditionalPrivilege = "project_user_additional_privilege",
   ProjectUserMembershipRole = "project_user_membership_roles",
   ProjectKeys = "project_keys",
+  ProjectTemplates = "project_templates",
   Secret = "secrets",
   SecretReference = "secret_references",
   SecretSharing = "secret_sharing",
@@ -189,7 +190,7 @@ export enum ProjectUpgradeStatus {
 
 export enum IdentityAuthMethod {
   TOKEN_AUTH = "token-auth",
-  Univeral = "universal-auth",
+  UNIVERSAL_AUTH = "universal-auth",
   KUBERNETES_AUTH = "kubernetes-auth",
   GCP_AUTH = "gcp-auth",
   AWS_AUTH = "aws-auth",
@@ -15,7 +15,8 @@ export const ProjectRolesSchema = z.object({
   permissions: z.unknown(),
   createdAt: z.date(),
   updatedAt: z.date(),
-  projectId: z.string()
+  projectId: z.string(),
+  version: z.number().default(1)
 });
 
 export type TProjectRoles = z.infer<typeof ProjectRolesSchema>;
backend/src/db/schemas/project-templates.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const ProjectTemplatesSchema = z.object({
+  id: z.string().uuid(),
+  name: z.string(),
+  description: z.string().nullable().optional(),
+  roles: z.unknown(),
+  environments: z.unknown(),
+  orgId: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TProjectTemplates = z.infer<typeof ProjectTemplatesSchema>;
+export type TProjectTemplatesInsert = Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>;
+export type TProjectTemplatesUpdate = Partial<Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>>;
@@ -16,7 +16,7 @@ export async function seed(knex: Knex): Promise<void> {
       // @ts-ignore
       id: seedData1.machineIdentity.id,
       name: seedData1.machineIdentity.name,
-      authMethod: IdentityAuthMethod.Univeral
+      authMethod: IdentityAuthMethod.UNIVERSAL_AUTH
     }
   ]);
   const identityUa = await knex(TableName.IdentityUniversalAuth)
@@ -1,3 +1,5 @@
+import { registerProjectTemplateRouter } from "@app/ee/routes/v1/project-template-router";
+
 import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
 import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
 import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
@@ -92,4 +94,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
   await server.register(registerExternalKmsRouter, {
     prefix: "/external-kms"
   });
+
+  await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
 };
@@ -192,7 +192,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
       }),
       response: {
         200: z.object({
-          roles: ProjectRolesSchema.omit({ permissions: true }).array()
+          roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array()
         })
       }
     },
@@ -225,7 +225,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
       }),
      response: {
         200: z.object({
-          role: SanitizedRoleSchemaV1
+          role: SanitizedRoleSchemaV1.omit({ version: true })
         })
       }
     },
backend/src/ee/routes/v1/project-template-router.ts (new file, 309 lines; listing ends where the captured page ends)
@@ -0,0 +1,309 @@
+import slugify from "@sindresorhus/slugify";
+import { z } from "zod";
+
+import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
+import { EventType } from "@app/ee/services/audit-log/audit-log-types";
+import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
+import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
+import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
+import { ProjectTemplates } from "@app/lib/api-docs";
+import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
+import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
+import { AuthMode } from "@app/services/auth/auth-type";
+
+const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
+
+const SlugSchema = z
+  .string()
+  .trim()
+  .min(1)
+  .max(32)
+  .refine((val) => val.toLowerCase() === val, "Must be lowercase")
+  .refine((v) => slugify(v) === v, {
+    message: "Must be valid slug format"
+  });
+
+const isReservedRoleSlug = (slug: string) =>
+  Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);
+
+const isReservedRoleName = (name: string) =>
+  ["custom", "admin", "viewer", "developer", "no access"].includes(name.toLowerCase());
+
+const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
+  roles: z
+    .object({
+      name: z.string().trim().min(1),
+      slug: SlugSchema,
+      permissions: UnpackedPermissionSchema.array()
+    })
+    .array(),
+  environments: z
+    .object({
+      name: z.string().trim().min(1),
+      slug: SlugSchema,
+      position: z.number().min(1)
+    })
+    .array()
+});
+
+const ProjectTemplateRolesSchema = z
+  .object({
+    name: z.string().trim().min(1),
+    slug: SlugSchema,
+    permissions: ProjectPermissionV2Schema.array()
+  })
+  .array()
+  .superRefine((roles, ctx) => {
+    if (!roles.length) return;
+
+    if (Buffer.byteLength(JSON.stringify(roles)) > MAX_JSON_SIZE_LIMIT_IN_BYTES)
+      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" });
+
+    if (new Set(roles.map((v) => v.slug)).size !== roles.length)
+      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role slugs must be unique" });
+
+    if (new Set(roles.map((v) => v.name)).size !== roles.length)
+      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role names must be unique" });
+
+    roles.forEach((role) => {
+      if (isReservedRoleSlug(role.slug))
+        ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role slug "${role.slug}" is reserved` });
+
+      if (isReservedRoleName(role.name))
+        ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role name "${role.name}" is reserved` });
+    });
+  });
+
+const ProjectTemplateEnvironmentsSchema = z
+  .object({
+    name: z.string().trim().min(1),
+    slug: SlugSchema,
+    position: z.number().min(1)
+  })
+  .array()
+  .min(1)
+  .superRefine((environments, ctx) => {
+    if (Buffer.byteLength(JSON.stringify(environments)) > MAX_JSON_SIZE_LIMIT_IN_BYTES)
+      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" });
+
+    if (new Set(environments.map((v) => v.name)).size !== environments.length)
+      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment names must be unique" });
+
+    if (new Set(environments.map((v) => v.slug)).size !== environments.length)
+      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment slugs must be unique" });
+
+    if (
+      environments.some((env) => env.position < 1 || env.position > environments.length) ||
+      new Set(environments.map((env) => env.position)).size !== environments.length
+    )
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: "One or more of the positions specified is invalid. Positions must be sequential starting from 1."
+      });
+  });
+
+export const registerProjectTemplateRouter = async (server: FastifyZodProvider) => {
+  server.route({
+    method: "GET",
+    url: "/",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      description: "List project templates for the current organization.",
+      response: {
+        200: z.object({
+          projectTemplates: SanitizedProjectTemplateSchema.array()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+    handler: async (req) => {
+      const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission);
+
+      const auditTemplates = projectTemplates.filter((template) => !isInfisicalProjectTemplate(template.name));
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
+        orgId: req.permission.orgId,
+        event: {
+          type: EventType.GET_PROJECT_TEMPLATES,
+          metadata: {
+            count: auditTemplates.length,
+            templateIds: auditTemplates.map((template) => template.id)
+          }
+        }
+      });
+
+      return { projectTemplates };
+    }
+  });
+
+  server.route({
+    method: "GET",
+    url: "/:templateId",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      description: "Get a project template by ID.",
+      params: z.object({
+        templateId: z.string().uuid()
+      }),
+      response: {
+        200: z.object({
+          projectTemplate: SanitizedProjectTemplateSchema
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
+    handler: async (req) => {
+      const projectTemplate = await server.services.projectTemplate.findProjectTemplateById(
+        req.params.templateId,
+        req.permission
+      );
+
+      await server.services.auditLog.createAuditLog({
+        ...req.auditLogInfo,
|
||||||
|
orgId: req.permission.orgId,
|
||||||
|
event: {
|
||||||
|
type: EventType.GET_PROJECT_TEMPLATE,
|
||||||
|
metadata: {
|
||||||
|
templateId: req.params.templateId
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return { projectTemplate };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
server.route({
|
||||||
|
method: "POST",
|
||||||
|
url: "/",
|
||||||
|
config: {
|
||||||
|
rateLimit: writeLimit
|
||||||
|
},
|
||||||
|
schema: {
|
||||||
|
description: "Create a project template.",
|
||||||
|
body: z.object({
|
||||||
|
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
|
||||||
|
message: `The requested project template name is reserved.`
|
||||||
|
}).describe(ProjectTemplates.CREATE.name),
|
||||||
|
description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
|
||||||
|
roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
|
||||||
|
environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
|
||||||
|
ProjectTemplates.CREATE.environments
|
||||||
|
)
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
projectTemplate: SanitizedProjectTemplateSchema
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
|
handler: async (req) => {
|
||||||
|
const projectTemplate = await server.services.projectTemplate.createProjectTemplate(req.body, req.permission);
|
||||||
|
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
...req.auditLogInfo,
|
||||||
|
orgId: req.permission.orgId,
|
||||||
|
event: {
|
||||||
|
type: EventType.CREATE_PROJECT_TEMPLATE,
|
||||||
|
metadata: req.body
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return { projectTemplate };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
server.route({
|
||||||
|
method: "PATCH",
|
||||||
|
url: "/:templateId",
|
||||||
|
config: {
|
||||||
|
rateLimit: writeLimit
|
||||||
|
},
|
||||||
|
schema: {
|
||||||
|
description: "Update a project template.",
|
||||||
|
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
|
||||||
|
body: z.object({
|
||||||
|
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
|
||||||
|
message: `The requested project template name is reserved.`
|
||||||
|
})
|
||||||
|
.optional()
|
||||||
|
.describe(ProjectTemplates.UPDATE.name),
|
||||||
|
description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),
|
||||||
|
roles: ProjectTemplateRolesSchema.optional().describe(ProjectTemplates.UPDATE.roles),
|
||||||
|
environments: ProjectTemplateEnvironmentsSchema.optional().describe(ProjectTemplates.UPDATE.environments)
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
projectTemplate: SanitizedProjectTemplateSchema
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
|
handler: async (req) => {
|
||||||
|
const projectTemplate = await server.services.projectTemplate.updateProjectTemplateById(
|
||||||
|
req.params.templateId,
|
||||||
|
req.body,
|
||||||
|
req.permission
|
||||||
|
);
|
||||||
|
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
...req.auditLogInfo,
|
||||||
|
orgId: req.permission.orgId,
|
||||||
|
event: {
|
||||||
|
type: EventType.UPDATE_PROJECT_TEMPLATE,
|
||||||
|
metadata: {
|
||||||
|
templateId: req.params.templateId,
|
||||||
|
...req.body
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return { projectTemplate };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
server.route({
|
||||||
|
method: "DELETE",
|
||||||
|
url: "/:templateId",
|
||||||
|
config: {
|
||||||
|
rateLimit: writeLimit
|
||||||
|
},
|
||||||
|
schema: {
|
||||||
|
description: "Delete a project template.",
|
||||||
|
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.DELETE.templateId) }),
|
||||||
|
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
projectTemplate: SanitizedProjectTemplateSchema
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||||
|
handler: async (req) => {
|
||||||
|
const projectTemplate = await server.services.projectTemplate.deleteProjectTemplateById(
|
||||||
|
req.params.templateId,
|
||||||
|
req.permission
|
||||||
|
);
|
||||||
|
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
...req.auditLogInfo,
|
||||||
|
orgId: req.permission.orgId,
|
||||||
|
event: {
|
||||||
|
type: EventType.DELETE_PROJECT_TEMPLATE,
|
||||||
|
metadata: {
|
||||||
|
templateId: req.params.templateId
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return { projectTemplate };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
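For reference, a minimal request body that would pass the validation above might look like the sketch below. The values and the assumption that the router is mounted under a v1 project-templates prefix are illustrative only; role permission objects, when supplied, must conform to ProjectPermissionV2Schema.

// Hypothetical payload for creating a project template (illustrative values only).
// Slugs must be lowercase, at most 32 chars, and environment positions must be 1..N with no gaps.
const exampleCreateTemplateBody = {
  name: "web-service-template", // must not collide with reserved Infisical template names
  description: "Baseline template for new web service projects",
  roles: [], // defaults to []; custom roles must avoid reserved slugs/names such as "admin"
  environments: [
    { name: "Development", slug: "dev", position: 1 },
    { name: "Staging", slug: "staging", position: 2 },
    { name: "Production", slug: "prod", position: 3 }
  ]
};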
@@ -186,7 +186,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
       }),
       response: {
         200: z.object({
-          roles: ProjectRolesSchema.omit({ permissions: true }).array()
+          roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array()
         })
       }
     },
@@ -219,7 +219,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
       }),
       response: {
         200: z.object({
-          role: SanitizedRoleSchema
+          role: SanitizedRoleSchema.omit({ version: true })
        })
       }
     },
@@ -1,3 +1,7 @@
+import {
+  TCreateProjectTemplateDTO,
+  TUpdateProjectTemplateDTO
+} from "@app/ee/services/project-template/project-template-types";
 import { SymmetricEncryption } from "@app/lib/crypto/cipher";
 import { TProjectPermission } from "@app/lib/types";
 import { ActorType } from "@app/services/auth/auth-type";
@@ -192,7 +196,13 @@ export enum EventType {
   CMEK_ENCRYPT = "cmek-encrypt",
   CMEK_DECRYPT = "cmek-decrypt",
   UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
-  GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping"
+  GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping",
+  GET_PROJECT_TEMPLATES = "get-project-templates",
+  GET_PROJECT_TEMPLATE = "get-project-template",
+  CREATE_PROJECT_TEMPLATE = "create-project-template",
+  UPDATE_PROJECT_TEMPLATE = "update-project-template",
+  DELETE_PROJECT_TEMPLATE = "delete-project-template",
+  APPLY_PROJECT_TEMPLATE = "apply-project-template"
 }

 interface UserActorMetadata {
@@ -1618,6 +1628,46 @@ interface UpdateExternalGroupOrgRoleMappingsEvent {
   };
 }

+interface GetProjectTemplatesEvent {
+  type: EventType.GET_PROJECT_TEMPLATES;
+  metadata: {
+    count: number;
+    templateIds: string[];
+  };
+}
+
+interface GetProjectTemplateEvent {
+  type: EventType.GET_PROJECT_TEMPLATE;
+  metadata: {
+    templateId: string;
+  };
+}
+
+interface CreateProjectTemplateEvent {
+  type: EventType.CREATE_PROJECT_TEMPLATE;
+  metadata: TCreateProjectTemplateDTO;
+}
+
+interface UpdateProjectTemplateEvent {
+  type: EventType.UPDATE_PROJECT_TEMPLATE;
+  metadata: TUpdateProjectTemplateDTO & { templateId: string };
+}
+
+interface DeleteProjectTemplateEvent {
+  type: EventType.DELETE_PROJECT_TEMPLATE;
+  metadata: {
+    templateId: string;
+  };
+}
+
+interface ApplyProjectTemplateEvent {
+  type: EventType.APPLY_PROJECT_TEMPLATE;
+  metadata: {
+    template: string;
+    projectId: string;
+  };
+}
+
 export type Event =
   | GetSecretsEvent
   | GetSecretEvent
@@ -1766,4 +1816,10 @@ export type Event =
   | CmekEncryptEvent
   | CmekDecryptEvent
   | GetExternalGroupOrgRoleMappingsEvent
-  | UpdateExternalGroupOrgRoleMappingsEvent;
+  | UpdateExternalGroupOrgRoleMappingsEvent
+  | GetProjectTemplatesEvent
+  | GetProjectTemplateEvent
+  | CreateProjectTemplateEvent
+  | UpdateProjectTemplateEvent
+  | DeleteProjectTemplateEvent
+  | ApplyProjectTemplateEvent;
@@ -171,6 +171,7 @@ export const certificateEstServiceFactory = ({
       });
     }

+    if (!estConfig.disableBootstrapCertValidation) {
       const caCerts = estConfig.caChain
         .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
         ?.map((cert) => {
@@ -193,6 +194,7 @@ export const certificateEstServiceFactory = ({
       if (!(await isCertChainValid([certObj, ...caCerts]))) {
         throw new BadRequestError({ message: "Invalid certificate chain" });
       }
+    }

     const { certificate } = await certificateAuthorityService.signCertFromCa({
       isInternal: true,
backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string) => {
  const appCfg = getConfig();
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

  if (
    appCfg.isCloud &&
    // localhost
    // internal ips
    (host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new BadRequestError({ message: "Invalid db host" });

  if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
    throw new BadRequestError({ message: "Invalid db host" });
  }
};
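A minimal usage sketch of this shared helper, assuming it is called from a provider's validateProviderInputs as in the provider hunks further down (the caller below is hypothetical):

// Illustrative only: reject loopback/private hosts before connecting to a customer database.
import { verifyHostInputValidity } from "./dynamic-secret-fns";

const connectToProvider = (host: string) => {
  verifyHostInputValidity(host); // throws BadRequestError for localhost, 10.x / 192.168.x, or the Infisical DB host
  // ...proceed to open the provider connection with the validated host
};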
@@ -9,7 +9,7 @@ import {
 } from "@app/ee/services/permission/project-permission";
 import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
 import { BadRequestError, NotFoundError } from "@app/lib/errors";
-import { OrderByDirection } from "@app/lib/types";
+import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -22,6 +22,7 @@ import {
   TDeleteDynamicSecretDTO,
   TDetailsDynamicSecretDTO,
   TGetDynamicSecretsCountDTO,
+  TListDynamicSecretsByFolderMappingsDTO,
   TListDynamicSecretsDTO,
   TListDynamicSecretsMultiEnvDTO,
   TUpdateDynamicSecretDTO
@@ -454,8 +455,44 @@ export const dynamicSecretServiceFactory = ({
     return dynamicSecretCfg;
   };

+  const listDynamicSecretsByFolderIds = async (
+    { folderMappings, filters, projectId }: TListDynamicSecretsByFolderMappingsDTO,
+    actor: OrgServiceActor
+  ) => {
+    const { permission } = await permissionService.getProjectPermission(
+      actor.type,
+      actor.id,
+      projectId,
+      actor.authMethod,
+      actor.orgId
+    );
+
+    const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) =>
+      permission.can(
+        ProjectPermissionDynamicSecretActions.ReadRootCredential,
+        subject(ProjectPermissionSub.DynamicSecrets, { environment, secretPath: path })
+      )
+    );
+
+    const groupedFolderMappings = new Map(userAccessibleFolderMappings.map((path) => [path.folderId, path]));
+
+    const dynamicSecrets = await dynamicSecretDAL.listDynamicSecretsByFolderIds({
+      folderIds: userAccessibleFolderMappings.map(({ folderId }) => folderId),
+      ...filters
+    });
+
+    return dynamicSecrets.map((dynamicSecret) => {
+      const { environment, path } = groupedFolderMappings.get(dynamicSecret.folderId)!;
+      return {
+        ...dynamicSecret,
+        environment,
+        path
+      };
+    });
+  };
+
   // get dynamic secrets for multiple envs
-  const listDynamicSecretsByFolderIds = async ({
+  const listDynamicSecretsByEnvs = async ({
     actorAuthMethod,
     actorOrgId,
     actorId,
@@ -521,9 +558,10 @@ export const dynamicSecretServiceFactory = ({
     deleteByName,
     getDetails,
     listDynamicSecretsByEnv,
-    listDynamicSecretsByFolderIds,
+    listDynamicSecretsByEnvs,
     getDynamicSecretCount,
     getCountMultiEnv,
-    fetchAzureEntraIdUsers
+    fetchAzureEntraIdUsers,
+    listDynamicSecretsByFolderIds
   };
 };
@@ -48,17 +48,27 @@ export type TDetailsDynamicSecretDTO = {
   projectSlug: string;
 } & Omit<TProjectPermission, "projectId">;

-export type TListDynamicSecretsDTO = {
-  path: string;
-  environmentSlug: string;
-  projectSlug?: string;
-  projectId?: string;
+export type ListDynamicSecretsFilters = {
   offset?: number;
   limit?: number;
   orderBy?: SecretsOrderBy;
   orderDirection?: OrderByDirection;
   search?: string;
-} & Omit<TProjectPermission, "projectId">;
+};
+
+export type TListDynamicSecretsDTO = {
+  path: string;
+  environmentSlug: string;
+  projectSlug?: string;
+  projectId?: string;
+} & ListDynamicSecretsFilters &
+  Omit<TProjectPermission, "projectId">;
+
+export type TListDynamicSecretsByFolderMappingsDTO = {
+  projectId: string;
+  folderMappings: { folderId: string; path: string; environment: string }[];
+  filters: ListDynamicSecretsFilters;
+};

 export type TListDynamicSecretsMultiEnvDTO = Omit<
   TListDynamicSecretsDTO,
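As a sketch, a caller of the new listDynamicSecretsByFolderIds would pass a value shaped like the following; all IDs and filter values are placeholders:

// Hypothetical argument for listDynamicSecretsByFolderIds (placeholder IDs and filters).
const exampleDto: TListDynamicSecretsByFolderMappingsDTO = {
  projectId: "project-id",
  folderMappings: [{ folderId: "folder-id", path: "/", environment: "dev" }],
  filters: { limit: 20, offset: 0, search: "db" }
};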
@@ -2,10 +2,9 @@ import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";

-import { getConfig } from "@app/lib/config/env";
-import { BadRequestError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

+import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";

 const generatePassword = () => {
@@ -19,23 +18,8 @@ const generateUsername = () => {
 export const ElasticSearchProvider = (): TDynamicProviderFns => {
   const validateProviderInputs = async (inputs: unknown) => {
-    const appCfg = getConfig();
-    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
-
     const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
-    if (
-      isCloud &&
-      // localhost
-      // internal ips
-      (providerInputs.host === "host.docker.internal" ||
-        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
-        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
-    ) {
-      throw new BadRequestError({ message: "Invalid db host" });
-    }
-    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
-      throw new BadRequestError({ message: "Invalid db host" });
-    }
+    verifyHostInputValidity(providerInputs.host);

     return providerInputs;
   };
@@ -1,3 +1,5 @@
+import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";
+
 import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
 import { AwsIamProvider } from "./aws-iam";
 import { AzureEntraIDProvider } from "./azure-entra-id";
@@ -9,6 +11,7 @@ import { MongoAtlasProvider } from "./mongo-atlas";
 import { MongoDBProvider } from "./mongo-db";
 import { RabbitMqProvider } from "./rabbit-mq";
 import { RedisDatabaseProvider } from "./redis";
+import { SapHanaProvider } from "./sap-hana";
 import { SqlDatabaseProvider } from "./sql-database";

 export const buildDynamicSecretProviders = () => ({
@@ -22,5 +25,7 @@ export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
   [DynamicSecretProviders.RabbitMq]: RabbitMqProvider(),
   [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
-  [DynamicSecretProviders.Ldap]: LdapProvider()
+  [DynamicSecretProviders.Ldap]: LdapProvider(),
+  [DynamicSecretProviders.SapHana]: SapHanaProvider(),
+  [DynamicSecretProviders.Snowflake]: SnowflakeProvider()
 });
@@ -7,7 +7,7 @@ import { z } from "zod";
 import { BadRequestError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

-import { LdapSchema, TDynamicProviderFns } from "./models";
+import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";

 const generatePassword = () => {
   const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@@ -193,6 +193,28 @@ export const LdapProvider = (): TDynamicProviderFns => {
     const providerInputs = await validateProviderInputs(inputs);
     const client = await getClient(providerInputs);

+    if (providerInputs.credentialType === LdapCredentialType.Static) {
+      const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
+
+      if (dnMatch) {
+        const username = dnMatch[1];
+        const password = generatePassword();
+
+        const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });
+
+        try {
+          const dnArray = await executeLdif(client, generatedLdif);
+
+          return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
+        } catch (err) {
+          throw new BadRequestError({ message: (err as Error).message });
+        }
+      } else {
+        throw new BadRequestError({
+          message: "Invalid rotation LDIF, missing DN."
+        });
+      }
+    } else {
     const username = generateUsername();
     const password = generatePassword();
     const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });
@@ -208,14 +230,39 @@ export const LdapProvider = (): TDynamicProviderFns => {
       }
       throw new BadRequestError({ message: (err as Error).message });
     }
+    }
   };

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const client = await getClient(providerInputs);
+
+    if (providerInputs.credentialType === LdapCredentialType.Static) {
+      const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
+
+      if (dnMatch) {
+        const username = dnMatch[1];
+        const password = generatePassword();
+
+        const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });
+
+        try {
+          const dnArray = await executeLdif(client, generatedLdif);
+
+          return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
+        } catch (err) {
+          throw new BadRequestError({ message: (err as Error).message });
+        }
+      } else {
+        throw new BadRequestError({
+          message: "Invalid rotation LDIF, missing DN."
+        });
+      }
+    }

     const revocationLdif = generateLDIF({ username: entityId, ldifTemplate: providerInputs.revocationLdif });

-    await executeLdif(connection, revocationLdif);
+    await executeLdif(client, revocationLdif);

     return { entityId };
   };
@@ -12,6 +12,11 @@ export enum ElasticSearchAuthTypes {
   ApiKey = "api-key"
 }

+export enum LdapCredentialType {
+  Dynamic = "dynamic",
+  Static = "static"
+}
+
 export const DynamicSecretRedisDBSchema = z.object({
   host: z.string().trim().toLowerCase(),
   port: z.number(),
@@ -166,6 +171,27 @@ export const DynamicSecretMongoDBSchema = z.object({
   )
 });

+export const DynamicSecretSapHanaSchema = z.object({
+  host: z.string().trim().toLowerCase(),
+  port: z.number(),
+  username: z.string().trim(),
+  password: z.string().trim(),
+  creationStatement: z.string().trim(),
+  revocationStatement: z.string().trim(),
+  renewStatement: z.string().trim().optional(),
+  ca: z.string().optional()
+});
+
+export const DynamicSecretSnowflakeSchema = z.object({
+  accountId: z.string().trim().min(1),
+  orgId: z.string().trim().min(1),
+  username: z.string().trim().min(1),
+  password: z.string().trim().min(1),
+  creationStatement: z.string().trim().min(1),
+  revocationStatement: z.string().trim().min(1),
+  renewStatement: z.string().trim().optional()
+});
+
 export const AzureEntraIDSchema = z.object({
   tenantId: z.string().trim().min(1),
   userId: z.string().trim().min(1),
@@ -174,16 +200,26 @@ export const AzureEntraIDSchema = z.object({
   clientSecret: z.string().trim().min(1)
 });

-export const LdapSchema = z.object({
-  url: z.string().trim().min(1),
-  binddn: z.string().trim().min(1),
-  bindpass: z.string().trim().min(1),
-  ca: z.string().optional(),
-  creationLdif: z.string().min(1),
-  revocationLdif: z.string().min(1),
-  rollbackLdif: z.string().optional()
-});
+export const LdapSchema = z.union([
+  z.object({
+    url: z.string().trim().min(1),
+    binddn: z.string().trim().min(1),
+    bindpass: z.string().trim().min(1),
+    ca: z.string().optional(),
+    credentialType: z.literal(LdapCredentialType.Dynamic).optional().default(LdapCredentialType.Dynamic),
+    creationLdif: z.string().min(1),
+    revocationLdif: z.string().min(1),
+    rollbackLdif: z.string().optional()
+  }),
+  z.object({
+    url: z.string().trim().min(1),
+    binddn: z.string().trim().min(1),
+    bindpass: z.string().trim().min(1),
+    ca: z.string().optional(),
+    credentialType: z.literal(LdapCredentialType.Static),
+    rotationLdif: z.string().min(1)
+  })
+]);

 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
@@ -196,7 +232,9 @@ export enum DynamicSecretProviders {
   MongoDB = "mongo-db",
   RabbitMq = "rabbit-mq",
   AzureEntraID = "azure-entra-id",
-  Ldap = "ldap"
+  Ldap = "ldap",
+  SapHana = "sap-hana",
+  Snowflake = "snowflake"
 }

 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -204,13 +242,15 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
 ]);

 export type TDynamicProviderFns = {
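A quick sketch of how the new static-credential variant of LdapSchema parses; every field value below is a placeholder:

// Illustrative parse of the static-credential LDAP configuration (placeholder values).
const staticLdapConfig = LdapSchema.parse({
  url: "ldaps://ldap.example.com",
  binddn: "cn=admin,dc=example,dc=com",
  bindpass: "bind-password",
  credentialType: LdapCredentialType.Static,
  rotationLdif: "dn: cn=svc-user,dc=example,dc=com\nchangetype: modify\nreplace: userPassword\nuserPassword: {{password}}"
});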
@@ -2,10 +2,9 @@ import { MongoClient } from "mongodb";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";

-import { getConfig } from "@app/lib/config/env";
-import { BadRequestError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

+import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";

 const generatePassword = (size = 48) => {
@@ -19,22 +18,8 @@ const generateUsername = () => {
 export const MongoDBProvider = (): TDynamicProviderFns => {
   const validateProviderInputs = async (inputs: unknown) => {
-    const appCfg = getConfig();
     const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
-    if (
-      appCfg.isCloud &&
-      // localhost
-      // internal ips
-      (providerInputs.host === "host.docker.internal" ||
-        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
-        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
-    )
-      throw new BadRequestError({ message: "Invalid db host" });
-
-    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
-      throw new BadRequestError({ message: "Invalid db host" });
-    }
+    verifyHostInputValidity(providerInputs.host);

     return providerInputs;
   };
|
|||||||
import { customAlphabet } from "nanoid";
|
import { customAlphabet } from "nanoid";
|
||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
|
|
||||||
import { getConfig } from "@app/lib/config/env";
|
|
||||||
import { BadRequestError } from "@app/lib/errors";
|
|
||||||
import { removeTrailingSlash } from "@app/lib/fn";
|
import { removeTrailingSlash } from "@app/lib/fn";
|
||||||
import { logger } from "@app/lib/logger";
|
import { logger } from "@app/lib/logger";
|
||||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||||
|
|
||||||
|
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||||
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
|
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
|
||||||
|
|
||||||
const generatePassword = () => {
|
const generatePassword = () => {
|
||||||
@@ -79,23 +78,8 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
|
|||||||
|
|
||||||
export const RabbitMqProvider = (): TDynamicProviderFns => {
|
export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||||
const validateProviderInputs = async (inputs: unknown) => {
|
const validateProviderInputs = async (inputs: unknown) => {
|
||||||
const appCfg = getConfig();
|
|
||||||
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
|
|
||||||
|
|
||||||
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
|
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
|
||||||
if (
|
verifyHostInputValidity(providerInputs.host);
|
||||||
isCloud &&
|
|
||||||
// localhost
|
|
||||||
// internal ips
|
|
||||||
(providerInputs.host === "host.docker.internal" ||
|
|
||||||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
|
|
||||||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
|
|
||||||
) {
|
|
||||||
throw new BadRequestError({ message: "Invalid db host" });
|
|
||||||
}
|
|
||||||
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
|
|
||||||
throw new BadRequestError({ message: "Invalid db host" });
|
|
||||||
}
|
|
||||||
|
|
||||||
return providerInputs;
|
return providerInputs;
|
||||||
};
|
};
|
||||||
|
@@ -3,11 +3,10 @@ import { Redis } from "ioredis";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";

-import { getConfig } from "@app/lib/config/env";
 import { BadRequestError } from "@app/lib/errors";
-import { getDbConnectionHost } from "@app/lib/knex";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

+import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";

 const generatePassword = () => {
@@ -51,22 +50,8 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
 export const RedisDatabaseProvider = (): TDynamicProviderFns => {
   const validateProviderInputs = async (inputs: unknown) => {
-    const appCfg = getConfig();
-    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
-    const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
-
     const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
-    if (
-      isCloud &&
-      // localhost
-      // internal ips
-      (providerInputs.host === "host.docker.internal" ||
-        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
-        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
-    )
-      throw new BadRequestError({ message: "Invalid db host" });
-    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1" || dbHost === providerInputs.host)
-      throw new BadRequestError({ message: "Invalid db host" });
+    verifyHostInputValidity(providerInputs.host);

     return providerInputs;
   };
backend/src/ee/services/dynamic-secret/providers/sap-hana.ts (new file, 174 lines)
@@ -0,0 +1,174 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */

import handlebars from "handlebars";
import hdb from "hdb";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return alphaNumericNanoId(32);
};

export const SapHanaProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
    const client = hdb.createClient({
      host: providerInputs.host,
      port: providerInputs.port,
      user: providerInputs.username,
      password: providerInputs.password,
      ...(providerInputs.ca
        ? {
            ca: providerInputs.ca
          }
        : {})
    });

    await new Promise((resolve, reject) => {
      client.connect((err: any) => {
        if (err) {
          return reject(err);
        }

        if (client.readyState) {
          return resolve(true);
        }

        reject(new Error("SAP HANA client not ready"));
      });
    });

    return client;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const testResult: boolean = await new Promise((resolve, reject) => {
      client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
        if (err) {
          reject();
        }

        resolve(true);
      });
    });

    return testResult;
  };

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);

    const username = generateUsername();
    const password = generatePassword();
    const expiration = new Date(expireAt).toISOString();

    const client = await getClient(providerInputs);
    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username,
      password,
      expiration
    });

    const queries = creationStatement.toString().split(";").filter(Boolean);
    for await (const query of queries) {
      await new Promise((resolve, reject) => {
        client.exec(query, (err: any) => {
          if (err) {
            reject(
              new BadRequestError({
                message: err.message
              })
            );
          }
          resolve(true);
        });
      });
    }

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
    const queries = revokeStatement.toString().split(";").filter(Boolean);
    for await (const query of queries) {
      await new Promise((resolve, reject) => {
        client.exec(query, (err: any) => {
          if (err) {
            reject(
              new BadRequestError({
                message: err.message
              })
            );
          }
          resolve(true);
        });
      });
    }

    return { entityId: username };
  };

  const renew = async (inputs: unknown, username: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);
    try {
      const expiration = new Date(expireAt).toISOString();

      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
      const queries = renewStatement.toString().split(";").filter(Boolean);
      for await (const query of queries) {
        await new Promise((resolve, reject) => {
          client.exec(query, (err: any) => {
            if (err) {
              reject(
                new BadRequestError({
                  message: err.message
                })
              );
            }
            resolve(true);
          });
        });
      }
    } finally {
      client.disconnect();
    }

    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
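For illustration only, a creationStatement template compatible with the handlebars variables used above ({{username}}, {{password}}, ISO-date {{expiration}}) could look like the following; the exact SQL is an assumption and is not part of this PR:

// Hypothetical SAP HANA creation statement template (illustrative SQL, not from this change set).
const exampleSapHanaCreationStatement =
  "CREATE USER {{username}} PASSWORD \"{{password}}\" NO FORCE_FIRST_PASSWORD_CHANGE VALID UNTIL '{{expiration}}';";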
backend/src/ee/services/dynamic-secret/providers/snowflake.ts (new file, 174 lines)
@@ -0,0 +1,174 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import snowflake from "snowflake-sdk";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";

// destroy client requires callback...
const noop = () => {};

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
};

const getDaysToExpiry = (expiryDate: Date) => {
  const start = new Date().getTime();
  const end = new Date(expiryDate).getTime();
  const diffTime = Math.abs(end - start);

  return Math.ceil(diffTime / (1000 * 60 * 60 * 24));
};

export const SnowflakeProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
    return providerInputs;
  };

  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
    const client = snowflake.createConnection({
      account: `${providerInputs.orgId}-${providerInputs.accountId}`,
      username: providerInputs.username,
      password: providerInputs.password,
      application: "Infisical"
    });

    await client.connectAsync(noop);

    return client;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    let isValidConnection: boolean;

    try {
      isValidConnection = await Promise.race([
        client.isValidAsync(),
        new Promise((resolve) => {
          setTimeout(resolve, 10000);
        }).then(() => {
          throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
        })
      ]);
    } finally {
      client.destroy(noop);
    }

    return isValidConnection;
  };

  const create = async (inputs: unknown, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);

    const client = await getClient(providerInputs);

    const username = generateUsername();
    const password = generatePassword();

    try {
      const expiration = getDaysToExpiry(new Date(expireAt));
      const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
        username,
        password,
        expiration
      });

      await new Promise((resolve, reject) => {
        client.execute({
          sqlText: creationStatement,
          complete(err) {
            if (err) {
              return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
            }

            return resolve(true);
          }
        });
      });
    } finally {
      client.destroy(noop);
    }

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);

    const client = await getClient(providerInputs);

    try {
      const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });

      await new Promise((resolve, reject) => {
        client.execute({
          sqlText: revokeStatement,
          complete(err) {
            if (err) {
              return reject(new BadRequestError({ name: "RevokeLease", message: err.message }));
            }

            return resolve(true);
          }
        });
      });
    } finally {
      client.destroy(noop);
    }

    return { entityId: username };
  };

  const renew = async (inputs: unknown, username: string, expireAt: number) => {
    const providerInputs = await validateProviderInputs(inputs);

    if (!providerInputs.renewStatement) return { entityId: username };

    const client = await getClient(providerInputs);

    try {
      const expiration = getDaysToExpiry(new Date(expireAt));
      const renewStatement = handlebars.compile(providerInputs.renewStatement)({
        username,
        expiration
      });

      await new Promise((resolve, reject) => {
        client.execute({
          sqlText: renewStatement,
          complete(err) {
            if (err) {
              return reject(new BadRequestError({ name: "RenewLease", message: err.message }));
            }

            return resolve(true);
          }
        });
      });
    } finally {
      client.destroy(noop);
    }

    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
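Similarly, a Snowflake creationStatement could use the day-based expiration produced by getDaysToExpiry above; the SQL below is an assumption for illustration and is not part of this PR:

// Hypothetical Snowflake creation statement template ({{expiration}} is a day count here, not a date).
const exampleSnowflakeCreationStatement =
  "CREATE USER {{username}} PASSWORD = '{{password}}' DEFAULT_ROLE = PUBLIC DAYS_TO_EXPIRY = {{expiration}};";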
@@ -3,11 +3,9 @@ import knex from "knex";
 import { customAlphabet } from "nanoid";
 import { z } from "zod";

-import { getConfig } from "@app/lib/config/env";
-import { BadRequestError } from "@app/lib/errors";
-import { getDbConnectionHost } from "@app/lib/knex";
 import { alphaNumericNanoId } from "@app/lib/nanoid";

+import { verifyHostInputValidity } from "../dynamic-secret-fns";
 import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";

 const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -29,27 +27,8 @@ const generateUsername = (provider: SqlProviders) => {
 export const SqlDatabaseProvider = (): TDynamicProviderFns => {
   const validateProviderInputs = async (inputs: unknown) => {
-    const appCfg = getConfig();
-    const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
-    const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
-
     const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
-    if (
-      isCloud &&
-      // localhost
-      // internal ips
-      (providerInputs.host === "host.docker.internal" ||
-        providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
-        providerInputs.host.match(/^192\.168\.\d+\.\d+/))
-    )
-      throw new BadRequestError({ message: "Invalid db host" });
-    if (
-      providerInputs.host === "localhost" ||
-      providerInputs.host === "127.0.0.1" ||
-      // database infisical uses
-      dbHost === providerInputs.host
-    )
-      throw new BadRequestError({ message: "Invalid db host" });
+    verifyHostInputValidity(providerInputs.host);

     return providerInputs;
   };
@@ -123,7 +123,7 @@ export const groupServiceFactory = ({
     const plan = await licenseService.getPlan(actorOrgId);
     if (!plan.groups)
       throw new BadRequestError({
-        message: "Failed to update group due to plan restrictio Upgrade plan to update group."
+        message: "Failed to update group due to plan restriction Upgrade plan to update group."
       });

     const group = await groupDAL.findOne({ orgId: actorOrgId, id });
backend/src/ee/services/hsm/hsm-fns.ts  (new file, 58 lines)
@@ -0,0 +1,58 @@
import * as pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmModule } from "./hsm-types";

export const initializeHsmModule = () => {
  const appCfg = getConfig();

  // Create a new instance of PKCS11 module
  const pkcs11 = new pkcs11js.PKCS11();
  let isInitialized = false;

  const initialize = () => {
    if (!appCfg.isHsmConfigured) {
      return;
    }

    try {
      // Load the PKCS#11 module
      pkcs11.load(appCfg.HSM_LIB_PATH!);

      // Initialize the module
      pkcs11.C_Initialize();
      isInitialized = true;

      logger.info("PKCS#11 module initialized");
    } catch (err) {
      logger.error("Failed to initialize PKCS#11 module:", err);
      throw err;
    }
  };

  const finalize = () => {
    if (isInitialized) {
      try {
        pkcs11.C_Finalize();
        isInitialized = false;
        logger.info("PKCS#11 module finalized");
      } catch (err) {
        logger.error("Failed to finalize PKCS#11 module:", err);
        throw err;
      }
    }
  };

  const getModule = (): HsmModule => ({
    pkcs11,
    isInitialized
  });

  return {
    initialize,
    finalize,
    getModule
  };
};
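// Illustrative usage sketch (not part of the diff above): how the HSM module factory
// would typically be wired into server startup and shutdown. The bootstrap/shutdown
// hooks shown here are assumptions for illustration, not code from this PR.
const hsmModule = initializeHsmModule();
hsmModule.initialize(); // no-op when the HSM env vars are not configured

// ... pass hsmModule.getModule() to hsmServiceFactory and start the server ...

process.on("SIGTERM", () => {
  hsmModule.finalize(); // releases the PKCS#11 library before exit
});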
backend/src/ee/services/hsm/hsm-service.ts  (new file, 470 lines)
@@ -0,0 +1,470 @@
import pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmKeyType, HsmModule } from "./hsm-types";

type THsmServiceFactoryDep = {
  hsmModule: HsmModule;
};

export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;

type SyncOrAsync<T> = T | Promise<T>;
type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;

// eslint-disable-next-line no-empty-pattern
export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
  const appCfg = getConfig();

  // Constants for buffer structures
  const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
  const BLOCK_SIZE = 16;
  const HMAC_SIZE = 32;

  const AES_KEY_SIZE = 256;
  const HMAC_KEY_SIZE = 256;

  const $withSession = async <T>(callbackWithSession: SessionCallback<T>): Promise<T> => {
    const RETRY_INTERVAL = 200; // 200ms between attempts
    const MAX_TIMEOUT = 90_000; // 90 seconds maximum total time

    let sessionHandle: pkcs11js.Handle | null = null;

    const removeSession = () => {
      if (sessionHandle !== null) {
        try {
          pkcs11.C_Logout(sessionHandle);
          pkcs11.C_CloseSession(sessionHandle);
          logger.info("HSM: Terminated session successfully");
        } catch (error) {
          logger.error(error, "HSM: Failed to terminate session");
        } finally {
          sessionHandle = null;
        }
      }
    };

    try {
      if (!pkcs11 || !isInitialized) {
        throw new Error("PKCS#11 module is not initialized");
      }

      // Get slot list
      let slots: pkcs11js.Handle[];
      try {
        slots = pkcs11.C_GetSlotList(false); // false to get all slots
      } catch (error) {
        throw new Error(`Failed to get slot list: ${(error as Error)?.message}`);
      }

      if (slots.length === 0) {
        throw new Error("No slots available");
      }

      if (appCfg.HSM_SLOT >= slots.length) {
        throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
      }

      const slotId = slots[appCfg.HSM_SLOT];

      const startTime = Date.now();
      while (Date.now() - startTime < MAX_TIMEOUT) {
        try {
          // Open session
          // eslint-disable-next-line no-bitwise
          sessionHandle = pkcs11.C_OpenSession(slotId, pkcs11js.CKF_SERIAL_SESSION | pkcs11js.CKF_RW_SESSION);

          // Login
          try {
            pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
            logger.info("HSM: Successfully authenticated");
            break;
          } catch (error) {
            // Handle specific error cases
            if (error instanceof pkcs11js.Pkcs11Error) {
              if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
                // We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
                logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
                throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
              }
              if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
                logger.warn("HSM: Session already logged in");
              }
            }
            throw error; // Re-throw other errors
          }
        } catch (error) {
          logger.warn(`HSM: Session creation failed. Retrying... Error: ${(error as Error)?.message}`);

          if (sessionHandle !== null) {
            try {
              pkcs11.C_CloseSession(sessionHandle);
            } catch (closeError) {
              logger.error(closeError, "HSM: Failed to close session");
            }
            sessionHandle = null;
          }

          // Wait before retrying
          // eslint-disable-next-line no-await-in-loop
          await new Promise((resolve) => {
            setTimeout(resolve, RETRY_INTERVAL);
          });
        }
      }

      if (sessionHandle === null) {
        throw new Error("HSM: Failed to open session after maximum retries");
      }

      // Execute callback with session handle
      const result = await callbackWithSession(sessionHandle);
      removeSession();
      return result;
    } catch (error) {
      logger.error(error, "HSM: Failed to open session");
      throw error;
    } finally {
      // Ensure cleanup
      removeSession();
    }
  };

  const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
    const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
    const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;

    const template = [
      { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
      { type: pkcs11js.CKA_KEY_TYPE, value: keyType },
      { type: pkcs11js.CKA_LABEL, value: label }
    ];

    try {
      // Initialize search
      pkcs11.C_FindObjectsInit(sessionHandle, template);

      try {
        // Find first matching object
        const handles = pkcs11.C_FindObjects(sessionHandle, 1);

        if (handles.length === 0) {
          throw new Error("Failed to find master key");
        }

        return handles[0]; // Return the key handle
      } finally {
        // Always finalize the search operation
        pkcs11.C_FindObjectsFinal(sessionHandle);
      }
    } catch (error) {
      return null;
    }
  };

  const $keyExists = (session: pkcs11js.Handle, type: HsmKeyType): boolean => {
    try {
      const key = $findKey(session, type);
      // items(0) will throw an error if no items are found
      // Return true only if we got a valid object with handle
      return !!key && key.length > 0;
    } catch (error) {
      // If items(0) throws, it means no key was found
      // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-call
      logger.error(error, "HSM: Failed while checking for HSM key presence");

      if (error instanceof pkcs11js.Pkcs11Error) {
        if (error.code === pkcs11js.CKR_OBJECT_HANDLE_INVALID) {
          return false;
        }
      }

      return false;
    }
  };

  const encrypt: {
    (data: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>;
    (data: Buffer): Promise<Buffer>;
  } = async (data: Buffer, providedSession?: pkcs11js.Handle) => {
    if (!pkcs11 || !isInitialized) {
      throw new Error("PKCS#11 module is not initialized");
    }

    const $performEncryption = (sessionHandle: pkcs11js.Handle) => {
      try {
        const aesKey = $findKey(sessionHandle, HsmKeyType.AES);
        if (!aesKey) {
          throw new Error("HSM: Encryption failed, AES key not found");
        }

        const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC);
        if (!hmacKey) {
          throw new Error("HSM: Encryption failed, HMAC key not found");
        }

        const iv = Buffer.alloc(IV_LENGTH);
        pkcs11.C_GenerateRandom(sessionHandle, iv);

        const encryptMechanism = {
          mechanism: pkcs11js.CKM_AES_CBC_PAD,
          parameter: iv
        };

        pkcs11.C_EncryptInit(sessionHandle, encryptMechanism, aesKey);

        // Calculate max buffer size (input length + potential full block of padding)
        const maxEncryptedLength = Math.ceil(data.length / BLOCK_SIZE) * BLOCK_SIZE + BLOCK_SIZE;

        // Encrypt the data - this returns the encrypted data directly
        const encryptedData = pkcs11.C_Encrypt(sessionHandle, data, Buffer.alloc(maxEncryptedLength));

        // Initialize HMAC
        const hmacMechanism = {
          mechanism: pkcs11js.CKM_SHA256_HMAC
        };

        pkcs11.C_SignInit(sessionHandle, hmacMechanism, hmacKey);

        // Sign the IV and encrypted data
        pkcs11.C_SignUpdate(sessionHandle, iv);
        pkcs11.C_SignUpdate(sessionHandle, encryptedData);

        // Get the HMAC
        const hmac = Buffer.alloc(HMAC_SIZE);
        pkcs11.C_SignFinal(sessionHandle, hmac);

        // Combine encrypted data and HMAC [Encrypted Data | HMAC]
        const finalBuffer = Buffer.alloc(encryptedData.length + hmac.length);
        encryptedData.copy(finalBuffer);
        hmac.copy(finalBuffer, encryptedData.length);

        return Buffer.concat([iv, finalBuffer]);
      } catch (error) {
        logger.error(error, "HSM: Failed to perform encryption");
        throw new Error(`HSM: Encryption failed: ${(error as Error)?.message}`);
      }
    };

    if (providedSession) {
      return $performEncryption(providedSession);
    }

    const result = await $withSession($performEncryption);
    return result;
  };

  const decrypt: {
    (encryptedBlob: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>;
    (encryptedBlob: Buffer): Promise<Buffer>;
  } = async (encryptedBlob: Buffer, providedSession?: pkcs11js.Handle) => {
    if (!pkcs11 || !isInitialized) {
      throw new Error("PKCS#11 module is not initialized");
    }

    const $performDecryption = (sessionHandle: pkcs11js.Handle) => {
      try {
        // structure is: [IV (16 bytes) | Encrypted Data (N bytes) | HMAC (32 bytes)]
        const iv = encryptedBlob.subarray(0, IV_LENGTH);
        const encryptedDataWithHmac = encryptedBlob.subarray(IV_LENGTH);

        // Split encrypted data and HMAC
        const hmac = encryptedDataWithHmac.subarray(-HMAC_SIZE); // Last 32 bytes are HMAC

        const encryptedData = encryptedDataWithHmac.subarray(0, -HMAC_SIZE); // Everything except last 32 bytes

        // Find the keys
        const aesKey = $findKey(sessionHandle, HsmKeyType.AES);
        if (!aesKey) {
          throw new Error("HSM: Decryption failed, AES key not found");
        }

        const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC);
        if (!hmacKey) {
          throw new Error("HSM: Decryption failed, HMAC key not found");
        }

        // Verify HMAC first
        const hmacMechanism = {
          mechanism: pkcs11js.CKM_SHA256_HMAC
        };

        pkcs11.C_VerifyInit(sessionHandle, hmacMechanism, hmacKey);
        pkcs11.C_VerifyUpdate(sessionHandle, iv);
        pkcs11.C_VerifyUpdate(sessionHandle, encryptedData);

        try {
          pkcs11.C_VerifyFinal(sessionHandle, hmac);
        } catch (error) {
          logger.error(error, "HSM: HMAC verification failed");
          throw new Error("HSM: Decryption failed"); // Generic error for failed verification
        }

        // Only decrypt if verification passed
        const decryptMechanism = {
          mechanism: pkcs11js.CKM_AES_CBC_PAD,
          parameter: iv
        };

        pkcs11.C_DecryptInit(sessionHandle, decryptMechanism, aesKey);

        const tempBuffer = Buffer.alloc(encryptedData.length);
        const decryptedData = pkcs11.C_Decrypt(sessionHandle, encryptedData, tempBuffer);

        // Create a new buffer from the decrypted data
        return Buffer.from(decryptedData);
      } catch (error) {
        logger.error(error, "HSM: Failed to perform decryption");
        throw new Error("HSM: Decryption failed"); // Generic error for failed decryption, to avoid leaking details about why it failed (such as padding related errors)
      }
    };

    if (providedSession) {
      return $performDecryption(providedSession);
    }

    const result = await $withSession($performDecryption);
    return result;
  };

  // We test the core functionality of the PKCS#11 module that we are using throughout Infisical. This is to ensure that the user doesn't configure a faulty or unsupported HSM device.
  const $testPkcs11Module = async (session: pkcs11js.Handle) => {
    try {
      if (!pkcs11 || !isInitialized) {
        throw new Error("PKCS#11 module is not initialized");
      }

      if (!session) {
        throw new Error("HSM: Attempted to run test without a valid session");
      }

      const randomData = pkcs11.C_GenerateRandom(session, Buffer.alloc(500));

      const encryptedData = await encrypt(randomData, session);
      const decryptedData = await decrypt(encryptedData, session);

      const randomDataHex = randomData.toString("hex");
      const decryptedDataHex = decryptedData.toString("hex");

      if (randomDataHex !== decryptedDataHex && Buffer.compare(randomData, decryptedData)) {
        throw new Error("HSM: Startup test failed. Decrypted data does not match original data");
      }

      return true;
    } catch (error) {
      logger.error(error, "HSM: Error testing PKCS#11 module");
      return false;
    }
  };

  const isActive = async () => {
    if (!isInitialized || !appCfg.isHsmConfigured) {
      return false;
    }

    let pkcs11TestPassed = false;

    try {
      pkcs11TestPassed = await $withSession($testPkcs11Module);
    } catch (err) {
      logger.error(err, "HSM: Error testing PKCS#11 module");
    }

    return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
  };

  const startService = async () => {
    if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;

    try {
      await $withSession(async (sessionHandle) => {
        // Check if master key exists, create if not

        const genericAttributes = [
          { type: pkcs11js.CKA_TOKEN, value: true }, // Persistent storage
          { type: pkcs11js.CKA_EXTRACTABLE, value: false }, // Cannot be extracted
          { type: pkcs11js.CKA_SENSITIVE, value: true }, // Sensitive value
          { type: pkcs11js.CKA_PRIVATE, value: true } // Requires authentication
        ];

        if (!$keyExists(sessionHandle, HsmKeyType.AES)) {
          // Template for generating 256-bit AES master key
          const keyTemplate = [
            { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
            { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
            { type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
            { type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
            { type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
            { type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
            ...genericAttributes
          ];

          // Generate the key
          pkcs11.C_GenerateKey(
            sessionHandle,
            {
              mechanism: pkcs11js.CKM_AES_KEY_GEN
            },
            keyTemplate
          );

          logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
        }

        // Check if HMAC key exists, create if not
        if (!$keyExists(sessionHandle, HsmKeyType.HMAC)) {
          const hmacKeyTemplate = [
            { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
            { type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
            { type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
            { type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
            { type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
            { type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
            ...genericAttributes
          ];

          // Generate the HMAC key
          pkcs11.C_GenerateKey(
            sessionHandle,
            {
              mechanism: pkcs11js.CKM_GENERIC_SECRET_KEY_GEN
            },
            hmacKeyTemplate
          );

          logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
        }

        // Get slot info to check supported mechanisms
        const slotId = pkcs11.C_GetSessionInfo(sessionHandle).slotID;
        const mechanisms = pkcs11.C_GetMechanismList(slotId);

        // Check for AES CBC PAD support
        const hasAesCbc = mechanisms.includes(pkcs11js.CKM_AES_CBC_PAD);

        if (!hasAesCbc) {
          throw new Error(`Required mechanism CKM_AEC_CBC_PAD not supported by HSM`);
        }

        // Run test encryption/decryption
        const testPassed = await $testPkcs11Module(sessionHandle);

        if (!testPassed) {
          throw new Error("PKCS#11 module test failed. Please ensure that the HSM is correctly configured.");
        }
      });
    } catch (error) {
      logger.error(error, "HSM: Error initializing HSM service:");
      throw error;
    }
  };

  return {
    encrypt,
    startService,
    isActive,
    decrypt
  };
};
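// Illustrative usage sketch (not part of the diff above): the service factory exposes
// encrypt/decrypt that wrap a full [IV | ciphertext | HMAC] envelope. This round-trip is
// an assumption-based example that reuses the hsmModule variable from the earlier sketch.
const hsmService = hsmServiceFactory({ hsmModule: hsmModule.getModule() });
await hsmService.startService(); // creates the AES/HMAC keys on first run and self-tests

const ciphertextBlob = await hsmService.encrypt(Buffer.from("super-secret"));
const plaintext = await hsmService.decrypt(ciphertextBlob);
// plaintext.toString() === "super-secret"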
backend/src/ee/services/hsm/hsm-types.ts  (new file, 11 lines)
@@ -0,0 +1,11 @@
import pkcs11js from "pkcs11js";

export type HsmModule = {
  pkcs11: pkcs11js.PKCS11;
  isInitialized: boolean;
};

export enum HsmKeyType {
  AES = "AES",
  HMAC = "hmac"
}
@@ -29,6 +29,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
   auditLogStreams: false,
   auditLogStreamLimit: 3,
   samlSSO: false,
+  hsm: false,
   oidcSSO: false,
   scim: false,
   ldap: false,
@@ -47,7 +48,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
     secretsLimit: 40
   },
   pkiEst: false,
-  enforceMfa: false
+  enforceMfa: false,
+  projectTemplates: false
 });

 export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
@@ -129,7 +129,7 @@ export const licenseServiceFactory = ({
       }
     }

-    // this means this is self hosted oss version
+    // this means this is the self-hosted oss version
     // else it would reach catch statement
     isValidLicense = true;
   } catch (error) {
@@ -46,6 +46,7 @@ export type TFeatureSet = {
   auditLogStreams: false;
   auditLogStreamLimit: 3;
   samlSSO: false;
+  hsm: false;
   oidcSSO: false;
   scim: false;
   ldap: false;
@@ -65,6 +66,7 @@ export type TFeatureSet = {
   };
   pkiEst: boolean;
   enforceMfa: boolean;
+  projectTemplates: false;
 };

 export type TOrgPlansTableDTO = {
@@ -26,7 +26,8 @@ export enum OrgPermissionSubjects {
   Identity = "identity",
   Kms = "kms",
   AdminConsole = "organization-admin-console",
-  AuditLogs = "audit-logs"
+  AuditLogs = "audit-logs",
+  ProjectTemplates = "project-templates"
 }

 export type OrgPermissionSet =
@@ -45,6 +46,7 @@ export type OrgPermissionSet =
   | [OrgPermissionActions, OrgPermissionSubjects.Identity]
   | [OrgPermissionActions, OrgPermissionSubjects.Kms]
   | [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
+  | [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
   | [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole];

 const buildAdminPermission = () => {
@@ -118,6 +120,11 @@ const buildAdminPermission = () => {
   can(OrgPermissionActions.Edit, OrgPermissionSubjects.AuditLogs);
   can(OrgPermissionActions.Delete, OrgPermissionSubjects.AuditLogs);

+  can(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
+  can(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
+  can(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates);
+  can(OrgPermissionActions.Delete, OrgPermissionSubjects.ProjectTemplates);
+
   can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);

   return rules;
@@ -694,31 +694,35 @@ export const buildServiceTokenProjectPermission = (
   const canRead = permission.includes("read");
   const { can, build } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
   scopes.forEach(({ secretPath, environment }) => {
+    [ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
+      (subject) => {
     if (canWrite) {
       // TODO: @Akhi
       // @ts-expect-error type
-      can(ProjectPermissionActions.Edit, ProjectPermissionSub.Secrets, {
+      can(ProjectPermissionActions.Edit, subject, {
         secretPath: { $glob: secretPath },
         environment
       });
       // @ts-expect-error type
-      can(ProjectPermissionActions.Create, ProjectPermissionSub.Secrets, {
+      can(ProjectPermissionActions.Create, subject, {
         secretPath: { $glob: secretPath },
         environment
       });
       // @ts-expect-error type
-      can(ProjectPermissionActions.Delete, ProjectPermissionSub.Secrets, {
+      can(ProjectPermissionActions.Delete, subject, {
         secretPath: { $glob: secretPath },
         environment
       });
     }
     if (canRead) {
       // @ts-expect-error type
-      can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets, {
+      can(ProjectPermissionActions.Read, subject, {
         secretPath: { $glob: secretPath },
         environment
       });
     }
+      }
+    );
   });

   return build({ conditionsMatcher });
backend/src/ee/services/project-template/project-template-constants.ts  (new file)
@@ -0,0 +1,5 @@
export const ProjectTemplateDefaultEnvironments = [
  { name: "Development", slug: "dev", position: 1 },
  { name: "Staging", slug: "staging", position: 2 },
  { name: "Production", slug: "prod", position: 3 }
];
backend/src/ee/services/project-template/project-template-dal.ts  (new file)
@@ -0,0 +1,7 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TProjectTemplateDALFactory = ReturnType<typeof projectTemplateDALFactory>;

export const projectTemplateDALFactory = (db: TDbClient) => ormify(db, TableName.ProjectTemplates);
backend/src/ee/services/project-template/project-template-fns.ts  (new file)
@@ -0,0 +1,24 @@
import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
import {
  InfisicalProjectTemplate,
  TUnpackedPermission
} from "@app/ee/services/project-template/project-template-types";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";

export const getDefaultProjectTemplate = (orgId: string) => ({
  id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // random ID to appease zod
  name: InfisicalProjectTemplate.Default,
  createdAt: new Date(),
  updatedAt: new Date(),
  description: "Infisical's default project template",
  environments: ProjectTemplateDefaultEnvironments,
  roles: [...getPredefinedRoles("project-template")].map(({ name, slug, permissions }) => ({
    name,
    slug,
    permissions: permissions as TUnpackedPermission[]
  })),
  orgId
});

export const isInfisicalProjectTemplate = (template: string) =>
  Object.values(InfisicalProjectTemplate).includes(template as InfisicalProjectTemplate);
backend/src/ee/services/project-template/project-template-service.ts  (new file)
@@ -0,0 +1,265 @@
import { ForbiddenError } from "@casl/ability";
import { packRules } from "@casl/ability/extra";

import { TProjectTemplates } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getDefaultProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import {
  TCreateProjectTemplateDTO,
  TProjectTemplateEnvironment,
  TProjectTemplateRole,
  TUnpackedPermission,
  TUpdateProjectTemplateDTO
} from "@app/ee/services/project-template/project-template-types";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";

import { TProjectTemplateDALFactory } from "./project-template-dal";

type TProjectTemplatesServiceFactoryDep = {
  licenseService: TLicenseServiceFactory;
  permissionService: TPermissionServiceFactory;
  projectTemplateDAL: TProjectTemplateDALFactory;
};

export type TProjectTemplateServiceFactory = ReturnType<typeof projectTemplateServiceFactory>;

const $unpackProjectTemplate = ({ roles, environments, ...rest }: TProjectTemplates) => ({
  ...rest,
  environments: environments as TProjectTemplateEnvironment[],
  roles: [
    ...getPredefinedRoles("project-template").map(({ name, slug, permissions }) => ({
      name,
      slug,
      permissions: permissions as TUnpackedPermission[]
    })),
    ...(roles as TProjectTemplateRole[]).map((role) => ({
      ...role,
      permissions: unpackPermissions(role.permissions)
    }))
  ]
});

export const projectTemplateServiceFactory = ({
  licenseService,
  permissionService,
  projectTemplateDAL
}: TProjectTemplatesServiceFactoryDep) => {
  const listProjectTemplatesByOrg = async (actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to access project templates due to plan restriction. Upgrade plan to access project templates."
      });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      actor.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);

    const projectTemplates = await projectTemplateDAL.find({
      orgId: actor.orgId
    });

    return [
      getDefaultProjectTemplate(actor.orgId),
      ...projectTemplates.map((template) => $unpackProjectTemplate(template))
    ];
  };

  const findProjectTemplateByName = async (name: string, actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to access project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findOne({ name, orgId: actor.orgId });

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with Name "${name}"` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);

    return {
      ...$unpackProjectTemplate(projectTemplate),
      packedRoles: projectTemplate.roles as TProjectTemplateRole[] // preserve packed for when applying template
    };
  };

  const findProjectTemplateById = async (id: string, actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to access project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findById(id);

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);

    return {
      ...$unpackProjectTemplate(projectTemplate),
      packedRoles: projectTemplate.roles as TProjectTemplateRole[] // preserve packed for when applying template
    };
  };

  const createProjectTemplate = async (
    { roles, environments, ...params }: TCreateProjectTemplateDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to create project template due to plan restriction. Upgrade plan to access project templates."
      });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      actor.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);

    const isConflictingName = Boolean(
      await projectTemplateDAL.findOne({
        name: params.name,
        orgId: actor.orgId
      })
    );

    if (isConflictingName)
      throw new BadRequestError({
        message: `A project template with the name "${params.name}" already exists.`
      });

    const projectTemplate = await projectTemplateDAL.create({
      ...params,
      roles: JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) }))),
      environments: JSON.stringify(environments),
      orgId: actor.orgId
    });

    return $unpackProjectTemplate(projectTemplate);
  };

  const updateProjectTemplateById = async (
    id: string,
    { roles, environments, ...params }: TUpdateProjectTemplateDTO,
    actor: OrgServiceActor
  ) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to update project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findById(id);

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates);

    if (params.name && projectTemplate.name !== params.name) {
      const isConflictingName = Boolean(
        await projectTemplateDAL.findOne({
          name: params.name,
          orgId: projectTemplate.orgId
        })
      );

      if (isConflictingName)
        throw new BadRequestError({
          message: `A project template with the name "${params.name}" already exists.`
        });
    }

    const updatedProjectTemplate = await projectTemplateDAL.updateById(id, {
      ...params,
      roles: roles
        ? JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) })))
        : undefined,
      environments: environments ? JSON.stringify(environments) : undefined
    });

    return $unpackProjectTemplate(updatedProjectTemplate);
  };

  const deleteProjectTemplateById = async (id: string, actor: OrgServiceActor) => {
    const plan = await licenseService.getPlan(actor.orgId);

    if (!plan.projectTemplates)
      throw new BadRequestError({
        message: "Failed to delete project template due to plan restriction. Upgrade plan to access project templates."
      });

    const projectTemplate = await projectTemplateDAL.findById(id);

    if (!projectTemplate) throw new NotFoundError({ message: `Could not find project template with ID ${id}` });

    const { permission } = await permissionService.getOrgPermission(
      actor.type,
      actor.id,
      projectTemplate.orgId,
      actor.authMethod,
      actor.orgId
    );

    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.ProjectTemplates);

    const deletedProjectTemplate = await projectTemplateDAL.deleteById(id);

    return $unpackProjectTemplate(deletedProjectTemplate);
  };

  return {
    listProjectTemplatesByOrg,
    createProjectTemplate,
    updateProjectTemplateById,
    deleteProjectTemplateById,
    findProjectTemplateById,
    findProjectTemplateByName
  };
};
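// Illustrative usage sketch (not part of the diff above): constructing the service and
// listing an org's templates. The dependency instances and the actor object are assumed
// to come from the backend's existing dependency-injection setup, not from this PR.
const projectTemplateService = projectTemplateServiceFactory({
  licenseService,
  permissionService,
  projectTemplateDAL
});

// Every org always sees the built-in default template plus its own custom templates.
const templates = await projectTemplateService.listProjectTemplatesByOrg(actor);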
backend/src/ee/services/project-template/project-template-types.ts  (new file)
@@ -0,0 +1,28 @@
import { z } from "zod";

import { TProjectEnvironments } from "@app/db/schemas";
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";

export type TProjectTemplateEnvironment = Pick<TProjectEnvironments, "name" | "slug" | "position">;

export type TProjectTemplateRole = {
  slug: string;
  name: string;
  permissions: TProjectPermissionV2Schema[];
};

export type TCreateProjectTemplateDTO = {
  name: string;
  description?: string;
  roles: TProjectTemplateRole[];
  environments: TProjectTemplateEnvironment[];
};

export type TUpdateProjectTemplateDTO = Partial<TCreateProjectTemplateDTO>;

export type TUnpackedPermission = z.infer<typeof UnpackedPermissionSchema>;

export enum InfisicalProjectTemplate {
  Default = "default"
}
@@ -267,7 +267,8 @@ export const secretApprovalRequestServiceFactory = ({
             : "",
           secretComment: el.secretVersion.encryptedComment
             ? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString()
-            : ""
+            : "",
+          tags: el.secretVersion.tags
         }
       : undefined
   }));
@@ -571,7 +572,7 @@ export const secretApprovalRequestServiceFactory = ({
         reminderNote: el.reminderNote,
         skipMultilineEncoding: el.skipMultilineEncoding,
         key: el.key,
-        tagIds: el?.tags.map(({ id }) => id),
+        tags: el?.tags.map(({ id }) => id),
         ...encryptedValue
       }
     };
@@ -85,7 +85,8 @@ export const secretRotationDbFn = async ({
   password,
   username,
   client,
-  variables
+  variables,
+  options
 }: TSecretRotationDbFn) => {
   const appCfg = getConfig();

@@ -117,7 +118,8 @@ export const secretRotationDbFn = async ({
       password,
       connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
       ssl,
-      pool: { min: 0, max: 1 }
+      pool: { min: 0, max: 1 },
+      options
     }
   });
   const data = await db.raw(query, variables);
@@ -153,6 +155,14 @@ export const getDbSetQuery = (db: TDbProviderClients, variables: { username: str
       variables: [variables.username]
     };
   }

+  if (db === TDbProviderClients.MsSqlServer) {
+    return {
+      query: `ALTER LOGIN ?? WITH PASSWORD = '${variables.password}'`,
+      variables: [variables.username]
+    };
+  }
+
   // add more based on client
   return {
     query: `ALTER USER ?? IDENTIFIED BY '${variables.password}'`,
@@ -24,4 +24,5 @@ export type TSecretRotationDbFn = {
   query: string;
   variables: unknown[];
   ca?: string;
+  options?: Record<string, unknown>;
 };
@@ -94,7 +94,9 @@ export const secretRotationQueueFactory = ({
         // on prod it this will be in days, in development this will be second
         every: appCfg.NODE_ENV === "development" ? secondsToMillis(interval) : daysToMillisecond(interval),
         immediately: true
-      }
+      },
+      removeOnComplete: true,
+      removeOnFail: true
     }
   );
 };
@@ -114,6 +116,7 @@ export const secretRotationQueueFactory = ({

   queue.start(QueueName.SecretRotation, async (job) => {
     const { rotationId } = job.data;
+    const appCfg = getConfig();
     logger.info(`secretRotationQueue.process: [rotationDocument=${rotationId}]`);
     const secretRotation = await secretRotationDAL.findById(rotationId);
     const rotationProvider = rotationTemplates.find(({ name }) => name === secretRotation?.provider);
@@ -172,6 +175,15 @@ export const secretRotationQueueFactory = ({
           // set a random value for new password
           newCredential.internal.rotated_password = alphaNumericNanoId(32);
           const { admin_username: username, admin_password: password, host, database, port, ca } = newCredential.inputs;
+
+          const options =
+            provider.template.client === TDbProviderClients.MsSqlServer
+              ? ({
+                  encrypt: appCfg.ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT,
+                  cryptoCredentialsDetails: ca ? { ca } : {}
+                } as Record<string, unknown>)
+              : undefined;
+
           const dbFunctionArg = {
             username,
             password,
@@ -179,8 +191,10 @@ export const secretRotationQueueFactory = ({
             database,
             port,
             ca: ca as string,
-            client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client
+            client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client,
+            options
           } as TSecretRotationDbFn;

           // set function
           await secretRotationDbFn({
             ...dbFunctionArg,
@@ -189,12 +203,17 @@ export const secretRotationQueueFactory = ({
             username: newCredential.internal.username as string
           })
         });

         // test function
+        const testQuery =
+          provider.template.client === TDbProviderClients.MsSqlServer ? "SELECT GETDATE()" : "SELECT NOW()";
+
         await secretRotationDbFn({
           ...dbFunctionArg,
-          query: "SELECT NOW()",
+          query: testQuery,
           variables: []
         });

         newCredential.outputs.db_username = newCredential.internal.username;
         newCredential.outputs.db_password = newCredential.internal.rotated_password;
         // clean up
@@ -1,4 +1,5 @@
 import { AWS_IAM_TEMPLATE } from "./aws-iam";
+import { MSSQL_TEMPLATE } from "./mssql";
 import { MYSQL_TEMPLATE } from "./mysql";
 import { POSTGRES_TEMPLATE } from "./postgres";
 import { SENDGRID_TEMPLATE } from "./sendgrid";
@@ -26,6 +27,13 @@ export const rotationTemplates: TSecretRotationProviderTemplate[] = [
     description: "Rotate MySQL@7/MariaDB user credentials",
     template: MYSQL_TEMPLATE
   },
+  {
+    name: "mssql",
+    title: "Microsoft SQL Server",
+    image: "mssqlserver.png",
+    description: "Rotate Microsoft SQL server user credentials",
+    template: MSSQL_TEMPLATE
+  },
   {
     name: "aws-iam",
     title: "AWS IAM",
backend/src/ee/services/secret-rotation/templates/mssql.ts  (new file, 33 lines)
@@ -0,0 +1,33 @@
import { TDbProviderClients, TProviderFunctionTypes } from "./types";

export const MSSQL_TEMPLATE = {
  type: TProviderFunctionTypes.DB as const,
  client: TDbProviderClients.MsSqlServer,
  inputs: {
    type: "object" as const,
    properties: {
      admin_username: { type: "string" as const },
      admin_password: { type: "string" as const },
      host: { type: "string" as const },
      database: { type: "string" as const, default: "master" },
      port: { type: "integer" as const, default: "1433" },
      username1: {
        type: "string",
        default: "infisical-sql-user1",
        desc: "SQL Server login name that must be created at server level with a matching database user"
      },
      username2: {
        type: "string",
        default: "infisical-sql-user2",
        desc: "SQL Server login name that must be created at server level with a matching database user"
      },
      ca: { type: "string", desc: "SSL certificate for db auth(string)" }
    },
    required: ["admin_username", "admin_password", "host", "database", "username1", "username2", "port"],
    additionalProperties: false
  },
  outputs: {
    db_username: { type: "string" },
    db_password: { type: "string" }
  }
};
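// Illustrative example (not part of the diff above): the shape of inputs that the
// MSSQL_TEMPLATE schema above accepts. All values are placeholders, not real credentials.
const exampleMssqlRotationInputs = {
  admin_username: "sa",
  admin_password: "<admin-password>",
  host: "mssql.internal.example.com",
  database: "master",
  port: 1433,
  username1: "infisical-sql-user1", // pre-created server login with a matching database user
  username2: "infisical-sql-user2"
};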
@@ -8,7 +8,9 @@ export enum TDbProviderClients {
   // postgres, cockroack db, amazon red shift
   Pg = "pg",
   // mysql and maria db
-  MySql = "mysql"
+  MySql = "mysql",
+
+  MsSqlServer = "mssql"
 }

 export enum TAwsProviderSystems {
@@ -29,7 +29,7 @@ export const KeyStorePrefixes = {
 };

 export const KeyStoreTtls = {
-  SetSyncSecretIntegrationLastRunTimestampInSeconds: 10,
+  SetSyncSecretIntegrationLastRunTimestampInSeconds: 60,
   AccessTokenStatusUpdateInSeconds: 120
 };
@@ -391,7 +391,8 @@ export const PROJECTS = {
   CREATE: {
     organizationSlug: "The slug of the organization to create the project in.",
     projectName: "The name of the project to create.",
-    slug: "An optional slug for the project."
+    slug: "An optional slug for the project.",
+    template: "The name of the project template, if specified, to apply to this project."
   },
   DELETE: {
     workspaceId: "The ID of the project to delete."
@@ -669,6 +670,12 @@ export const RAW_SECRETS = {
     type: "The type of the secret to delete.",
     projectSlug: "The slug of the project to delete the secret in.",
     workspaceId: "The ID of the project where the secret is located."
+  },
+  GET_REFERENCE_TREE: {
+    secretName: "The name of the secret to get the reference tree for.",
+    workspaceId: "The ID of the project where the secret is located.",
+    environment: "The slug of the environment where the the secret is located.",
+    secretPath: "The folder path where the secret is located."
   }
 } as const;

@@ -1432,3 +1439,22 @@ export const KMS = {
     ciphertext: "The ciphertext to be decrypted (base64 encoded)."
   }
 };
+
+export const ProjectTemplates = {
+  CREATE: {
+    name: "The name of the project template to be created. Must be slug-friendly.",
+    description: "An optional description of the project template.",
+    roles: "The roles to be created when the template is applied to a project.",
+    environments: "The environments to be created when the template is applied to a project."
+  },
+  UPDATE: {
+    templateId: "The ID of the project template to be updated.",
+    name: "The updated name of the project template. Must be slug-friendly.",
+    description: "The updated description of the project template.",
+    roles: "The updated roles to be created when the template is applied to a project.",
+    environments: "The updated environments to be created when the template is applied to a project."
+  },
+  DELETE: {
+    templateId: "The ID of the project template to be deleted."
+  }
+};
@@ -162,10 +162,23 @@ const envSchema = z
     DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
     SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"),
     WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string().optional()),
-    WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional())
+    WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
+    ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),
+
+    // HSM
+    HSM_LIB_PATH: zpStr(z.string().optional()),
+    HSM_PIN: zpStr(z.string().optional()),
+    HSM_KEY_LABEL: zpStr(z.string().optional()),
+    HSM_SLOT: z.coerce.number().optional().default(0)
   })
+  // To ensure that basic encryption is always possible.
+  .refine(
+    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
+    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
+  )
   .transform((data) => ({
     ...data,
     DB_READ_REPLICAS: data.DB_READ_REPLICAS
       ? databaseReadReplicaSchema.parse(JSON.parse(data.DB_READ_REPLICAS))
       : undefined,
@@ -174,10 +187,14 @@ const envSchema = z
     isRedisConfigured: Boolean(data.REDIS_URL),
     isDevelopmentMode: data.NODE_ENV === "development",
     isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
+
     isSecretScanningConfigured:
       Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
       Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
      Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
+    isHsmConfigured:
+      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
+
     samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
     SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
   }));
@@ -57,3 +57,10 @@ export enum OrderByDirection {
   ASC = "asc",
   DESC = "desc"
 }
+
+export type OrgServiceActor = {
+  type: ActorType;
+  id: string;
+  authMethod: ActorAuthMethod;
+  orgId: string;
+};
@@ -1,2 +1,3 @@
 export { isDisposableEmail } from "./validate-email";
+export { isValidFolderName, isValidSecretPath } from "./validate-folder-name";
 export { blockLocalAndPrivateIpAddresses } from "./validate-url";
backend/src/lib/validator/validate-folder-name.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
+// regex to allow only alphanumeric, dash, underscore
+export const isValidFolderName = (name: string) => /^[a-zA-Z0-9-_]+$/.test(name);
+
+export const isValidSecretPath = (path: string) =>
+  path
+    .split("/")
+    .filter((el) => el.length)
+    .every((name) => isValidFolderName(name));
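A quick usage sketch of the two helpers added above (the inputs are invented examples, not values from this changeset):

    import { isValidFolderName, isValidSecretPath } from "@app/lib/validator";

    // Folder names may only contain letters, digits, dashes and underscores.
    isValidFolderName("api_keys-2024"); // true
    isValidFolderName("bad name!"); // false

    // A secret path is valid when every non-empty "/"-separated segment is a valid folder name.
    isValidSecretPath("/app/api_keys-2024/db"); // true
    isValidSecretPath("/app/bad name!/db"); // false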
@@ -1,6 +1,8 @@
 import dotenv from "dotenv";
 import path from "path";
 
+import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
+
 import { initAuditLogDbConnection, initDbConnection } from "./db";
 import { keyStoreFactory } from "./keystore/keystore";
 import { formatSmtpConfig, initEnvConfig, IS_PACKAGED } from "./lib/config/env";
@@ -53,13 +55,17 @@ const run = async () => {
   const queue = queueServiceFactory(appCfg.REDIS_URL);
   const keyStore = keyStoreFactory(appCfg.REDIS_URL);
 
-  const server = await main({ db, auditLogDb, smtp, logger, queue, keyStore });
+  const hsmModule = initializeHsmModule();
+  hsmModule.initialize();
+
+  const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore });
   const bootstrap = await bootstrapCheck({ db });
 
   // eslint-disable-next-line
   process.on("SIGINT", async () => {
     await server.close();
     await db.destroy();
+    hsmModule.finalize();
     process.exit(0);
   });
 
@@ -67,6 +73,7 @@ const run = async () => {
   process.on("SIGTERM", async () => {
     await server.close();
     await db.destroy();
+    hsmModule.finalize();
     process.exit(0);
   });
 
@@ -14,6 +14,7 @@ import fastify from "fastify";
 import { Knex } from "knex";
 import { Logger } from "pino";
 
+import { HsmModule } from "@app/ee/services/hsm/hsm-types";
 import { TKeyStoreFactory } from "@app/keystore/keystore";
 import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
 import { TQueueServiceFactory } from "@app/queue";
@@ -36,16 +37,19 @@ type TMain = {
   logger?: Logger;
   queue: TQueueServiceFactory;
   keyStore: TKeyStoreFactory;
+  hsmModule: HsmModule;
 };
 
 // Run the server!
-export const main = async ({ db, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
+export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
   const appCfg = getConfig();
 
   const server = fastify({
     logger: appCfg.NODE_ENV === "test" ? false : logger,
     trustProxy: true,
-    connectionTimeout: 30 * 1000,
-    ignoreTrailingSlash: true
+    connectionTimeout: appCfg.isHsmConfigured ? 90_000 : 30_000,
+    ignoreTrailingSlash: true,
+    pluginTimeout: 40_000
   }).withTypeProvider<ZodTypeProvider>();
 
   server.setValidatorCompiler(validatorCompiler);
@@ -95,7 +99,7 @@ export const main = async ({ db, auditLogDb, smtp, logger, queue, keyStore }: TM
 
   await server.register(maintenanceMode);
 
-  await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore });
+  await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });
 
   if (appCfg.isProductionMode) {
     await server.register(registerExternalNextjs, {
@@ -15,8 +15,12 @@ export const fastifySwagger = fp(async (fastify) => {
       },
       servers: [
         {
-          url: "https://app.infisical.com",
-          description: "Production server"
+          url: "https://us.infisical.com",
+          description: "Production server (US)"
+        },
+        {
+          url: "https://eu.infisical.com",
+          description: "Production server (EU)"
         },
         {
           url: "http://localhost:8080",
@@ -1,5 +1,4 @@
 import { CronJob } from "cron";
-// import { Redis } from "ioredis";
 import { Knex } from "knex";
 import { z } from "zod";
 
@@ -31,6 +30,8 @@ import { externalKmsServiceFactory } from "@app/ee/services/external-kms/externa
 import { groupDALFactory } from "@app/ee/services/group/group-dal";
 import { groupServiceFactory } from "@app/ee/services/group/group-service";
 import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
+import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
+import { HsmModule } from "@app/ee/services/hsm/hsm-types";
 import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-dal";
 import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
 import { identityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
@@ -43,6 +44,8 @@ import { oidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
 import { oidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
 import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
 import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { projectTemplateDALFactory } from "@app/ee/services/project-template/project-template-dal";
+import { projectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
 import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
 import { projectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
 import { rateLimitDALFactory } from "@app/ee/services/rate-limit/rate-limit-dal";
@@ -221,10 +224,18 @@ export const registerRoutes = async (
   {
     auditLogDb,
     db,
+    hsmModule,
     smtp: smtpService,
     queue: queueService,
     keyStore
-  }: { auditLogDb?: Knex; db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory }
+  }: {
+    auditLogDb?: Knex;
+    db: Knex;
+    hsmModule: HsmModule;
+    smtp: TSmtpService;
+    queue: TQueueServiceFactory;
+    keyStore: TKeyStoreFactory;
+  }
 ) => {
   const appCfg = getConfig();
   await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
@@ -340,6 +351,8 @@ export const registerRoutes = async (
 
   const externalGroupOrgRoleMappingDAL = externalGroupOrgRoleMappingDALFactory(db);
 
+  const projectTemplateDAL = projectTemplateDALFactory(db);
+
   const permissionService = permissionServiceFactory({
     permissionDAL,
     orgRoleDAL,
@@ -348,14 +361,21 @@ export const registerRoutes = async (
     projectDAL
   });
   const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
+
+  const hsmService = hsmServiceFactory({
+    hsmModule
+  });
+
   const kmsService = kmsServiceFactory({
     kmsRootConfigDAL,
     keyStore,
     kmsDAL,
     internalKmsDAL,
     orgDAL,
-    projectDAL
+    projectDAL,
+    hsmService
   });
 
   const externalKmsService = externalKmsServiceFactory({
     kmsDAL,
     kmsService,
@@ -552,6 +572,7 @@ export const registerRoutes = async (
     userDAL,
     authService: loginService,
     serverCfgDAL: superAdminDAL,
+    kmsRootConfigDAL,
     orgService,
     keyStore,
     licenseService,
@@ -732,6 +753,12 @@ export const registerRoutes = async (
     permissionService
   });
 
+  const projectTemplateService = projectTemplateServiceFactory({
+    licenseService,
+    permissionService,
+    projectTemplateDAL
+  });
+
   const projectService = projectServiceFactory({
     permissionService,
     projectDAL,
@@ -758,7 +785,8 @@ export const registerRoutes = async (
     projectBotDAL,
     certificateTemplateDAL,
     projectSlackConfigDAL,
-    slackIntegrationDAL
+    slackIntegrationDAL,
+    projectTemplateService
   });
 
   const projectEnvService = projectEnvServiceFactory({
@@ -1087,7 +1115,6 @@ export const registerRoutes = async (
 
   const identityTokenAuthService = identityTokenAuthServiceFactory({
     identityTokenAuthDAL,
-    identityDAL,
     identityOrgMembershipDAL,
     identityAccessTokenDAL,
     permissionService,
@@ -1096,7 +1123,6 @@ export const registerRoutes = async (
   const identityUaService = identityUaServiceFactory({
     identityOrgMembershipDAL,
     permissionService,
-    identityDAL,
     identityAccessTokenDAL,
     identityUaClientSecretDAL,
     identityUaDAL,
@@ -1106,7 +1132,6 @@ export const registerRoutes = async (
     identityKubernetesAuthDAL,
     identityOrgMembershipDAL,
     identityAccessTokenDAL,
-    identityDAL,
     orgBotDAL,
     permissionService,
     licenseService
@@ -1115,7 +1140,6 @@ export const registerRoutes = async (
     identityGcpAuthDAL,
     identityOrgMembershipDAL,
     identityAccessTokenDAL,
-    identityDAL,
     permissionService,
     licenseService
   });
@@ -1124,7 +1148,6 @@ export const registerRoutes = async (
     identityAccessTokenDAL,
     identityAwsAuthDAL,
     identityOrgMembershipDAL,
-    identityDAL,
     licenseService,
     permissionService
   });
@@ -1133,7 +1156,6 @@ export const registerRoutes = async (
     identityAzureAuthDAL,
     identityOrgMembershipDAL,
     identityAccessTokenDAL,
-    identityDAL,
     permissionService,
     licenseService
   });
@@ -1142,7 +1164,6 @@ export const registerRoutes = async (
     identityOidcAuthDAL,
     identityOrgMembershipDAL,
     identityAccessTokenDAL,
-    identityDAL,
     permissionService,
     licenseService,
     orgBotDAL
@@ -1257,10 +1278,13 @@ export const registerRoutes = async (
   });
 
   await superAdminService.initServerCfg();
-  //
+
   // setup the communication with license key server
   await licenseService.init();
 
+  // Start HSM service if it's configured/enabled.
+  await hsmService.startService();
+
   await telemetryQueue.startTelemetryCheck();
   await dailyResourceCleanUp.startCleanUp();
   await dailyExpiringPkiItemAlert.startSendingAlerts();
@@ -1338,12 +1362,14 @@ export const registerRoutes = async (
     secretSharing: secretSharingService,
     userEngagement: userEngagementService,
     externalKms: externalKmsService,
+    hsm: hsmService,
     cmek: cmekService,
     orgAdmin: orgAdminService,
     slack: slackService,
     workflowIntegration: workflowIntegrationService,
     migration: migrationService,
-    externalGroupOrgRoleMapping: externalGroupOrgRoleMappingService
+    externalGroupOrgRoleMapping: externalGroupOrgRoleMappingService,
+    projectTemplate: projectTemplateService
   });
 
   const cronJobs: CronJob[] = [];
@@ -7,6 +7,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
+import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types";
 import { getServerCfg } from "@app/services/super-admin/super-admin-service";
 import { LoginMethod } from "@app/services/super-admin/super-admin-types";
 import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
@@ -29,6 +30,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
       }).extend({
         isMigrationModeOn: z.boolean(),
         defaultAuthOrgSlug: z.string().nullable(),
+        defaultAuthOrgAuthEnforced: z.boolean().nullish(),
+        defaultAuthOrgAuthMethod: z.string().nullish(),
         isSecretScanningDisabled: z.boolean()
       })
     })
@@ -193,6 +196,57 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
     }
   });
 
+  server.route({
+    method: "GET",
+    url: "/encryption-strategies",
+    config: {
+      rateLimit: readLimit
+    },
+    schema: {
+      response: {
+        200: z.object({
+          strategies: z
+            .object({
+              strategy: z.nativeEnum(RootKeyEncryptionStrategy),
+              enabled: z.boolean()
+            })
+            .array()
+        })
+      }
+    },
+    onRequest: (req, res, done) => {
+      verifyAuth([AuthMode.JWT])(req, res, () => {
+        verifySuperAdmin(req, res, done);
+      });
+    },
+
+    handler: async () => {
+      const encryptionDetails = await server.services.superAdmin.getConfiguredEncryptionStrategies();
+      return encryptionDetails;
+    }
+  });
+
+  server.route({
+    method: "PATCH",
+    url: "/encryption-strategies",
+    config: {
+      rateLimit: writeLimit
+    },
+    schema: {
+      body: z.object({
+        strategy: z.nativeEnum(RootKeyEncryptionStrategy)
+      })
+    },
+    onRequest: (req, res, done) => {
+      verifyAuth([AuthMode.JWT])(req, res, () => {
+        verifySuperAdmin(req, res, done);
+      });
+    },
+    handler: async (req) => {
+      await server.services.superAdmin.updateRootEncryptionStrategy(req.body.strategy);
+    }
+  });
+
   server.route({
     method: "POST",
     url: "/signup",
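For context, a minimal sketch of how a super-admin client could call the two new routes. The /api/v1/admin prefix, the example host, and the token handling are assumptions; only the route paths and schemas come from the diff above:

    // Assumed deployment URL and admin JWT (replace with real values).
    const baseUrl = "https://infisical.example.com/api/v1/admin";
    const headers = { Authorization: `Bearer ${process.env.ADMIN_JWT}`, "Content-Type": "application/json" };

    async function rotateRootEncryptionStrategy() {
      // List the configured root-key encryption strategies and whether each is enabled.
      const res = await fetch(`${baseUrl}/encryption-strategies`, { headers });
      const { strategies } = (await res.json()) as { strategies: { strategy: string; enabled: boolean }[] };

      // Pick any strategy reported by the server and switch to it.
      // (Valid values are the members of RootKeyEncryptionStrategy, which are not shown in this diff.)
      const next = strategies.find((s) => !s.enabled);
      if (!next) return;

      await fetch(`${baseUrl}/encryption-strategies`, {
        method: "PATCH",
        headers,
        body: JSON.stringify({ strategy: next.strategy })
      });
    }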
@@ -14,7 +14,8 @@ import { validateTemplateRegexField } from "@app/services/certificate-template/c
 const sanitizedEstConfig = CertificateTemplateEstConfigsSchema.pick({
   id: true,
   certificateTemplateId: true,
-  isEnabled: true
+  isEnabled: true,
+  disableBootstrapCertValidation: true
 });
 
 export const registerCertificateTemplateRouter = async (server: FastifyZodProvider) => {
@@ -241,11 +242,18 @@ export const registerCertificateTemplateRouter = async (server: FastifyZodProvid
       params: z.object({
         certificateTemplateId: z.string().trim()
       }),
-      body: z.object({
-        caChain: z.string().trim().min(1),
+      body: z
+        .object({
+          caChain: z.string().trim().optional(),
           passphrase: z.string().min(1),
-        isEnabled: z.boolean().default(true)
-      }),
+          isEnabled: z.boolean().default(true),
+          disableBootstrapCertValidation: z.boolean().default(false)
+        })
+        .refine(
+          ({ caChain, disableBootstrapCertValidation }) =>
+            disableBootstrapCertValidation || (!disableBootstrapCertValidation && caChain),
+          "CA chain is required"
+        ),
       response: {
         200: sanitizedEstConfig
       }
@@ -289,8 +297,9 @@ export const registerCertificateTemplateRouter = async (server: FastifyZodProvid
         certificateTemplateId: z.string().trim()
       }),
       body: z.object({
-        caChain: z.string().trim().min(1).optional(),
+        caChain: z.string().trim().optional(),
         passphrase: z.string().min(1).optional(),
+        disableBootstrapCertValidation: z.boolean().optional(),
         isEnabled: z.boolean().optional()
       }),
       response: {
@@ -293,10 +293,10 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
     schema: {
       description: "Decrypt data with KMS key",
       params: z.object({
-        keyId: z.string().uuid().describe(KMS.ENCRYPT.keyId)
+        keyId: z.string().uuid().describe(KMS.DECRYPT.keyId)
       }),
       body: z.object({
-        ciphertext: base64Schema.describe(KMS.ENCRYPT.plaintext)
+        ciphertext: base64Schema.describe(KMS.DECRYPT.ciphertext)
       }),
       response: {
         200: z.object({
@@ -20,6 +20,8 @@ import { AuthMode } from "@app/services/auth/auth-type";
 import { SecretsOrderBy } from "@app/services/secret/secret-types";
 import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
 
+const MAX_DEEP_SEARCH_LIMIT = 500; // arbitrary limit to prevent excessive results
+
 // handle querystring boolean values
 const booleanSchema = z
   .union([z.boolean(), z.string().trim()])
@@ -34,6 +36,35 @@ const booleanSchema = z
   .optional()
   .default(true);
 
+const parseSecretPathSearch = (search?: string) => {
+  if (!search)
+    return {
+      searchName: "",
+      searchPath: ""
+    };
+
+  if (!search.includes("/"))
+    return {
+      searchName: search,
+      searchPath: ""
+    };
+
+  if (search === "/")
+    return {
+      searchName: "",
+      searchPath: "/"
+    };
+
+  const [searchName, ...searchPathSegments] = search.split("/").reverse();
+  let searchPath = removeTrailingSlash(searchPathSegments.reverse().join("/").toLowerCase());
+  if (!searchPath.startsWith("/")) searchPath = `/${searchPath}`;
+
+  return {
+    searchName,
+    searchPath
+  };
+};
+
 export const registerDashboardRouter = async (server: FastifyZodProvider) => {
   server.route({
     method: "GET",
@@ -134,7 +165,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
       let folders: Awaited<ReturnType<typeof server.services.folder.getFoldersMultiEnv>> | undefined;
       let secrets: Awaited<ReturnType<typeof server.services.secret.getSecretsRawMultiEnv>> | undefined;
       let dynamicSecrets:
-        | Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByFolderIds>>
+        | Awaited<ReturnType<typeof server.services.dynamicSecret.listDynamicSecretsByEnvs>>
         | undefined;
 
       let totalFolderCount: number | undefined;
@@ -218,7 +249,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
       });
 
       if (remainingLimit > 0 && totalDynamicSecretCount > adjustedOffset) {
-        dynamicSecrets = await server.services.dynamicSecret.listDynamicSecretsByFolderIds({
+        dynamicSecrets = await server.services.dynamicSecret.listDynamicSecretsByEnvs({
           actor: req.permission.type,
           actorId: req.permission.id,
           actorAuthMethod: req.permission.authMethod,
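To make the new parseSecretPathSearch helper concrete, here is what it returns for a few representative search strings (worked out from the function above; the inputs are made-up examples):

    parseSecretPathSearch(); // { searchName: "", searchPath: "" }
    parseSecretPathSearch("DB_PASSWORD"); // { searchName: "DB_PASSWORD", searchPath: "" }
    parseSecretPathSearch("/"); // { searchName: "", searchPath: "/" }
    parseSecretPathSearch("/app/api/DB_"); // { searchName: "DB_", searchPath: "/app/api" }
    parseSecretPathSearch("App/Api/DB_"); // { searchName: "DB_", searchPath: "/app/api" } (path is lower-cased and prefixed with "/")

The name part is always the last "/"-separated segment; everything before it becomes a normalised folder path used to filter results.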
@@ -633,4 +664,180 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
server.route({
|
||||||
|
method: "GET",
|
||||||
|
url: "/secrets-deep-search",
|
||||||
|
config: {
|
||||||
|
rateLimit: secretsLimit
|
||||||
|
},
|
||||||
|
schema: {
|
||||||
|
security: [
|
||||||
|
{
|
||||||
|
bearerAuth: []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
querystring: z.object({
|
||||||
|
projectId: z.string().trim(),
|
||||||
|
environments: z.string().trim().transform(decodeURIComponent),
|
||||||
|
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||||
|
search: z.string().trim().optional(),
|
||||||
|
tags: z.string().trim().transform(decodeURIComponent).optional()
|
||||||
|
}),
|
||||||
|
response: {
|
||||||
|
200: z.object({
|
||||||
|
folders: SecretFoldersSchema.extend({ path: z.string() }).array().optional(),
|
||||||
|
dynamicSecrets: SanitizedDynamicSecretSchema.extend({ path: z.string(), environment: z.string() })
|
||||||
|
.array()
|
||||||
|
.optional(),
|
||||||
|
secrets: secretRawSchema
|
||||||
|
.extend({
|
||||||
|
secretPath: z.string().optional(),
|
||||||
|
tags: SecretTagsSchema.pick({
|
||||||
|
id: true,
|
||||||
|
slug: true,
|
||||||
|
color: true
|
||||||
|
})
|
||||||
|
.extend({ name: z.string() })
|
||||||
|
.array()
|
||||||
|
.optional()
|
||||||
|
})
|
||||||
|
.array()
|
||||||
|
.optional()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRequest: verifyAuth([AuthMode.JWT]),
|
||||||
|
handler: async (req) => {
|
||||||
|
const { secretPath, projectId, search } = req.query;
|
||||||
|
|
||||||
|
const environments = req.query.environments.split(",").filter((env) => Boolean(env.trim()));
|
||||||
|
if (!environments.length) throw new BadRequestError({ message: "One or more environments required" });
|
||||||
|
|
||||||
|
const tags = req.query.tags?.split(",").filter((tag) => Boolean(tag.trim())) ?? [];
|
||||||
|
if (!search && !tags.length) throw new BadRequestError({ message: "Search or tags required" });
|
||||||
|
|
||||||
|
const searchHasTags = Boolean(tags.length);
|
||||||
|
|
||||||
|
const allFolders = await server.services.folder.getFoldersDeepByEnvs(
|
||||||
|
{
|
||||||
|
projectId,
|
||||||
|
environments,
|
||||||
|
secretPath
|
||||||
|
},
|
||||||
|
req.permission
|
||||||
|
);
|
||||||
|
|
||||||
|
const { searchName, searchPath } = parseSecretPathSearch(search);
|
||||||
|
|
||||||
|
const folderMappings = allFolders.map((folder) => ({
|
||||||
|
folderId: folder.id,
|
||||||
|
path: folder.path,
|
||||||
|
environment: folder.environment
|
||||||
|
}));
|
||||||
|
|
||||||
|
const sharedFilters = {
|
||||||
|
search: searchName,
|
||||||
|
limit: MAX_DEEP_SEARCH_LIMIT,
|
||||||
|
orderBy: SecretsOrderBy.Name
|
||||||
|
};
|
||||||
|
|
||||||
|
const secrets = await server.services.secret.getSecretsRawByFolderMappings(
|
||||||
|
{
|
||||||
|
projectId,
|
||||||
|
folderMappings,
|
||||||
|
filters: {
|
||||||
|
...sharedFilters,
|
||||||
|
tagSlugs: tags,
|
||||||
|
includeTagsInSearch: true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
req.permission
|
||||||
|
);
|
||||||
|
|
||||||
|
const dynamicSecrets = searchHasTags
|
||||||
|
? []
|
||||||
|
: await server.services.dynamicSecret.listDynamicSecretsByFolderIds(
|
||||||
|
{
|
||||||
|
projectId,
|
||||||
|
folderMappings,
|
||||||
|
filters: sharedFilters
|
||||||
|
},
|
||||||
|
req.permission
|
||||||
|
);
|
||||||
|
|
||||||
|
for await (const environment of environments) {
|
||||||
|
const secretCountForEnv = secrets.filter((secret) => secret.environment === environment).length;
|
||||||
|
|
||||||
|
if (secretCountForEnv) {
|
||||||
|
await server.services.auditLog.createAuditLog({
|
||||||
|
projectId,
|
||||||
|
...req.auditLogInfo,
|
||||||
|
event: {
|
||||||
|
type: EventType.GET_SECRETS,
|
||||||
|
metadata: {
|
||||||
|
environment,
|
||||||
|
secretPath,
|
||||||
|
numberOfSecrets: secretCountForEnv
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
|
||||||
|
await server.services.telemetry.sendPostHogEvents({
|
||||||
|
event: PostHogEventTypes.SecretPulled,
|
||||||
|
distinctId: getTelemetryDistinctId(req),
|
||||||
|
properties: {
|
||||||
|
numberOfSecrets: secretCountForEnv,
|
||||||
|
workspaceId: projectId,
|
||||||
|
environment,
|
||||||
|
secretPath,
|
||||||
|
channel: getUserAgentType(req.headers["user-agent"]),
|
||||||
|
...req.auditLogInfo
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const sliceQuickSearch = <T>(array: T[]) => array.slice(0, 25);
|
||||||
|
|
||||||
|
return {
|
||||||
|
secrets: sliceQuickSearch(
|
||||||
|
searchPath ? secrets.filter((secret) => secret.secretPath.endsWith(searchPath)) : secrets
|
||||||
|
),
|
||||||
|
dynamicSecrets: sliceQuickSearch(
|
||||||
|
searchPath
|
||||||
|
? dynamicSecrets.filter((dynamicSecret) => dynamicSecret.path.endsWith(searchPath))
|
||||||
|
: dynamicSecrets
|
||||||
|
),
|
||||||
|
folders: searchHasTags
|
||||||
|
? []
|
||||||
|
: sliceQuickSearch(
|
||||||
|
allFolders.filter((folder) => {
|
||||||
|
const [folderName, ...folderPathSegments] = folder.path.split("/").reverse();
|
||||||
|
const folderPath = folderPathSegments.reverse().join("/").toLowerCase() || "/";
|
||||||
|
|
||||||
|
if (searchPath) {
|
||||||
|
if (searchPath === "/") {
|
||||||
|
// only show root folders if no folder name search
|
||||||
|
if (!searchName) return folderPath === searchPath;
|
||||||
|
|
||||||
|
// start partial match on root folders
|
||||||
|
return folderName.toLowerCase().startsWith(searchName.toLowerCase());
|
||||||
|
}
|
||||||
|
|
||||||
|
// support ending partial path match
|
||||||
|
return (
|
||||||
|
folderPath.endsWith(searchPath) && folderName.toLowerCase().startsWith(searchName.toLowerCase())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// no search path, "fuzzy" match all folders
|
||||||
|
return folderName.toLowerCase().includes(searchName.toLowerCase());
|
||||||
|
})
|
||||||
|
)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
});
|
||||||
};
|
};
|
||||||
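A rough client-side sketch of the new deep-search endpoint. The /api/v1/dashboard mount point, the host, and the token handling are assumptions; the query parameters and response shape are taken from the route above:

    async function deepSearchSecrets(jwt: string, projectId: string) {
      const params = new URLSearchParams({
        projectId,
        environments: "dev,staging,prod", // comma-separated environment slugs (at least one required)
        secretPath: "/",
        search: "api/DB_" // and/or `tags` as comma-separated tag slugs; one of the two is required
      });

      const res = await fetch(`https://infisical.example.com/api/v1/dashboard/secrets-deep-search?${params}`, {
        headers: { Authorization: `Bearer ${jwt}` }
      });

      // Each list is optional and capped to the first 25 quick-search matches.
      const { secrets, folders, dynamicSecrets } = await res.json();
      return { secrets, folders, dynamicSecrets };
    }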
@@ -37,7 +37,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
       }),
       response: {
         200: z.object({
-          identity: IdentitiesSchema
+          identity: IdentitiesSchema.extend({
+            authMethods: z.array(z.string())
+          })
         })
       }
     },
@@ -216,7 +218,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
             permissions: true,
             description: true
           }).optional(),
-          identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
+          identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
+            authMethods: z.array(z.string())
+          })
         })
       })
     }
@@ -261,7 +265,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
             permissions: true,
             description: true
           }).optional(),
-          identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
+          identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
+            authMethods: z.array(z.string())
+          })
         }).array(),
         totalCount: z.number()
       })
@@ -319,7 +325,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
               temporaryAccessEndTime: z.date().nullable().optional()
             })
           ),
-          identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
+          identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
+            authMethods: z.array(z.string())
+          }),
           project: SanitizedProjectSchema.pick({ name: true, id: true })
         })
       )
@@ -891,6 +891,48 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
     }
   });
 
+  server.route({
+    method: "GET",
+    url: "/:integrationAuthId/bitbucket/environments",
+    config: {
+      rateLimit: readLimit
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    schema: {
+      params: z.object({
+        integrationAuthId: z.string().trim()
+      }),
+      querystring: z.object({
+        workspaceSlug: z.string().trim().min(1, { message: "Workspace slug required" }),
+        repoSlug: z.string().trim().min(1, { message: "Repo slug required" })
+      }),
+      response: {
+        200: z.object({
+          environments: z
+            .object({
+              name: z.string(),
+              slug: z.string(),
+              uuid: z.string(),
+              type: z.string()
+            })
+            .array()
+        })
+      }
+    },
+    handler: async (req) => {
+      const environments = await server.services.integrationAuth.getBitbucketEnvironments({
+        actorId: req.permission.id,
+        actor: req.permission.type,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        id: req.params.integrationAuthId,
+        workspaceSlug: req.query.workspaceSlug,
+        repoSlug: req.query.repoSlug
+      });
+      return { environments };
+    }
+  });
+
   server.route({
     method: "GET",
     url: "/:integrationAuthId/northflank/secret-groups",
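A short sketch of calling the new Bitbucket environments lookup. The /api/v1/integration-auth prefix, the host, and the example slugs are assumptions; the parameters and response shape come from the route above:

    async function listBitbucketEnvironments(jwt: string, integrationAuthId: string) {
      const query = new URLSearchParams({ workspaceSlug: "my-workspace", repoSlug: "my-repo" }); // example slugs
      const res = await fetch(
        `https://infisical.example.com/api/v1/integration-auth/${integrationAuthId}/bitbucket/environments?${query}`,
        { headers: { Authorization: `Bearer ${jwt}` } }
      );
      const { environments } = await res.json(); // [{ name, slug, uuid, type }, ...]
      return environments;
    }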
@@ -4,6 +4,7 @@ import { SecretFoldersSchema } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
 import { FOLDERS } from "@app/lib/api-docs";
 import { prefixWithSlash, removeTrailingSlash } from "@app/lib/fn";
+import { isValidFolderName } from "@app/lib/validator";
 import { readLimit, secretsLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -25,7 +26,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
       body: z.object({
         workspaceId: z.string().trim().describe(FOLDERS.CREATE.workspaceId),
         environment: z.string().trim().describe(FOLDERS.CREATE.environment),
-        name: z.string().trim().describe(FOLDERS.CREATE.name),
+        name: z
+          .string()
+          .trim()
+          .describe(FOLDERS.CREATE.name)
+          .refine((name) => isValidFolderName(name), {
+            message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
+          }),
         path: z
           .string()
           .trim()
@@ -97,7 +104,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
       body: z.object({
         workspaceId: z.string().trim().describe(FOLDERS.UPDATE.workspaceId),
         environment: z.string().trim().describe(FOLDERS.UPDATE.environment),
-        name: z.string().trim().describe(FOLDERS.UPDATE.name),
+        name: z
+          .string()
+          .trim()
+          .describe(FOLDERS.UPDATE.name)
+          .refine((name) => isValidFolderName(name), {
+            message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
+          }),
         path: z
           .string()
           .trim()
@@ -170,7 +183,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
         .object({
           id: z.string().describe(FOLDERS.UPDATE.folderId),
           environment: z.string().trim().describe(FOLDERS.UPDATE.environment),
-          name: z.string().trim().describe(FOLDERS.UPDATE.name),
+          name: z
+            .string()
+            .trim()
+            .describe(FOLDERS.UPDATE.name)
+            .refine((name) => isValidFolderName(name), {
+              message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
+            }),
           path: z
             .string()
             .trim()
@@ -58,7 +58,9 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
           permissions: true,
           description: true
         }).optional(),
-        identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
+        identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
+          authMethods: z.array(z.string())
+        })
       })
     ).array(),
     totalCount: z.number()
@@ -264,7 +264,9 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
             temporaryAccessEndTime: z.date().nullable().optional()
           })
         ),
-        identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
+        identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
+          authMethods: z.array(z.string())
+        }),
         project: SanitizedProjectSchema.pick({ name: true, id: true })
       })
       .array(),
@@ -285,6 +287,7 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
         orderDirection: req.query.orderDirection,
         search: req.query.search
       });
+
       return { identityMemberships, totalCount };
     }
   });
@@ -328,7 +331,9 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
             temporaryAccessEndTime: z.date().nullable().optional()
          })
         ),
-        identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true }),
+        identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
+          authMethods: z.array(z.string())
+        }),
         project: SanitizedProjectSchema.pick({ name: true, id: true })
       })
     })
@@ -9,6 +9,7 @@ import {
   ProjectKeysSchema
 } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
+import { InfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-types";
 import { PROJECTS } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
@@ -169,7 +170,15 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
           })
           .optional()
           .describe(PROJECTS.CREATE.slug),
-        kmsKeyId: z.string().optional()
+        kmsKeyId: z.string().optional(),
+        template: z
+          .string()
+          .refine((v) => slugify(v) === v, {
+            message: "Template name must be in slug format"
+          })
+          .optional()
+          .default(InfisicalProjectTemplate.Default)
+          .describe(PROJECTS.CREATE.template)
       }),
       response: {
         200: z.object({
@@ -186,7 +195,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
         actorAuthMethod: req.permission.authMethod,
         workspaceName: req.body.projectName,
         slug: req.body.slug,
-        kmsKeyId: req.body.kmsKeyId
+        kmsKeyId: req.body.kmsKeyId,
+        template: req.body.template
       });
 
       await server.services.telemetry.sendPostHogEvents({
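With the schema change above, a project-creation request body can now name a project template to apply. A hedged example body (the template name below is illustrative, not part of this diff):

    const createProjectBody = {
      projectName: "Payments Service",
      slug: "payments-service", // optional, must be slug-friendly
      template: "backend-microservice" // optional; the slug-friendly name of an existing project template
    };

When template is omitted it defaults to InfisicalProjectTemplate.Default, so existing API clients keep working unchanged.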
@@ -199,6 +209,20 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
         }
       });
 
+      if (req.body.template) {
+        await server.services.auditLog.createAuditLog({
+          ...req.auditLogInfo,
+          orgId: req.permission.orgId,
+          event: {
+            type: EventType.APPLY_PROJECT_TEMPLATE,
+            metadata: {
+              template: req.body.template,
+              projectId: project.id
+            }
+          }
+        });
+      }
+
       return { project };
     }
   });
@@ -23,6 +23,18 @@ import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
 
 import { secretRawSchema } from "../sanitizedSchemas";
 
+const SecretReferenceNode = z.object({
+  key: z.string(),
+  value: z.string().optional(),
+  environment: z.string(),
+  secretPath: z.string()
+});
+type TSecretReferenceNode = z.infer<typeof SecretReferenceNode> & { children: TSecretReferenceNode[] };
+
+const SecretReferenceNodeTree: z.ZodType<TSecretReferenceNode> = SecretReferenceNode.extend({
+  children: z.lazy(() => SecretReferenceNodeTree.array())
+});
+
 export const registerSecretRouter = async (server: FastifyZodProvider) => {
   server.route({
     method: "POST",
@@ -2102,6 +2114,58 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
     }
   });
 
+  server.route({
+    method: "GET",
+    url: "/raw/:secretName/secret-reference-tree",
+    config: {
+      rateLimit: secretsLimit
+    },
+    schema: {
+      description: "Get secret reference tree",
+      security: [
+        {
+          bearerAuth: []
+        }
+      ],
+      params: z.object({
+        secretName: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.secretName)
+      }),
+      querystring: z.object({
+        workspaceId: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.workspaceId),
+        environment: z.string().trim().describe(RAW_SECRETS.GET_REFERENCE_TREE.environment),
+        secretPath: z
+          .string()
+          .trim()
+          .default("/")
+          .transform(removeTrailingSlash)
+          .describe(RAW_SECRETS.GET_REFERENCE_TREE.secretPath)
+      }),
+      response: {
+        200: z.object({
+          tree: SecretReferenceNodeTree,
+          value: z.string().optional()
+        })
+      }
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    handler: async (req) => {
+      const { secretName } = req.params;
+      const { secretPath, environment, workspaceId } = req.query;
+      const { tree, value } = await server.services.secret.getSecretReferenceTree({
+        actorId: req.permission.id,
+        actor: req.permission.type,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        projectId: workspaceId,
+        secretName,
+        secretPath,
+        environment
+      });
+
+      return { tree, value };
+    }
+  });
+
   server.route({
     method: "POST",
     url: "/backfill-secret-references",
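A small sketch of fetching a secret's reference tree through the new route. The /api/v3/secrets prefix, host, and example names are assumptions; the parameters and response shape follow the schema above:

    async function getSecretReferenceTree(jwt: string) {
      const query = new URLSearchParams({
        workspaceId: "<project-id>",
        environment: "dev",
        secretPath: "/app"
      });
      const res = await fetch(
        `https://infisical.example.com/api/v3/secrets/raw/DB_URL/secret-reference-tree?${query}`,
        { headers: { Authorization: `Bearer ${jwt}` } }
      );
      // `tree` is recursive: { key, value?, environment, secretPath, children: [...] }.
      const { tree, value } = await res.json();
      return { tree, value };
    }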
|
@@ -235,7 +235,8 @@ export const certificateTemplateServiceFactory = ({
|
|||||||
actorId,
|
actorId,
|
||||||
actorAuthMethod,
|
actorAuthMethod,
|
||||||
actor,
|
actor,
|
||||||
actorOrgId
|
actorOrgId,
|
||||||
|
disableBootstrapCertValidation
|
||||||
}: TCreateEstConfigurationDTO) => {
|
}: TCreateEstConfigurationDTO) => {
|
||||||
const plan = await licenseService.getPlan(actorOrgId);
|
const plan = await licenseService.getPlan(actorOrgId);
|
||||||
if (!plan.pkiEst) {
|
if (!plan.pkiEst) {
|
||||||
@@ -266,6 +267,8 @@ export const certificateTemplateServiceFactory = ({
|
|||||||
|
|
||||||
const appCfg = getConfig();
|
const appCfg = getConfig();
|
||||||
|
|
||||||
|
+let encryptedCaChain: Buffer | undefined;
+if (caChain) {
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: certTemplate.projectId,
projectDAL,
@@ -289,16 +292,20 @@ export const certificateTemplateServiceFactory = ({
kmsId: certificateManagerKmsId
});

-const { cipherTextBlob: encryptedCaChain } = await kmsEncryptor({
+const { cipherTextBlob } = await kmsEncryptor({
plainText: Buffer.from(caChain)
});

+encryptedCaChain = cipherTextBlob;
+}

const hashedPassphrase = await bcrypt.hash(passphrase, appCfg.SALT_ROUNDS);
const estConfig = await certificateTemplateEstConfigDAL.create({
certificateTemplateId,
hashedPassphrase,
encryptedCaChain,
-isEnabled
+isEnabled,
+disableBootstrapCertValidation
});

return { ...estConfig, projectId: certTemplate.projectId };
@@ -312,7 +319,8 @@ export const certificateTemplateServiceFactory = ({
actorId,
actorAuthMethod,
actor,
-actorOrgId
+actorOrgId,
+disableBootstrapCertValidation
}: TUpdateEstConfigurationDTO) => {
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.pkiEst) {
@@ -360,7 +368,8 @@ export const certificateTemplateServiceFactory = ({
});

const updatedData: TCertificateTemplateEstConfigsUpdate = {
-isEnabled
+isEnabled,
+disableBootstrapCertValidation
};

if (caChain) {
@@ -442,18 +451,24 @@ export const certificateTemplateServiceFactory = ({
kmsId: certificateManagerKmsId
});

-const decryptedCaChain = await kmsDecryptor({
+let decryptedCaChain = "";
+if (estConfig.encryptedCaChain) {
+decryptedCaChain = (
+await kmsDecryptor({
cipherTextBlob: estConfig.encryptedCaChain
-});
+})
+).toString();
+}

return {
certificateTemplateId,
id: estConfig.id,
isEnabled: estConfig.isEnabled,
-caChain: decryptedCaChain.toString(),
+caChain: decryptedCaChain,
hashedPassphrase: estConfig.hashedPassphrase,
projectId: certTemplate.projectId,
-orgId: certTemplate.orgId
+orgId: certTemplate.orgId,
+disableBootstrapCertValidation: estConfig.disableBootstrapCertValidation
};
};
};

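Taken together, the hunks above make the EST CA chain optional and thread a new disableBootstrapCertValidation flag through create, update and get. A minimal TypeScript sketch of a create call under the new contract; the service handle and helper name are illustrative assumptions, while the DTO fields mirror TCreateEstConfigurationDTO in the next hunk.

// --- illustrative sketch, not part of the diff ---
type CreateEstConfigInput = {
  certificateTemplateId: string;
  passphrase: string;
  isEnabled: boolean;
  caChain?: string; // now optional
  disableBootstrapCertValidation: boolean; // new flag
};

async function enableEstWithoutCaChain(
  estService: { createEstConfiguration: (dto: CreateEstConfigInput) => Promise<unknown> },
  certificateTemplateId: string
) {
  // With no caChain supplied, encryptedCaChain stays undefined in the service above,
  // and the new flag records that bootstrap client-cert validation is being skipped.
  return estService.createEstConfiguration({
    certificateTemplateId,
    passphrase: "est-enrollment-passphrase",
    isEnabled: true,
    disableBootstrapCertValidation: true
  });
}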
@@ -34,9 +34,10 @@ export type TDeleteCertTemplateDTO = {

export type TCreateEstConfigurationDTO = {
certificateTemplateId: string;
-caChain: string;
+caChain?: string;
passphrase: string;
isEnabled: boolean;
+disableBootstrapCertValidation: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TUpdateEstConfigurationDTO = {
@@ -44,6 +45,7 @@ export type TUpdateEstConfigurationDTO = {
caChain?: string;
passphrase?: string;
isEnabled?: boolean;
+disableBootstrapCertValidation?: boolean;
} & Omit<TProjectPermission, "projectId">;

export type TGetEstConfigurationDTO =
@@ -1,9 +1,9 @@
import { ForbiddenError } from "@casl/ability";
-import { FastifyRequest } from "fastify";

import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionCmekActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
+import { OrgServiceActor } from "@app/lib/types";
import {
TCmekDecryptDTO,
TCmekEncryptDTO,
@@ -23,7 +23,7 @@ type TCmekServiceFactoryDep = {
export type TCmekServiceFactory = ReturnType<typeof cmekServiceFactory>;

export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TCmekServiceFactoryDep) => {
-const createCmek = async ({ projectId, ...dto }: TCreateCmekDTO, actor: FastifyRequest["permission"]) => {
+const createCmek = async ({ projectId, ...dto }: TCreateCmekDTO, actor: OrgServiceActor) => {
const { permission } = await permissionService.getProjectPermission(
actor.type,
actor.id,
@@ -43,7 +43,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return cmek;
};

-const updateCmekById = async ({ keyId, ...data }: TUpdabteCmekByIdDTO, actor: FastifyRequest["permission"]) => {
+const updateCmekById = async ({ keyId, ...data }: TUpdabteCmekByIdDTO, actor: OrgServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
@@ -65,7 +65,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return cmek;
};

-const deleteCmekById = async (keyId: string, actor: FastifyRequest["permission"]) => {
+const deleteCmekById = async (keyId: string, actor: OrgServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
@@ -87,10 +87,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return cmek;
};

-const listCmeksByProjectId = async (
-{ projectId, ...filters }: TListCmeksByProjectIdDTO,
-actor: FastifyRequest["permission"]
-) => {
+const listCmeksByProjectId = async ({ projectId, ...filters }: TListCmeksByProjectIdDTO, actor: OrgServiceActor) => {
const { permission } = await permissionService.getProjectPermission(
actor.type,
actor.id,
@@ -106,7 +103,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return { cmeks, totalCount };
};

-const cmekEncrypt = async ({ keyId, plaintext }: TCmekEncryptDTO, actor: FastifyRequest["permission"]) => {
+const cmekEncrypt = async ({ keyId, plaintext }: TCmekEncryptDTO, actor: OrgServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
@@ -132,7 +129,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
return cipherTextBlob.toString("base64");
};

-const cmekDecrypt = async ({ keyId, ciphertext }: TCmekDecryptDTO, actor: FastifyRequest["permission"]) => {
+const cmekDecrypt = async ({ keyId, ciphertext }: TCmekDecryptDTO, actor: OrgServiceActor) => {
const key = await kmsDAL.findById(keyId);

if (!key) throw new NotFoundError({ message: `Key with ID ${keyId} not found` });
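The only substantive change in this service is the actor type, which moves from the Fastify-bound FastifyRequest["permission"] to the shared OrgServiceActor so the factory can be driven outside an HTTP handler. A rough sketch of a decoupled call site; the exact fields of OrgServiceActor are an assumption, since the hunks only show type and id being read.

// --- illustrative sketch, not part of the diff ---
type OrgServiceActorSketch = {
  type: string; // e.g. "user" or "identity"
  id: string;
  orgId?: string;
  authMethod?: string | null;
};

async function listProjectKeys(
  cmekService: {
    listCmeksByProjectId: (dto: { projectId: string }, actor: OrgServiceActorSketch) => Promise<unknown>;
  },
  projectId: string,
  actor: OrgServiceActorSketch
) {
  // Nothing here depends on a FastifyRequest being in scope.
  return cmekService.listCmeksByProjectId({ projectId }, actor);
}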
@@ -1,9 +1,9 @@
import { ForbiddenError } from "@casl/ability";
-import { FastifyRequest } from "fastify";

import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
+import { OrgServiceActor } from "@app/lib/types";
import { constructGroupOrgMembershipRoleMappings } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-fns";
import { TSyncExternalGroupOrgMembershipRoleMappingsDTO } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-types";
import { TOrgRoleDALFactory } from "@app/services/org/org-role-dal";
@@ -25,7 +25,7 @@ export const externalGroupOrgRoleMappingServiceFactory = ({
permissionService,
orgRoleDAL
}: TExternalGroupOrgRoleMappingServiceFactoryDep) => {
-const listExternalGroupOrgRoleMappings = async (actor: FastifyRequest["permission"]) => {
+const listExternalGroupOrgRoleMappings = async (actor: OrgServiceActor) => {
const { permission } = await permissionService.getOrgPermission(
actor.type,
actor.id,
@@ -46,7 +46,7 @@ export const externalGroupOrgRoleMappingServiceFactory = ({

const updateExternalGroupOrgRoleMappings = async (
dto: TSyncExternalGroupOrgMembershipRoleMappingsDTO,
-actor: FastifyRequest["permission"]
+actor: OrgServiceActor
) => {
const { permission } = await permissionService.getOrgPermission(
actor.type,
@@ -4,7 +4,7 @@ import sjcl from "sjcl";
import tweetnacl from "tweetnacl";
import tweetnaclUtil from "tweetnacl-util";

-import { SecretType } from "@app/db/schemas";
+import { SecretType, TSecretFolders } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
@@ -35,7 +35,7 @@ export type TImportDataIntoInfisicalDTO = {
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "create">;

-folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath">;
+folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath" | "findById">;
projectService: Pick<TProjectServiceFactory, "createProject">;
projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
@@ -67,6 +67,7 @@ export const parseEnvKeyDataFn = async (decryptedJson: string): Promise<Infisica
const infisicalImportData: InfisicalImportData = {
projects: [],
environments: [],
+folders: [],
secrets: []
};

@@ -80,25 +81,410 @@ export const parseEnvKeyDataFn = async (decryptedJson: string): Promise<Infisica
envTemplates.set(env.id, env.defaultName);
}

+// custom base environments
+for (const env of parsedJson.nonDefaultEnvironmentRoles) {
+envTemplates.set(env.id, env.name);
+}

// environments
for (const env of parsedJson.baseEnvironments) {
+const appId = parsedJson.apps.find((a) => a.id === env.envParentId)?.id;
+
+// If we find the app from the envParentId, we know this is a root-level environment.
+if (appId) {
infisicalImportData.environments.push({
id: env.id,
name: envTemplates.get(env.environmentRoleId)!,
-projectId: env.envParentId
+projectId: appId
});
}
+}

+const findRootInheritedSecret = (
+secret: { val?: string; inheritsEnvironmentId?: string },
+secretName: string,
+envs: typeof parsedJson.envs
+): { val?: string } => {
+if (!secret) {
+return {
+val: ""
+};
+}

+// If we have a direct value, return it
+if (secret.val !== undefined) {
+return secret;
+}

+// If there's no inheritance, return the secret as is
+if (!secret.inheritsEnvironmentId) {
+return secret;
+}

+const inheritedEnv = envs[secret.inheritsEnvironmentId];
+if (!inheritedEnv) return secret;
+return findRootInheritedSecret(inheritedEnv.variables[secretName], secretName, envs);
+};

+const targetIdToFolderIdsMap = new Map<string, string>();

+const processBranches = () => {
+for (const subEnv of parsedJson.subEnvironments) {
+const app = parsedJson.apps.find((a) => a.id === subEnv.envParentId);
+const block = parsedJson.blocks.find((b) => b.id === subEnv.envParentId);

+if (app) {
+// Handle regular app branches
+const branchEnvironment = infisicalImportData.environments.find((e) => e.id === subEnv.parentEnvironmentId);

+// check if the folder already exists in the same parent environment with the same name

+const folderExists = infisicalImportData.folders.some(
+(f) => f.name === subEnv.subName && f.parentFolderId === subEnv.parentEnvironmentId
+);

+// No need to map to target ID's here, because we are not dealing with blocks
+if (!folderExists) {
+infisicalImportData.folders.push({
+name: subEnv.subName,
+parentFolderId: subEnv.parentEnvironmentId,
+environmentId: branchEnvironment!.id,
+id: subEnv.id
+});
+}
+}

+if (block) {
+// Handle block branches
+// 1. Find all apps that use this block
+const appsUsingBlock = parsedJson.appBlocks.filter((ab) => ab.blockId === block.id);

+for (const { appId, orderIndex } of appsUsingBlock) {
+// 2. Find the matching environment in the app based on the environment role
+const blockBaseEnv = parsedJson.baseEnvironments.find((be) => be.id === subEnv.parentEnvironmentId);

+// eslint-disable-next-line no-continue
+if (!blockBaseEnv) continue;

+const matchingAppEnv = parsedJson.baseEnvironments.find(
+(be) => be.envParentId === appId && be.environmentRoleId === blockBaseEnv.environmentRoleId
+);

+// eslint-disable-next-line no-continue
+if (!matchingAppEnv) continue;

+const folderExists = infisicalImportData.folders.some(
+(f) => f.name === subEnv.subName && f.parentFolderId === matchingAppEnv.id
+);

+if (!folderExists) {
+// 3. Create a folder in the matching app environment
+infisicalImportData.folders.push({
+name: subEnv.subName,
+parentFolderId: matchingAppEnv.id,
+environmentId: matchingAppEnv.id,
+id: `${subEnv.id}-${appId}` // Create unique ID for each app's copy of the branch
+});
+} else {
+// folder already exists, so lets map the old folder id to the new folder id
+targetIdToFolderIdsMap.set(subEnv.id, `${subEnv.id}-${appId}`);
+}

+// 4. Process secrets in the block branch for this app
+const branchSecrets = parsedJson.envs[subEnv.id]?.variables || {};
+for (const [secretName, secretData] of Object.entries(branchSecrets)) {
+if (secretData.inheritsEnvironmentId) {
+const resolvedSecret = findRootInheritedSecret(secretData, secretName, parsedJson.envs);

+// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
+const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
+(s) => s.name === secretName && s.environmentId === matchingAppEnv.id
+);

+if (preExistingSecretIndex !== -1) {
+const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

+if (
+preExistingSecret.appBlockOrderIndex !== undefined &&
+orderIndex > preExistingSecret.appBlockOrderIndex
+) {
+// if the existing secret has a lower orderIndex, we should replace it
+infisicalImportData.secrets[preExistingSecretIndex] = {
+...preExistingSecret,
+value: resolvedSecret.val || "",
+appBlockOrderIndex: orderIndex
+};
+}

+// eslint-disable-next-line no-continue
+continue;
+}

+infisicalImportData.secrets.push({
+id: randomUUID(),
+name: secretName,
+environmentId: matchingAppEnv.id,
+value: resolvedSecret.val || "",
+folderId: `${subEnv.id}-${appId}`,
+appBlockOrderIndex: orderIndex
+});
+} else {
+// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
+const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
+(s) => s.name === secretName && s.environmentId === matchingAppEnv.id
+);

+if (preExistingSecretIndex !== -1) {
+const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

+if (
+preExistingSecret.appBlockOrderIndex !== undefined &&
+orderIndex > preExistingSecret.appBlockOrderIndex
+) {
+// if the existing secret has a lower orderIndex, we should replace it
+infisicalImportData.secrets[preExistingSecretIndex] = {
+...preExistingSecret,
+value: secretData.val || "",
+appBlockOrderIndex: orderIndex
+};
+}

+// eslint-disable-next-line no-continue
+continue;
+}

+infisicalImportData.secrets.push({
+id: randomUUID(),
+name: secretName,
+environmentId: matchingAppEnv.id,
+value: secretData.val || "",
+folderId: `${subEnv.id}-${appId}`,
+appBlockOrderIndex: orderIndex
+});
+}
+}
+}
+}
+}
+};

+const processBlocksForApp = (appIds: string[]) => {
+for (const appId of appIds) {
+const blocksInApp = parsedJson.appBlocks.filter((ab) => ab.appId === appId);
+logger.info(
+{
+blocksInApp
+},
+"[processBlocksForApp]: Processing blocks for app"
+);

+for (const appBlock of blocksInApp) {
+// 1. find all base environments for this block
+const blockBaseEnvironments = parsedJson.baseEnvironments.filter((env) => env.envParentId === appBlock.blockId);
+logger.info(
+{
+blockBaseEnvironments
+},
+"[processBlocksForApp]: Processing block base environments"
+);

+for (const blockBaseEnvironment of blockBaseEnvironments) {
+// 2. find the corresponding environment that is not from the block
+const matchingEnv = parsedJson.baseEnvironments.find(
+(be) =>
+be.environmentRoleId === blockBaseEnvironment.environmentRoleId && be.envParentId !== appBlock.blockId
+);

+if (!matchingEnv) {
+throw new Error(`Could not find environment for block ${appBlock.blockId}`);
+}

+// 3. find all the secrets for this environment block
+const blockSecrets = parsedJson.envs[blockBaseEnvironment.id].variables;

+logger.info(
+{
+blockSecretsLength: Object.keys(blockSecrets).length
+},
+"[processBlocksForApp]: Processing block secrets"
+);

+// 4. process each secret
+for (const secret of Object.keys(blockSecrets)) {
+const selectedSecret = blockSecrets[secret];

+if (selectedSecret.inheritsEnvironmentId) {
+const resolvedSecret = findRootInheritedSecret(selectedSecret, secret, parsedJson.envs);

+// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
+const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
+(s) => s.name === secret && s.environmentId === matchingEnv.id
+);

+if (preExistingSecretIndex !== -1) {
+const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

+if (
+preExistingSecret.appBlockOrderIndex !== undefined &&
+appBlock.orderIndex > preExistingSecret.appBlockOrderIndex
+) {
+// if the existing secret has a lower orderIndex, we should replace it
+infisicalImportData.secrets[preExistingSecretIndex] = {
+...preExistingSecret,
+value: selectedSecret.val || "",
+appBlockOrderIndex: appBlock.orderIndex
+};
+}

+// eslint-disable-next-line no-continue
+continue;
+}

-// secrets
-for (const env of Object.keys(parsedJson.envs)) {
-if (!env.includes("|")) {
-const envData = parsedJson.envs[env];
-for (const secret of Object.keys(envData.variables)) {
infisicalImportData.secrets.push({
id: randomUUID(),
name: secret,
-environmentId: env,
-value: envData.variables[secret].val
+environmentId: matchingEnv.id,
+value: resolvedSecret.val || "",
+appBlockOrderIndex: appBlock.orderIndex
+});
+} else {
+// If the secret already exists in the environment, we need to check the orderIndex of the appBlock. The appBlock with the highest orderIndex should take precedence.
+const preExistingSecretIndex = infisicalImportData.secrets.findIndex(
+(s) => s.name === secret && s.environmentId === matchingEnv.id
+);

+if (preExistingSecretIndex !== -1) {
+const preExistingSecret = infisicalImportData.secrets[preExistingSecretIndex];

+if (
+preExistingSecret.appBlockOrderIndex !== undefined &&
+appBlock.orderIndex > preExistingSecret.appBlockOrderIndex
+) {
+// if the existing secret has a lower orderIndex, we should replace it
+infisicalImportData.secrets[preExistingSecretIndex] = {
+...preExistingSecret,
+value: selectedSecret.val || "",
+appBlockOrderIndex: appBlock.orderIndex
+};
+}

+// eslint-disable-next-line no-continue
+continue;
+}

+infisicalImportData.secrets.push({
+id: randomUUID(),
+name: secret,
+environmentId: matchingEnv.id,
+value: selectedSecret.val || "",
+appBlockOrderIndex: appBlock.orderIndex
+});
+}
+}
+}
+}
+}
+};

+processBranches();
+processBlocksForApp(infisicalImportData.projects.map((app) => app.id));

+for (const env of Object.keys(parsedJson.envs)) {
+// Skip user-specific environments
+// eslint-disable-next-line no-continue
+if (env.includes("|")) continue;

+const envData = parsedJson.envs[env];
+const baseEnv = parsedJson.baseEnvironments.find((be) => be.id === env);
+const subEnv = parsedJson.subEnvironments.find((se) => se.id === env);

+// Skip if we can't find either a base environment or sub-environment
+if (!baseEnv && !subEnv) {
+logger.info(
+{
+envId: env
+},
+"[parseEnvKeyDataFn]: Could not find base or sub environment for env, skipping"
+);
+// eslint-disable-next-line no-continue
+continue;
+}

+// If this is a base environment of a block, skip it (handled by processBlocksForApp)
+if (baseEnv) {
+const isBlock = parsedJson.appBlocks.some((block) => block.blockId === baseEnv.envParentId);
+if (isBlock) {
+logger.info(
+{
+envId: env,
+baseEnv
+},
+"[parseEnvKeyDataFn]: Skipping block environment (handled separately)"
+);
+// eslint-disable-next-line no-continue
+continue;
+}
+}

+// Process each secret in this environment or branch
+for (const [secretName, secretData] of Object.entries(envData.variables)) {
+const indexOfExistingSecret = infisicalImportData.secrets.findIndex(
+(s) =>
+s.name === secretName &&
+(s.environmentId === subEnv?.parentEnvironmentId || s.environmentId === env) &&
+(s.folderId ? s.folderId === subEnv?.id : true) &&
+(secretData.val ? s.value === secretData.val : true)
+);

+if (secretData.inheritsEnvironmentId) {
+const resolvedSecret = findRootInheritedSecret(secretData, secretName, parsedJson.envs);
+// Check if there's already a secret with this name in the environment, if there is, we should override it. Because if there's already one, we know its coming from a block.
+// Variables from the normal environment should take precedence over variables from the block.
+if (indexOfExistingSecret !== -1) {
+// if a existing secret is found, we should replace it directly
+const newSecret: (typeof infisicalImportData.secrets)[number] = {
+...infisicalImportData.secrets[indexOfExistingSecret],
+value: resolvedSecret.val || ""
+};

+infisicalImportData.secrets[indexOfExistingSecret] = newSecret;

+// eslint-disable-next-line no-continue
+continue;
+}

+infisicalImportData.secrets.push({
+id: randomUUID(),
+name: secretName,
+environmentId: subEnv ? subEnv.parentEnvironmentId : env,
+value: resolvedSecret.val || "",
+...(subEnv && { folderId: subEnv.id }) // Add folderId if this is a branch secret
+});
+} else {
+// Check if there's already a secret with this name in the environment, if there is, we should override it. Because if there's already one, we know its coming from a block.
+// Variables from the normal environment should take precedence over variables from the block.

+if (indexOfExistingSecret !== -1) {
+// if a existing secret is found, we should replace it directly
+const newSecret: (typeof infisicalImportData.secrets)[number] = {
+...infisicalImportData.secrets[indexOfExistingSecret],
+value: secretData.val || ""
+};

+infisicalImportData.secrets[indexOfExistingSecret] = newSecret;

+// eslint-disable-next-line no-continue
+continue;
+}

+const folderId = targetIdToFolderIdsMap.get(subEnv?.id || "") || subEnv?.id;

+infisicalImportData.secrets.push({
+id: randomUUID(),
+name: secretName,
+environmentId: subEnv ? subEnv.parentEnvironmentId : env,
+value: secretData.val || "",
+...(folderId && { folderId })
});
}
}
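The repeated appBlockOrderIndex checks above all implement one precedence rule: when several EnvKey blocks attached to an app define the same variable, the block with the highest orderIndex wins, and a value set directly in the app's own environment beats any block value. A standalone sketch of that rule; the helper and names below are illustrative, not part of the migration code.

// --- illustrative sketch, not part of the diff ---
type CandidateSecret = {
  name: string;
  value: string;
  appBlockOrderIndex?: number; // undefined = defined directly in the app environment
};

function resolvePrecedence(candidates: CandidateSecret[]): Map<string, CandidateSecret> {
  const winners = new Map<string, CandidateSecret>();
  for (const candidate of candidates) {
    const current = winners.get(candidate.name);
    if (!current) {
      winners.set(candidate.name, candidate);
      continue;
    }
    const currentIsDirect = current.appBlockOrderIndex === undefined;
    const candidateIsDirect = candidate.appBlockOrderIndex === undefined;
    if (currentIsDirect) continue; // direct environment values always win
    if (candidateIsDirect || (candidate.appBlockOrderIndex ?? 0) > (current.appBlockOrderIndex ?? 0)) {
      winners.set(candidate.name, candidate);
    }
  }
  return winners;
}

// Example: two blocks define DB_URL (orderIndex 0 and 2) and the app env overrides API_KEY.
const resolved = resolvePrecedence([
  { name: "DB_URL", value: "from-block-a", appBlockOrderIndex: 0 },
  { name: "DB_URL", value: "from-block-b", appBlockOrderIndex: 2 },
  { name: "API_KEY", value: "from-block-a", appBlockOrderIndex: 0 },
  { name: "API_KEY", value: "from-app-env" }
]);
// resolved.get("DB_URL")!.value === "from-block-b"
// resolved.get("API_KEY")!.value === "from-app-env"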
@@ -125,7 +511,17 @@ export const importDataIntoInfisicalFn = async ({
}

const originalToNewProjectId = new Map<string, string>();
-const originalToNewEnvironmentId = new Map<string, string>();
+const originalToNewEnvironmentId = new Map<
+string,
+{ envId: string; envSlug: string; rootFolderId: string; projectId: string }
+>();
+const originalToNewFolderId = new Map<
+string,
+{
+folderId: string;
+projectId: string;
+}
+>();
const projectsNotImported: string[] = [];

await projectDAL.transaction(async (tx) => {
@@ -170,65 +566,176 @@ export const importDataIntoInfisicalFn = async ({

const lastPos = await projectEnvDAL.findLastEnvPosition(projectId, tx);
const doc = await projectEnvDAL.create({ slug, name: environment.name, projectId, position: lastPos + 1 }, tx);
-await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);
+const folder = await folderDAL.create({ name: "root", parentId: null, envId: doc.id, version: 1 }, tx);

-originalToNewEnvironmentId.set(environment.id, doc.slug);
+originalToNewEnvironmentId.set(environment.id, {
+envSlug: doc.slug,
+envId: doc.id,
+rootFolderId: folder.id,
+projectId
+});
}
}

+if (data.folders) {
+for await (const folder of data.folders) {
+const parentEnv = originalToNewEnvironmentId.get(folder.parentFolderId as string);

+if (!parentEnv) {
+// eslint-disable-next-line no-continue
+continue;
+}

+const newFolder = await folderDAL.create(
+{
+name: folder.name,
+envId: parentEnv.envId,
+parentId: parentEnv.rootFolderId
+},
+tx
+);

+originalToNewFolderId.set(folder.id, {
+folderId: newFolder.id,
+projectId: parentEnv.projectId
+});
+}
+}

+// Useful for debugging:
+// console.log("data.secrets", data.secrets);
+// console.log("data.folders", data.folders);
+// console.log("data.environment", data.environments);

if (data.secrets && data.secrets.length > 0) {
const mappedToEnvironmentId = new Map<
string,
{
secretKey: string;
secretValue: string;
+folderId?: string;
+isFromBlock?: boolean;
}[]
>();

for (const secret of data.secrets) {
-if (!originalToNewEnvironmentId.get(secret.environmentId)) {
+const targetId = secret.folderId || secret.environmentId;
+
+// Skip if we can't find either an environment or folder mapping for this secret
+if (!originalToNewEnvironmentId.get(secret.environmentId) && !originalToNewFolderId.get(targetId)) {
+logger.info({ secret }, "[importDataIntoInfisicalFn]: Could not find environment or folder for secret");
+
// eslint-disable-next-line no-continue
continue;
}

-if (!mappedToEnvironmentId.has(secret.environmentId)) {
-mappedToEnvironmentId.set(secret.environmentId, []);
+if (!mappedToEnvironmentId.has(targetId)) {
+mappedToEnvironmentId.set(targetId, []);
}
-mappedToEnvironmentId.get(secret.environmentId)!.push({
+
+const alreadyHasSecret = mappedToEnvironmentId
+.get(targetId)!
+.find((el) => el.secretKey === secret.name && el.folderId === secret.folderId);

+if (alreadyHasSecret && alreadyHasSecret.isFromBlock) {
+// remove the existing secret if any
+mappedToEnvironmentId
+.get(targetId)!
+.splice(mappedToEnvironmentId.get(targetId)!.indexOf(alreadyHasSecret), 1);
+}
+mappedToEnvironmentId.get(targetId)!.push({
secretKey: secret.name,
-secretValue: secret.value || ""
+secretValue: secret.value || "",
+folderId: secret.folderId,
+isFromBlock: secret.appBlockOrderIndex !== undefined
});
}

// for each of the mappedEnvironmentId
-for await (const [envId, secrets] of mappedToEnvironmentId) {
-const environment = data.environments.find((env) => env.id === envId);
-const projectId = originalToNewProjectId.get(environment?.projectId as string)!;
+for await (const [targetId, secrets] of mappedToEnvironmentId) {
+logger.info("[importDataIntoInfisicalFn]: Processing secrets for targetId", targetId);

+let selectedFolder: TSecretFolders | undefined;
+let selectedProjectId: string | undefined;

+// Case 1: Secret belongs to a folder / branch / branch of a block
+const foundFolder = originalToNewFolderId.get(targetId);
+if (foundFolder) {
+logger.info("[importDataIntoInfisicalFn]: Processing secrets for folder");
+selectedFolder = await folderDAL.findById(foundFolder.folderId, tx);
+selectedProjectId = foundFolder.projectId;
+} else {
+logger.info("[importDataIntoInfisicalFn]: Processing secrets for normal environment");
+const environment = data.environments.find((env) => env.id === targetId);
+if (!environment) {
+logger.info(
+{
+targetId
+},
+"[importDataIntoInfisicalFn]: Could not find environment for secret"
+);
+// eslint-disable-next-line no-continue
+continue;
+}

+const projectId = originalToNewProjectId.get(environment.projectId)!;

if (!projectId) {
throw new BadRequestError({ message: `Failed to import secret, project not found` });
}

+const env = originalToNewEnvironmentId.get(targetId);
+if (!env) {
+logger.info(
+{
+targetId
+},
+"[importDataIntoInfisicalFn]: Could not find environment for secret"
+);

+// eslint-disable-next-line no-continue
+continue;
+}

+const folder = await folderDAL.findBySecretPath(projectId, env.envSlug, "/", tx);

+if (!folder) {
+throw new NotFoundError({
+message: `Folder not found for the given environment slug (${env.envSlug}) & secret path (/)`,
+name: "Create secret"
+});
+}

+selectedFolder = folder;
+selectedProjectId = projectId;
+}

+if (!selectedFolder) {
+throw new NotFoundError({
+message: `Folder not found for the given environment slug & secret path`,
+name: "CreateSecret"
+});
+}

+if (!selectedProjectId) {
+throw new NotFoundError({
+message: `Project not found for the given environment slug & secret path`,
+name: "CreateSecret"
+});
+}

const { encryptor: secretManagerEncrypt } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
-projectId
+projectId: selectedProjectId
},
tx
);

-const envSlug = originalToNewEnvironmentId.get(envId)!;
-const folder = await folderDAL.findBySecretPath(projectId, envSlug, "/", tx);
-if (!folder)
-throw new NotFoundError({
-message: `Folder not found for the given environment slug (${envSlug}) & secret path (/)`,
-name: "Create secret"
-});

const secretBatches = chunkArray(secrets, 2500);
for await (const secretBatch of secretBatches) {
const secretsByKeys = await secretDAL.findBySecretKeys(
-folder.id,
+selectedFolder.id,
secretBatch.map((el) => ({
key: el.secretKey,
type: SecretType.Shared
@@ -254,7 +761,7 @@ export const importDataIntoInfisicalFn = async ({
type: SecretType.Shared
};
}),
-folderId: folder.id,
+folderId: selectedFolder.id,
secretDAL,
secretVersionDAL,
secretTagDAL,
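In the import path above, each batch of secrets is keyed by a targetId that is either the folder created for an EnvKey branch or block branch (secret.folderId) or a plain environment id, and resolution tries the folder map first before falling back to the environment map. A condensed sketch of that lookup; the map value shapes mirror the code above, everything else is illustrative and slightly simplified (the real code re-fetches the root folder by secret path).

// --- illustrative sketch, not part of the diff ---
type FolderMapping = { folderId: string; projectId: string };
type EnvMapping = { envId: string; envSlug: string; rootFolderId: string; projectId: string };

function resolveImportTarget(
  targetId: string,
  folderMap: Map<string, FolderMapping>,
  envMap: Map<string, EnvMapping>
): { folderId: string; projectId: string } | undefined {
  // Case 1: a folder created for a branch or block branch
  const folder = folderMap.get(targetId);
  if (folder) return { folderId: folder.folderId, projectId: folder.projectId };

  // Case 2: a plain environment; fall back to its root folder
  const env = envMap.get(targetId);
  if (env) return { folderId: env.rootFolderId, projectId: env.projectId };

  // Mirrors the "could not find environment or folder" skip in the code above
  return undefined;
}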
@@ -31,7 +31,7 @@ export type TExternalMigrationQueueFactoryDep = {
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "create">;

-folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath">;
+folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath" | "findOne" | "findById">;
projectService: Pick<TProjectServiceFactory, "createProject">;
projectEnvService: Pick<TProjectEnvServiceFactory, "createEnvironment">;
secretV2BridgeService: Pick<TSecretV2BridgeServiceFactory, "createManySecret">;
@@ -2,8 +2,16 @@ import { ActorAuthMethod, ActorType } from "../auth/auth-type";

export type InfisicalImportData = {
projects: Array<{ name: string; id: string }>;
-environments: Array<{ name: string; id: string; projectId: string }>;
-secrets: Array<{ name: string; id: string; environmentId: string; value: string }>;
+environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }>;
+folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }>;
+secrets: Array<{
+id: string;
+name: string;
+environmentId: string;
+value: string;
+folderId?: string;
+appBlockOrderIndex?: number; // Not used for infisical import, only used for building the import structure to determine which block(s) take precedence.
+}>;
};

export type TImportEnvKeyDataCreate = {
@@ -28,62 +36,62 @@ export type TEnvKeyExportJSON = {
org: {
id: string;
name: string;
-settings: {
-auth: {
-inviteExpirationMs: number;
-deviceGrantExpirationMs: number;
-tokenExpirationMs: number;
-};
-crypto: {
-requiresPassphrase: boolean;
-requiresLockout: boolean;
-};
-envs: {
-autoCaps: boolean;
-autoCommitLocals: boolean;
-};
-};
};

+// Apps are projects
apps: {
id: string;
name: string;
-settings: Record<string, unknown>;
}[];
-defaultOrgRoles: {
+// Blocks are basically global projects that can be imported in other projects
+blocks: {
id: string;
-defaultName: string;
+name: string;
}[];
-defaultAppRoles: {
-id: string;
-defaultName: string;
+appBlocks: {
+appId: string;
+blockId: string;
+orderIndex: number;
}[];

defaultEnvironmentRoles: {
id: string;
defaultName: string;
-settings: {
-autoCommit: boolean;
-};
}[];

+nonDefaultEnvironmentRoles: {
+id: string;
+name: string;
+}[];
+
baseEnvironments: {
id: string;
envParentId: string;
environmentRoleId: string;
-settings: Record<string, unknown>;
}[];
-orgUsers: {
+// Branches for both blocks and apps
+subEnvironments: {
id: string;
-firstName: string;
-lastName: string;
-email: string;
-provider: string;
-orgRoleId: string;
-uid: string;
+envParentId: string;
+environmentRoleId: string;
+parentEnvironmentId: string;
+subName: string;
}[];

envs: Record<
string,
{
-variables: Record<string, { val: string }>;
-inherits: Record<string, unknown>;
+variables: Record<
+string,
+{
+val?: string;
+inheritsEnvironmentId?: string;
+}
+>;
+
+inherits: Record<string, string[]>;
}
>;
};
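The reworked TEnvKeyExportJSON now models blocks, app-block links, sub-environments (branches) and per-variable inheritance. For orientation, a tiny hand-written fragment that satisfies the new envs shape; all ids and values are invented.

// --- illustrative sketch, not part of the diff ---
const envsFragment: Record<
  string,
  {
    variables: Record<string, { val?: string; inheritsEnvironmentId?: string }>;
    inherits: Record<string, string[]>;
  }
> = {
  "env-app-dev": {
    variables: {
      DB_URL: { val: "postgres://localhost:5432/dev" },
      // No direct value: resolved by walking inheritsEnvironmentId until a val is found,
      // which is what findRootInheritedSecret does in the parser hunk above.
      API_KEY: { inheritsEnvironmentId: "env-block-dev" }
    },
    inherits: {}
  },
  "env-block-dev": {
    variables: { API_KEY: { val: "block-level-key" } },
    inherits: {}
  }
};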
@@ -1,7 +1,7 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
-import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
+import { TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
@@ -17,54 +17,27 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
const doc = await (tx || db.replicaNode())(TableName.IdentityAccessToken)
.where(filter)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
-.leftJoin(TableName.IdentityUaClientSecret, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn(
+.leftJoin(
+TableName.IdentityUaClientSecret,
`${TableName.IdentityAccessToken}.identityUAClientSecretId`,
`${TableName.IdentityUaClientSecret}.id`
-);
-})
-.leftJoin(TableName.IdentityUniversalAuth, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.Univeral])).andOn(
+)
+.leftJoin(
+TableName.IdentityUniversalAuth,
`${TableName.IdentityUaClientSecret}.identityUAId`,
`${TableName.IdentityUniversalAuth}.id`
-);
-})
-.leftJoin(TableName.IdentityGcpAuth, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.GCP_AUTH])).andOn(
-`${TableName.Identity}.id`,
-`${TableName.IdentityGcpAuth}.identityId`
-);
-})
-.leftJoin(TableName.IdentityAwsAuth, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.AWS_AUTH])).andOn(
-`${TableName.Identity}.id`,
-`${TableName.IdentityAwsAuth}.identityId`
-);
-})
-.leftJoin(TableName.IdentityAzureAuth, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.AZURE_AUTH])).andOn(
-`${TableName.Identity}.id`,
-`${TableName.IdentityAzureAuth}.identityId`
-);
-})
-.leftJoin(TableName.IdentityKubernetesAuth, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.KUBERNETES_AUTH])).andOn(
+)
+.leftJoin(TableName.IdentityGcpAuth, `${TableName.Identity}.id`, `${TableName.IdentityGcpAuth}.identityId`)
+.leftJoin(TableName.IdentityAwsAuth, `${TableName.Identity}.id`, `${TableName.IdentityAwsAuth}.identityId`)
+.leftJoin(TableName.IdentityAzureAuth, `${TableName.Identity}.id`, `${TableName.IdentityAzureAuth}.identityId`)
+.leftJoin(
+TableName.IdentityKubernetesAuth,
`${TableName.Identity}.id`,
`${TableName.IdentityKubernetesAuth}.identityId`
-);
-})
-.leftJoin(TableName.IdentityOidcAuth, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.OIDC_AUTH])).andOn(
-`${TableName.Identity}.id`,
-`${TableName.IdentityOidcAuth}.identityId`
-);
-})
-.leftJoin(TableName.IdentityTokenAuth, (qb) => {
-qb.on(`${TableName.Identity}.authMethod`, db.raw("?", [IdentityAuthMethod.TOKEN_AUTH])).andOn(
-`${TableName.Identity}.id`,
-`${TableName.IdentityTokenAuth}.identityId`
-);
-})
+)
+.leftJoin(TableName.IdentityOidcAuth, `${TableName.Identity}.id`, `${TableName.IdentityOidcAuth}.identityId`)
+.leftJoin(TableName.IdentityTokenAuth, `${TableName.Identity}.id`, `${TableName.IdentityTokenAuth}.identityId`)
.select(selectAllTableCols(TableName.IdentityAccessToken))
.select(
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityUniversalAuth).as("accessTokenTrustedIpsUa"),
@@ -82,14 +55,13 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {

return {
...doc,
-accessTokenTrustedIps:
-doc.accessTokenTrustedIpsUa ||
-doc.accessTokenTrustedIpsGcp ||
-doc.accessTokenTrustedIpsAws ||
-doc.accessTokenTrustedIpsAzure ||
-doc.accessTokenTrustedIpsK8s ||
-doc.accessTokenTrustedIpsOidc ||
-doc.accessTokenTrustedIpsToken
+trustedIpsUniversalAuth: doc.accessTokenTrustedIpsUa,
+trustedIpsGcpAuth: doc.accessTokenTrustedIpsGcp,
+trustedIpsAwsAuth: doc.accessTokenTrustedIpsAws,
+trustedIpsAzureAuth: doc.accessTokenTrustedIpsAzure,
+trustedIpsKubernetesAuth: doc.accessTokenTrustedIpsK8s,
+trustedIpsOidcAuth: doc.accessTokenTrustedIpsOidc,
+trustedIpsAccessTokenAuth: doc.accessTokenTrustedIpsToken
};
} catch (error) {
throw new DatabaseError({ error, name: "IdAccessTokenFindOne" });
@@ -1,6 +1,6 @@
import jwt, { JwtPayload } from "jsonwebtoken";

-import { TableName, TIdentityAccessTokens } from "@app/db/schemas";
+import { IdentityAuthMethod, TableName, TIdentityAccessTokens } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { checkIPAgainstBlocklist, TIp } from "@app/lib/ip";
@@ -164,10 +164,22 @@ export const identityAccessTokenServiceFactory = ({
message: "Failed to authorize revoked access token, access token is revoked"
});

-if (ipAddress && identityAccessToken) {
+const trustedIpsMap: Record<IdentityAuthMethod, unknown> = {
+[IdentityAuthMethod.UNIVERSAL_AUTH]: identityAccessToken.trustedIpsUniversalAuth,
+[IdentityAuthMethod.GCP_AUTH]: identityAccessToken.trustedIpsGcpAuth,
+[IdentityAuthMethod.AWS_AUTH]: identityAccessToken.trustedIpsAwsAuth,
+[IdentityAuthMethod.AZURE_AUTH]: identityAccessToken.trustedIpsAzureAuth,
+[IdentityAuthMethod.KUBERNETES_AUTH]: identityAccessToken.trustedIpsKubernetesAuth,
+[IdentityAuthMethod.OIDC_AUTH]: identityAccessToken.trustedIpsOidcAuth,
+[IdentityAuthMethod.TOKEN_AUTH]: identityAccessToken.trustedIpsAccessTokenAuth
+};
+
+const trustedIps = trustedIpsMap[identityAccessToken.authMethod as IdentityAuthMethod];
+
+if (ipAddress) {
checkIPAgainstBlocklist({
ipAddress,
-trustedIps: identityAccessToken?.accessTokenTrustedIps as TIp[]
+trustedIps: trustedIps as TIp[]
});
}

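Because an identity can now hold several auth methods, the service selects the trusted-IP list that matches the auth method recorded on the access token, rather than coalescing every column as the DAL used to. A minimal sketch of that selection; the call shape of checkIPAgainstBlocklist matches the hunk above, while the TrustedIp shape and surrounding wiring are assumptions.

// --- illustrative sketch, not part of the diff ---
type TrustedIp = { ipAddress: string; prefix?: number };

function assertIpAllowed(
  ipAddress: string,
  authMethod: string,
  trustedIpsByMethod: Record<string, TrustedIp[] | undefined>,
  checkIPAgainstBlocklist: (args: { ipAddress: string; trustedIps: TrustedIp[] }) => void
) {
  // Pick only the allowlist belonging to the token's own auth method.
  const trustedIps = trustedIpsByMethod[authMethod] ?? [];
  checkIPAgainstBlocklist({ ipAddress, trustedIps });
}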
@@ -13,7 +13,6 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedErro
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";

import { ActorType, AuthTokenType } from "../auth/auth-type";
-import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
@@ -33,7 +32,6 @@ type TIdentityAwsAuthServiceFactoryDep = {
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-identityDAL: Pick<TIdentityDALFactory, "updateById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
};
@@ -44,7 +42,6 @@ export const identityAwsAuthServiceFactory = ({
identityAccessTokenDAL,
identityAwsAuthDAL,
identityOrgMembershipDAL,
-identityDAL,
licenseService,
permissionService
}: TIdentityAwsAuthServiceFactoryDep) => {
@@ -113,7 +110,8 @@ export const identityAwsAuthServiceFactory = ({
accessTokenTTL: identityAwsAuth.accessTokenTTL,
accessTokenMaxTTL: identityAwsAuth.accessTokenMaxTTL,
accessTokenNumUses: 0,
-accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit
+accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit,
+authMethod: IdentityAuthMethod.AWS_AUTH
},
tx
);
@@ -155,10 +153,12 @@ export const identityAwsAuthServiceFactory = ({
}: TAttachAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
-if (identityMembershipOrg.identity.authMethod)
+if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
throw new BadRequestError({
message: "Failed to add AWS Auth to already configured identity"
});
+}

if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
@@ -206,13 +206,6 @@ export const identityAwsAuthServiceFactory = ({
},
tx
);
-await identityDAL.updateById(
-identityMembershipOrg.identityId,
-{
-authMethod: IdentityAuthMethod.AWS_AUTH
-},
-tx
-);
return doc;
});
return { ...identityAwsAuth, orgId: identityMembershipOrg.orgId };
@@ -234,10 +227,12 @@ export const identityAwsAuthServiceFactory = ({
}: TUpdateAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
-if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AWS_AUTH)
-throw new BadRequestError({
-message: "Failed to update AWS Auth"
+if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
+throw new NotFoundError({
+message: "The identity does not have AWS Auth attached"
});
+}

const identityAwsAuth = await identityAwsAuthDAL.findOne({ identityId });

@@ -293,10 +288,12 @@ export const identityAwsAuthServiceFactory = ({
const getAwsAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
-if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AWS_AUTH)
+if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
throw new BadRequestError({
message: "The identity does not have AWS Auth attached"
});
+}

const awsIdentityAuth = await identityAwsAuthDAL.findOne({ identityId });

@@ -320,10 +317,11 @@ export const identityAwsAuthServiceFactory = ({
}: TRevokeAwsAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
-if (identityMembershipOrg.identity?.authMethod !== IdentityAuthMethod.AWS_AUTH)
+if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
throw new BadRequestError({
message: "The identity does not have aws auth"
});
|
}
|
||||||
const { permission } = await permissionService.getOrgPermission(
|
const { permission } = await permissionService.getOrgPermission(
|
||||||
actor,
|
actor,
|
||||||
actorId,
|
actorId,
|
||||||
@@ -348,7 +346,6 @@ export const identityAwsAuthServiceFactory = ({
|
|||||||
|
|
||||||
const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
|
const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
|
||||||
const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
|
const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
|
||||||
await identityDAL.updateById(identityId, { authMethod: null }, tx);
|
|
||||||
return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
|
return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
|
||||||
});
|
});
|
||||||
return revokedIdentityAwsAuth;
|
return revokedIdentityAwsAuth;
|
||||||
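Taken together, these hunks drop the single-column authMethod bookkeeping (the identityDAL.updateById calls on attach and revoke, and the identityDAL dependency itself) and move every guard in identityAwsAuthServiceFactory onto an authMethods list carried by the identity, while the issued access token now records authMethod: IdentityAuthMethod.AWS_AUTH directly. Below is a minimal sketch of that guard pattern; the enum values, error classes, and Identity type are illustrative stand-ins, not the actual Infisical modules.

// Sketch only: stand-in types illustrating the authMethod -> authMethods migration.
enum IdentityAuthMethod {
  AWS_AUTH = "aws-auth",
  GCP_AUTH = "gcp-auth"
}

class BadRequestError extends Error {}
class NotFoundError extends Error {}

type Identity = {
  id: string;
  // Previously a single nullable authMethod field; now a list, so several
  // auth methods can be attached to the same identity at once.
  authMethods: IdentityAuthMethod[];
};

// Attach: reject only if AWS Auth specifically is already configured.
const assertAwsAuthNotAttached = (identity: Identity): void => {
  if (identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
    throw new BadRequestError("Failed to add AWS Auth to already configured identity");
  }
};

// Get / update / revoke: require that AWS Auth is attached.
const assertAwsAuthAttached = (identity: Identity): void => {
  if (!identity.authMethods.includes(IdentityAuthMethod.AWS_AUTH)) {
    throw new NotFoundError("The identity does not have AWS Auth attached");
  }
};

// Usage example with a hypothetical identity.
const identity: Identity = { id: "example-identity-id", authMethods: [IdentityAuthMethod.GCP_AUTH] };
assertAwsAuthNotAttached(identity); // passes: AWS Auth not yet configured
identity.authMethods.push(IdentityAuthMethod.AWS_AUTH);
assertAwsAuthAttached(identity); // passes: AWS Auth is now attached

The practical consequence of the list-based check is that attaching AWS Auth no longer has to fail just because some other auth method exists on the identity, and revoking it no longer has to null out a shared column; each method is added and removed independently.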