Mirror of https://github.com/Infisical/infisical.git (synced 2025-09-06 06:00:42 +00:00)

Compare commits: 1 commit (docs-updat ... daniel/mov)

Commit abb56379fc
@@ -74,14 +74,6 @@ CAPTCHA_SECRET=
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=

-OTEL_TELEMETRY_COLLECTION_ENABLED=
-OTEL_EXPORT_TYPE=
-OTEL_EXPORT_OTLP_ENDPOINT=
-OTEL_OTLP_PUSH_INTERVAL=
-
-OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
-OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
-
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
@@ -1,115 +1,62 @@
 name: Release standalone docker image

 on:
   push:
     tags:
       - "infisical/v*.*.*-postgres"

 jobs:
   infisical-tests:
     name: Run tests before deployment
     # https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
     uses: ./.github/workflows/run-backend-tests.yml

   infisical-standalone:
     name: Build infisical standalone image postgres
     runs-on: ubuntu-latest
     needs: [infisical-tests]
     steps:
       - name: Extract version from tag
         id: extract_version
         run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
       - name: ☁️ Checkout source
         uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: 📦 Install dependencies to test all dependencies
         run: npm ci --only-production
         working-directory: backend
       - name: version output
         run: |
           echo "Output Value: ${{ steps.version.outputs.major }}"
           echo "Output Value: ${{ steps.version.outputs.minor }}"
           echo "Output Value: ${{ steps.version.outputs.patch }}"
           echo "Output Value: ${{ steps.version.outputs.version }}"
           echo "Output Value: ${{ steps.version.outputs.version_type }}"
           echo "Output Value: ${{ steps.version.outputs.increment }}"
       - name: Save commit hashes for tag
         id: commit
         uses: pr-mpt/actions-commit-hash@v2
       - name: 🔧 Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
       - name: 🐋 Login to Docker Hub
         uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Set up Depot CLI
         uses: depot/setup-action@v1
       - name: 📦 Build backend and export to Docker
         uses: depot/build-push-action@v1
         with:
           project: 64mmf0n610
           token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
           push: true
           context: .
           tags: |
             infisical/infisical:latest-postgres
             infisical/infisical:${{ steps.commit.outputs.short }}
             infisical/infisical:${{ steps.extract_version.outputs.version }}
           platforms: linux/amd64,linux/arm64
           file: Dockerfile.standalone-infisical
           build-args: |
             POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
             INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

-  infisical-fips-standalone:
-    name: Build infisical standalone image postgres
-    runs-on: ubuntu-latest
-    needs: [infisical-tests]
-    steps:
-      - name: Extract version from tag
-        id: extract_version
-        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
-      - name: ☁️ Checkout source
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: 📦 Install dependencies to test all dependencies
-        run: npm ci --only-production
-        working-directory: backend
-      - name: version output
-        run: |
-          echo "Output Value: ${{ steps.version.outputs.major }}"
-          echo "Output Value: ${{ steps.version.outputs.minor }}"
-          echo "Output Value: ${{ steps.version.outputs.patch }}"
-          echo "Output Value: ${{ steps.version.outputs.version }}"
-          echo "Output Value: ${{ steps.version.outputs.version_type }}"
-          echo "Output Value: ${{ steps.version.outputs.increment }}"
-      - name: Save commit hashes for tag
-        id: commit
-        uses: pr-mpt/actions-commit-hash@v2
-      - name: 🔧 Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: 🐋 Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Set up Depot CLI
-        uses: depot/setup-action@v1
-      - name: 📦 Build backend and export to Docker
-        uses: depot/build-push-action@v1
-        with:
-          project: 64mmf0n610
-          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
-          push: true
-          context: .
-          tags: |
-            infisical/infisical-fips:latest-postgres
-            infisical/infisical-fips:${{ steps.commit.outputs.short }}
-            infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
-          platforms: linux/amd64,linux/arm64
-          file: Dockerfile.fips.standalone-infisical
-          build-args: |
-            POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
-            INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
@@ -10,7 +10,8 @@ on:

 permissions:
   contents: write
   # packages: write
   # issues: write
 jobs:
   cli-integration-tests:
     name: Run tests before deployment
@@ -25,63 +26,6 @@ jobs:
       CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
       CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

-  npm-release:
-    runs-on: ubuntu-20.04
-    env:
-      working-directory: ./npm
-    needs:
-      - cli-integration-tests
-      - goreleaser
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Extract version
-        run: |
-          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
-          echo "Version extracted: $VERSION"
-          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
-
-      - name: Print version
-        run: echo ${{ env.CLI_VERSION }}
-
-      - name: Setup Node
-        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
-        with:
-          node-version: 20
-          cache: "npm"
-          cache-dependency-path: ./npm/package-lock.json
-      - name: Install dependencies
-        working-directory: ${{ env.working-directory }}
-        run: npm install --ignore-scripts
-
-      - name: Set NPM version
-        working-directory: ${{ env.working-directory }}
-        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
-
-      - name: Setup NPM
-        working-directory: ${{ env.working-directory }}
-        run: |
-          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
-          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
-
-          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
-          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
-        env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-
-      - name: Pack NPM
-        working-directory: ${{ env.working-directory }}
-        run: npm pack
-
-      - name: Publish NPM
-        working-directory: ${{ env.working-directory }}
-        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
-        env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

   goreleaser:
     runs-on: ubuntu-20.04
     needs: [cli-integration-tests]
.gitignore (vendored, 2 lines changed)
@@ -71,5 +71,3 @@ frontend-build
 cli/infisical-merge
 cli/test/infisical-merge
-/backend/binary

-/npm/bin
@@ -6,4 +6,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
 docs/self-hosting/configuration/envars.mdx:generic-api-key:106
 frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
 docs/mint.json:generic-api-key:651
-backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
@@ -1,167 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-slim AS base

FROM base AS frontend-dependencies
WORKDIR /app

COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./

# Install dependencies
RUN npm ci --only-production --ignore-scripts

# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app

# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .

ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY

# Build
RUN npm run build

# Production image
FROM base AS frontend-runner
WORKDIR /app

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images

COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static

USER non-root-user

ENV NEXT_TELEMETRY_DISABLED 1

##
## BACKEND
##
FROM base AS backend-build

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

WORKDIR /app

# Required for pkcs11js
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    && rm -rf /var/lib/apt/lists/*

COPY backend/package*.json ./
RUN npm ci --only-production

COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build

# Production stage
FROM base AS backend-runner

ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Required for pkcs11js
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    && rm -rf /var/lib/apt/lists/*

COPY backend/package*.json ./
RUN npm ci --only-production

COPY --from=backend-build /app .

RUN mkdir frontend-build

# Production stage
FROM base AS production

# Install necessary packages
RUN apt-get update && apt-get install -y \
    ca-certificates \
    curl \
    git \
    && rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates

## set pre baked keys
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
    BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
    BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /backend

ENV TELEMETRY_ENABLED true

EXPOSE 8080
EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
@@ -72,9 +72,6 @@ RUN addgroup --system --gid 1001 nodejs \

 WORKDIR /app

-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
-
 COPY backend/package*.json ./
 RUN npm ci --only-production

@@ -88,9 +85,6 @@ FROM base AS backend-runner

 WORKDIR /app

-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
-
 COPY backend/package*.json ./
 RUN npm ci --only-production
Makefile (4 lines changed)
@@ -10,9 +10,6 @@ up-dev:
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build

-up-dev-metrics:
-	docker compose -f docker-compose.dev.yml --profile metrics up --build
-
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build

@@ -30,3 +27,4 @@ reviewable-api:
 	npm run type:check

 reviewable: reviewable-ui reviewable-api
@@ -3,12 +3,6 @@ FROM node:20-alpine AS build

 WORKDIR /app

-# Required for pkcs11js
-RUN apk --update add \
-    python3 \
-    make \
-    g++
-
 COPY package*.json ./
 RUN npm ci --only-production

@@ -17,17 +11,12 @@ RUN npm run build

 # Production stage
 FROM node:20-alpine

 WORKDIR /app

 ENV npm_config_cache /home/node/.npm

 COPY package*.json ./

-RUN apk --update add \
-    python3 \
-    make \
-    g++
-
 RUN npm ci --only-production && npm cache clean --force

 COPY --from=build /app .
@@ -1,44 +1,5 @@
 FROM node:20-alpine

-# ? Setup a test SoftHSM module. In production a real HSM is used.
-
-ARG SOFTHSM2_VERSION=2.5.0
-
-ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
-    SOFTHSM2_SOURCES=/tmp/softhsm2
-
-# install build dependencies including python3
-RUN apk --update add \
-    alpine-sdk \
-    autoconf \
-    automake \
-    git \
-    libtool \
-    openssl-dev \
-    python3 \
-    make \
-    g++
-
-# build and install SoftHSM2
-RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
-WORKDIR ${SOFTHSM2_SOURCES}
-
-RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
-    && sh autogen.sh \
-    && ./configure --prefix=/usr/local --disable-gost \
-    && make \
-    && make install
-
-WORKDIR /root
-RUN rm -fr ${SOFTHSM2_SOURCES}
-
-# install pkcs11-tool
-RUN apk --update add opensc
-
-RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
-
-# ? App setup
-
 RUN apk add --no-cache bash curl && curl -1sLf \
     'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
     && apk add infisical=0.8.1 && apk add --no-cache git
@@ -5,9 +5,6 @@ export const mockSmtpServer = (): TSmtpService => {
   return {
     sendMail: async (data) => {
       storage.push(data);
     },
-    verify: async () => {
-      return true;
-    }
   };
 };
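The mock above replaces outbound SMTP entirely in tests. A minimal self-contained sketch of the same pattern (the mail shape and helper name here are assumptions, not the project's actual TSmtpService type):

    type TMail = { to: string; subject: string; body: string };

    const makeMockSmtp = () => {
      // captured messages live in this array so tests can assert on them
      const storage: TMail[] = [];
      return {
        storage,
        sendMail: async (data: TMail) => {
          storage.push(data);
        }
      };
    };

    // usage in a test:
    // const smtp = makeMockSmtp();
    // await smtp.sendMail({ to: "user@example.com", subject: "hi", body: "hello" });
    // expect(smtp.storage).toHaveLength(1);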
@@ -16,7 +16,6 @@ import { initDbConnection } from "@app/db";
 import { queueServiceFactory } from "@app/queue";
 import { keyStoreFactory } from "@app/keystore/keystore";
 import { Redis } from "ioredis";
-import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";

 dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
 export default {
@@ -55,12 +54,7 @@ export default {
     const smtp = mockSmtpServer();
     const queue = queueServiceFactory(cfg.REDIS_URL);
     const keyStore = keyStoreFactory(cfg.REDIS_URL);

-    const hsmModule = initializeHsmModule();
-    hsmModule.initialize();
-
-    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
+    const server = await main({ db, smtp, logger, queue, keyStore });
     // @ts-expect-error type
     globalThis.testServer = server;
     // @ts-expect-error type
backend/package-lock.json (generated, 1701 lines changed)
File diff suppressed because it is too large
@@ -50,7 +50,6 @@
     "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
     "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
     "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
-    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
     "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
     "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
@@ -59,7 +58,6 @@
     "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
     "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
     "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
-    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
     "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@@ -86,7 +84,6 @@
     "@types/passport-google-oauth20": "^2.0.14",
     "@types/pg": "^8.10.9",
     "@types/picomatch": "^2.3.3",
-    "@types/pkcs11js": "^1.0.4",
     "@types/prompt-sync": "^4.2.3",
     "@types/resolve": "^1.20.6",
     "@types/safe-regex": "^1.1.6",
@@ -140,14 +137,6 @@
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
-    "@opentelemetry/api": "^1.9.0",
-    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
-    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
-    "@opentelemetry/exporter-prometheus": "^0.55.0",
-    "@opentelemetry/instrumentation": "^0.55.0",
-    "@opentelemetry/resources": "^1.28.0",
-    "@opentelemetry/sdk-metrics": "^1.28.0",
-    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
@@ -191,7 +180,6 @@
     "openid-client": "^5.6.5",
     "ora": "^7.0.1",
     "oracledb": "^6.4.0",
-    "otplib": "^12.0.1",
     "passport-github": "^1.1.0",
     "passport-gitlab2": "^5.0.0",
     "passport-google-oauth20": "^2.0.0",
@@ -200,7 +188,6 @@
     "pg-query-stream": "^4.5.3",
     "picomatch": "^3.0.1",
     "pino": "^8.16.2",
-    "pkcs11js": "^2.1.6",
     "pkijs": "^3.2.4",
     "posthog-node": "^3.6.2",
     "probot": "^13.3.8",
@@ -8,80 +8,61 @@ const prompt = promptSync({
   sigint: true
 });

-const sanitizeInputParam = (value: string) => {
-  // Escape double quotes and wrap the entire value in double quotes
-  if (value) {
-    return `"${value.replace(/"/g, '\\"')}"`;
-  }
-  return '""';
-};
-
 const exportDb = () => {
-  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
-  const exportPort = sanitizeInputParam(
-    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
-  );
-  const exportUser = sanitizeInputParam(
-    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
-  );
-  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
-  const exportDatabase = sanitizeInputParam(
-    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
-  );
+  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
+  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
+  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
+  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
+  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";

   // we do not include the audit_log and secret_sharing entries
   execSync(
-    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
       __dirname,
-      "../src/db/backup.dump"
+      "../src/db/dump.sql"
     )}`,
     { stdio: "inherit" }
   );
 };

 const importDbForOrg = () => {
-  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
-  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
-  const importUser = sanitizeInputParam(
-    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
-  );
-  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
-  const importDatabase = sanitizeInputParam(
-    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
-  );
-  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
+  const importHost = prompt("Enter your Postgres Host to migrate to: ");
+  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
+  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
+  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
+  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
+  const orgId = prompt("Enter the organization ID to migrate: ");

-  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
+  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
     console.log("File not found, please export the database first.");
     return;
   }

   execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
-      __dirname,
-      "../src/db/backup.dump"
-    )}`,
-    { maxBuffer: 1024 * 1024 * 4096 }
+    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
+      __dirname,
+      "../src/db/dump.sql"
+    )}`
   );

   execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
+    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
   );

   // delete global/instance-level resources not relevant to the organization to migrate
   // users
   execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
+    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
   );

   // identities
   execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
+    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
   );

   // reset slack configuration in superAdmin
   execSync(
-    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
+    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
   );

   console.log("Organization migrated successfully.");
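Both sides of this hunk interpolate prompt input into a shell string, which is why the removed sanitizeInputParam helper existed. A hedged sketch of an alternative (not the project's code): execFileSync bypasses the shell entirely, so quoting becomes unnecessary.

    import { execFileSync } from "child_process";

    type PgConn = { host: string; port: string; user: string; password: string; database: string };

    // pg_dump reads connection details from PG* environment variables; passing
    // the values via `env` and argv means no shell ever parses them.
    const pgDump = (conn: PgConn, outFile: string) => {
      execFileSync(
        "pg_dump",
        ["--exclude-table-data=secret_sharing", "--exclude-table-data=audit_log*", "-f", outFile, conn.database],
        {
          env: { ...process.env, PGHOST: conn.host, PGPORT: conn.port, PGUSER: conn.user, PGPASSWORD: conn.password },
          stdio: "inherit"
        }
      );
    };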
backend/src/@types/fastify.d.ts (vendored, 4 lines changed)
@@ -44,7 +44,6 @@ import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
 import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
 import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
 import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
-import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
 import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
 import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
 import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
@@ -79,7 +78,6 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
 import { TSlackServiceFactory } from "@app/services/slack/slack-service";
 import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
 import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
-import { TTotpServiceFactory } from "@app/services/totp/totp-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 import { TUserServiceFactory } from "@app/services/user/user-service";
 import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
@@ -186,7 +184,6 @@ declare module "fastify" {
     rateLimit: TRateLimitServiceFactory;
     userEngagement: TUserEngagementServiceFactory;
     externalKms: TExternalKmsServiceFactory;
-    hsm: THsmServiceFactory;
     orgAdmin: TOrgAdminServiceFactory;
     slack: TSlackServiceFactory;
     workflowIntegration: TWorkflowIntegrationServiceFactory;
@@ -194,7+191,6 @@ declare module "fastify" {
     migration: TExternalMigrationServiceFactory;
     externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
     projectTemplate: TProjectTemplateServiceFactory;
-    totp: TTotpServiceFactory;
   };
   // this is exclusive use for middlewares in which we need to inject data
   // everywhere else access using service layer
backend/src/@types/knex.d.ts (vendored, 4 lines changed)
@@ -314,9 +314,6 @@ import {
   TSuperAdmin,
   TSuperAdminInsert,
   TSuperAdminUpdate,
-  TTotpConfigs,
-  TTotpConfigsInsert,
-  TTotpConfigsUpdate,
   TTrustedIps,
   TTrustedIpsInsert,
   TTrustedIpsUpdate,
@@ -829,6 +826,5 @@ declare module "knex/types/tables" {
       TProjectTemplatesInsert,
       TProjectTemplatesUpdate
     >;
-    [TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
   }
 }
@@ -64,25 +64,23 @@ export async function up(knex: Knex): Promise<void> {
   }

   if (await knex.schema.hasTable(TableName.Certificate)) {
-    const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
-    if (!hasCaCertIdColumn) {
-      await knex.schema.alterTable(TableName.Certificate, (t) => {
-        t.uuid("caCertId").nullable();
-        t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
-      });
+    await knex.schema.alterTable(TableName.Certificate, (t) => {
+      t.uuid("caCertId").nullable();
+      t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
+    });

-      await knex.raw(`
+    await knex.raw(`
       UPDATE "${TableName.Certificate}" cert
       SET "caCertId" = (
         SELECT caCert.id
         FROM "${TableName.CertificateAuthorityCert}" caCert
         WHERE caCert."caId" = cert."caId"
-      )`);
+      )
+    `);

-      await knex.schema.alterTable(TableName.Certificate, (t) => {
-        t.uuid("caCertId").notNullable().alter();
-      });
-    }
+    await knex.schema.alterTable(TableName.Certificate, (t) => {
+      t.uuid("caCertId").notNullable().alter();
+    });
   }
 }
@@ -2,7 +2,7 @@ import { Knex } from "knex";

 import { TableName } from "../schemas";

-const BATCH_SIZE = 10_000;
+const BATCH_SIZE = 30_000;

 export async function up(knex: Knex): Promise<void> {
   const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
@@ -12,18 +12,7 @@ export async function up(knex: Knex): Promise<void> {
       t.string("authMethod").nullable();
     });

-    // first we remove identities without auth method that is unused
-    // ! We delete all access tokens where the identity has no auth method set!
-    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
-    await knex(TableName.IdentityAccessToken)
-      .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
-      .whereNull(`${TableName.Identity}.authMethod`)
-      .delete();
-
-    let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
-      .whereNull("authMethod")
-      .limit(BATCH_SIZE)
-      .select("id");
+    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
     let totalUpdated = 0;

     do {
@@ -44,15 +33,24 @@ export async function up(knex: Knex): Promise<void> {
       });

       // eslint-disable-next-line no-await-in-loop
-      nullableAccessTokens = await knex(TableName.IdentityAccessToken)
-        .whereNull("authMethod")
-        .limit(BATCH_SIZE)
-        .select("id");
+      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);

       totalUpdated += batchIds.length;
       console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
     } while (nullableAccessTokens.length > 0);

+    // ! We delete all access tokens where the identity has no auth method set!
+    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
+    await knex(TableName.IdentityAccessToken)
+      .whereNotExists((queryBuilder) => {
+        void queryBuilder
+          .select("id")
+          .from(TableName.Identity)
+          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
+          .whereNotNull("authMethod");
+      })
+      .delete();

     // Finally we set the authMethod to notNullable after populating the column.
     // This will fail if the data is not populated correctly, so it's safe.
     await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
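The loop above is an instance of a common backfill pattern: select a bounded batch of rows, update them, and repeat until the predicate matches nothing. A generic sketch under assumed table and column names (the value "univ-auth" is a placeholder for illustration, not this migration's exact code):

    import { Knex } from "knex";

    const BATCH_SIZE = 10_000;

    // Backfill `authMethod` in bounded batches so no single UPDATE touches the
    // whole table at once.
    export const backfillAuthMethod = async (knex: Knex): Promise<void> => {
      let batch = await knex("identity_access_tokens").whereNull("authMethod").limit(BATCH_SIZE).select("id");
      while (batch.length > 0) {
        const ids = batch.map((row) => row.id as string);
        // eslint-disable-next-line no-await-in-loop
        await knex("identity_access_tokens").whereIn("id", ids).update({ authMethod: "univ-auth" });
        // eslint-disable-next-line no-await-in-loop
        batch = await knex("identity_access_tokens").whereNull("authMethod").limit(BATCH_SIZE).select("id");
      }
    };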
@@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      t.dropForeign("orgId");
      t.foreign("orgId").references("id").inTable(TableName.Organization);
    });
  }
}
@@ -1,23 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");

  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
    if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE");
    if (!hasTimestampsCol) t.timestamps(true, true, true);
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
  const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");

  await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
    if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy");
    if (hasTimestampsCol) t.dropTimestamps(true);
  });
}
@@ -1,54 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
    await knex.schema.createTable(TableName.TotpConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("userId").notNullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.boolean("isVerified").defaultTo(false).notNullable();
      t.binary("encryptedRecoveryCodes").notNullable();
      t.binary("encryptedSecret").notNullable();
      t.timestamps(true, true, true);
      t.unique("userId");
    });

    await createOnUpdateTrigger(knex, TableName.TotpConfig);
  }

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (!doesOrgMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (!doesUserSelectedMfaMethodColExist) {
      t.string("selectedMfaMethod");
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.TotpConfig);
  await knex.schema.dropTableIfExists(TableName.TotpConfig);

  const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Organization, (t) => {
    if (doesOrgMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });

  const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
  await knex.schema.alterTable(TableName.Users, (t) => {
    if (doesUserSelectedMfaMethodColExist) {
      t.dropColumn("selectedMfaMethod");
    }
  });
}
@@ -106,7 +106,6 @@ export * from "./secrets-v2";
 export * from "./service-tokens";
 export * from "./slack-integrations";
 export * from "./super-admin";
-export * from "./totp-configs";
 export * from "./trusted-ips";
 export * from "./user-actions";
 export * from "./user-aliases";
@@ -11,10 +11,7 @@ import { TImmutableDBKeys } from "./models";

 export const KmsRootConfigSchema = z.object({
   id: z.string().uuid(),
-  encryptedRootKey: zodBuffer,
-  encryptionStrategy: z.string(),
-  createdAt: z.date(),
-  updatedAt: z.date()
+  encryptedRootKey: zodBuffer
 });

 export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
@@ -117,7 +117,6 @@ export enum TableName {
   ExternalKms = "external_kms",
   InternalKms = "internal_kms",
   InternalKmsKeyVersion = "internal_kms_key_version",
-  TotpConfig = "totp_configs",
   // @depreciated
   KmsKeyVersion = "kms_key_versions",
   WorkflowIntegrations = "workflow_integrations",
@@ -21,8 +21,7 @@ export const OrganizationsSchema = z.object({
   kmsDefaultKeyId: z.string().uuid().nullable().optional(),
   kmsEncryptedDataKey: zodBuffer.nullable().optional(),
   defaultMembershipRole: z.string().default("member"),
-  enforceMfa: z.boolean().default(false),
-  selectedMfaMethod: z.string().nullable().optional()
+  enforceMfa: z.boolean().default(false)
 });

 export type TOrganizations = z.infer<typeof OrganizationsSchema>;
@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const TotpConfigsSchema = z.object({
  id: z.string().uuid(),
  userId: z.string().uuid(),
  isVerified: z.boolean().default(false),
  encryptedRecoveryCodes: zodBuffer,
  encryptedSecret: zodBuffer,
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;
@@ -26,8 +26,7 @@ export const UsersSchema = z.object({
   consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
   isLocked: z.boolean().default(false).nullable().optional(),
   temporaryLockDateEnd: z.date().nullable().optional(),
-  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
-  selectedMfaMethod: z.string().nullable().optional()
+  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
 });

 export type TUsers = z.infer<typeof UsersSchema>;
@@ -2,9 +2,6 @@ import { Knex } from "knex";

 import { TableName } from "./schemas";

-interface PgTriggerResult {
-  rows: Array<{ exists: boolean }>;
-}
 export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
   knex.schema.createTable(tableName, (table) => {
     table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
@@ -31,26 +28,13 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;

 // we would be using this to apply updatedAt where ever we wanta
 // remember to set `timestamps(true,true,true)` before this on schema
-export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
-  const triggerExists = await knex.raw<PgTriggerResult>(`
-    SELECT EXISTS (
-      SELECT 1
-      FROM pg_trigger
-      WHERE tgname = '${tableName}_updatedAt'
-    );
-  `);
-
-  if (!triggerExists?.rows?.[0]?.exists) {
-    return knex.raw(`
-      CREATE TRIGGER "${tableName}_updatedAt"
-      BEFORE UPDATE ON ${tableName}
-      FOR EACH ROW
-      EXECUTE PROCEDURE on_update_timestamp();
-    `);
-  }
-
-  return null;
-};
+export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
+  knex.raw(`
+    CREATE TRIGGER "${tableName}_updatedAt"
+    BEFORE UPDATE ON ${tableName}
+    FOR EACH ROW
+    EXECUTE PROCEDURE on_update_timestamp();
+  `);

 export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
   knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
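The removed variant made trigger creation idempotent by querying pg_trigger first. Another way to get the same re-runnable behavior (a sketch, not the project's code) is to drop the trigger before recreating it, reusing the two raw statements already defined above:

    import { Knex } from "knex";

    export const recreateOnUpdateTrigger = async (knex: Knex, tableName: string) => {
      // safe to re-run: DROP ... IF EXISTS is a no-op when the trigger is absent
      await knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
      await knex.raw(`
        CREATE TRIGGER "${tableName}_updatedAt"
        BEFORE UPDATE ON ${tableName}
        FOR EACH ROW
        EXECUTE PROCEDURE on_update_timestamp();
      `);
    };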
@@ -122,8 +122,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
         },
         `email: ${email} firstName: ${profile.firstName as string}`
       );
-
-      throw new Error("Invalid saml request. Missing email or first name");
     }

     const userMetadata = Object.keys(profile.attributes || {})
@@ -1,58 +0,0 @@
import * as pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmModule } from "./hsm-types";

export const initializeHsmModule = () => {
  const appCfg = getConfig();

  // Create a new instance of PKCS11 module
  const pkcs11 = new pkcs11js.PKCS11();
  let isInitialized = false;

  const initialize = () => {
    if (!appCfg.isHsmConfigured) {
      return;
    }

    try {
      // Load the PKCS#11 module
      pkcs11.load(appCfg.HSM_LIB_PATH!);

      // Initialize the module
      pkcs11.C_Initialize();
      isInitialized = true;

      logger.info("PKCS#11 module initialized");
    } catch (err) {
      logger.error("Failed to initialize PKCS#11 module:", err);
      throw err;
    }
  };

  const finalize = () => {
    if (isInitialized) {
      try {
        pkcs11.C_Finalize();
        isInitialized = false;
        logger.info("PKCS#11 module finalized");
      } catch (err) {
        logger.error("Failed to finalize PKCS#11 module:", err);
        throw err;
      }
    }
  };

  const getModule = (): HsmModule => ({
    pkcs11,
    isInitialized
  });

  return {
    initialize,
    finalize,
    getModule
  };
};
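For reference, the lifecycle this factory implies (and that the vitest setup earlier in this diff followed) is initialize, use, finalize; the shutdown hook below is an assumption for illustration:

    const hsmModule = initializeHsmModule();
    hsmModule.initialize(); // no-op unless the HSM env vars are configured

    const hsm = hsmModule.getModule(); // { pkcs11, isInitialized }
    // e.g. const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsm });

    process.on("beforeExit", () => {
      hsmModule.finalize(); // release the PKCS#11 library on shutdown
    });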
@@ -1,470 +0,0 @@
import pkcs11js from "pkcs11js";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

import { HsmKeyType, HsmModule } from "./hsm-types";

type THsmServiceFactoryDep = {
  hsmModule: HsmModule;
};

export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;

type SyncOrAsync<T> = T | Promise<T>;
type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;

// eslint-disable-next-line no-empty-pattern
export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
  const appCfg = getConfig();

  // Constants for buffer structures
  const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
  const BLOCK_SIZE = 16;
  const HMAC_SIZE = 32;

  const AES_KEY_SIZE = 256;
  const HMAC_KEY_SIZE = 256;

  const $withSession = async <T>(callbackWithSession: SessionCallback<T>): Promise<T> => {
    const RETRY_INTERVAL = 200; // 200ms between attempts
    const MAX_TIMEOUT = 90_000; // 90 seconds maximum total time

    let sessionHandle: pkcs11js.Handle | null = null;

    const removeSession = () => {
      if (sessionHandle !== null) {
        try {
          pkcs11.C_Logout(sessionHandle);
          pkcs11.C_CloseSession(sessionHandle);
          logger.info("HSM: Terminated session successfully");
        } catch (error) {
          logger.error(error, "HSM: Failed to terminate session");
        } finally {
          sessionHandle = null;
        }
      }
    };

    try {
      if (!pkcs11 || !isInitialized) {
        throw new Error("PKCS#11 module is not initialized");
      }

      // Get slot list
      let slots: pkcs11js.Handle[];
      try {
        slots = pkcs11.C_GetSlotList(false); // false to get all slots
      } catch (error) {
        throw new Error(`Failed to get slot list: ${(error as Error)?.message}`);
      }

      if (slots.length === 0) {
        throw new Error("No slots available");
      }

      if (appCfg.HSM_SLOT >= slots.length) {
        throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
      }

      const slotId = slots[appCfg.HSM_SLOT];

      const startTime = Date.now();
      while (Date.now() - startTime < MAX_TIMEOUT) {
        try {
          // Open session
          // eslint-disable-next-line no-bitwise
          sessionHandle = pkcs11.C_OpenSession(slotId, pkcs11js.CKF_SERIAL_SESSION | pkcs11js.CKF_RW_SESSION);

          // Login
          try {
            pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
            logger.info("HSM: Successfully authenticated");
            break;
          } catch (error) {
            // Handle specific error cases
            if (error instanceof pkcs11js.Pkcs11Error) {
              if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
                // We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
                logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
                throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
              }
              if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
                logger.warn("HSM: Session already logged in");
              }
            }
            throw error; // Re-throw other errors
          }
        } catch (error) {
          logger.warn(`HSM: Session creation failed. Retrying... Error: ${(error as Error)?.message}`);

          if (sessionHandle !== null) {
            try {
              pkcs11.C_CloseSession(sessionHandle);
            } catch (closeError) {
              logger.error(closeError, "HSM: Failed to close session");
            }
            sessionHandle = null;
          }

          // Wait before retrying
          // eslint-disable-next-line no-await-in-loop
          await new Promise((resolve) => {
            setTimeout(resolve, RETRY_INTERVAL);
          });
        }
      }

      if (sessionHandle === null) {
        throw new Error("HSM: Failed to open session after maximum retries");
      }

      // Execute callback with session handle
      const result = await callbackWithSession(sessionHandle);
      removeSession();
      return result;
    } catch (error) {
      logger.error(error, "HSM: Failed to open session");
      throw error;
    } finally {
      // Ensure cleanup
      removeSession();
    }
  };
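  // Editor's note: $withSession above retries session creation every 200 ms for
  // up to 90 s before giving up. The same control flow in isolation, as a
  // generic sketch (not part of the original file): keep attempting an async
  // operation until it succeeds or a total timeout elapses, then rethrow the
  // last failure.
  // const retryUntil = async <T>(attempt: () => Promise<T>, intervalMs = 200, timeoutMs = 90_000): Promise<T> => {
  //   const start = Date.now();
  //   let lastError: unknown = new Error("retryUntil: timed out");
  //   while (Date.now() - start < timeoutMs) {
  //     try {
  //       return await attempt();
  //     } catch (error) {
  //       lastError = error;
  //       await new Promise((resolve) => {
  //         setTimeout(resolve, intervalMs);
  //       });
  //     }
  //   }
  //   throw lastError;
  // };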
  const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
    const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
    const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;

    const template = [
      { type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
      { type: pkcs11js.CKA_KEY_TYPE, value: keyType },
      { type: pkcs11js.CKA_LABEL, value: label }
    ];

    try {
      // Initialize search
      pkcs11.C_FindObjectsInit(sessionHandle, template);

      try {
        // Find first matching object
        const handles = pkcs11.C_FindObjects(sessionHandle, 1);

        if (handles.length === 0) {
          throw new Error("Failed to find master key");
        }

        return handles[0]; // Return the key handle
      } finally {
        // Always finalize the search operation
        pkcs11.C_FindObjectsFinal(sessionHandle);
      }
    } catch (error) {
      return null;
    }
  };

  const $keyExists = (session: pkcs11js.Handle, type: HsmKeyType): boolean => {
    try {
      const key = $findKey(session, type);
      // items(0) will throw an error if no items are found
      // Return true only if we got a valid object with handle
      return !!key && key.length > 0;
    } catch (error) {
      // If items(0) throws, it means no key was found
      // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-call
      logger.error(error, "HSM: Failed while checking for HSM key presence");

      if (error instanceof pkcs11js.Pkcs11Error) {
        if (error.code === pkcs11js.CKR_OBJECT_HANDLE_INVALID) {
          return false;
        }
      }

      return false;
    }
  };
  const encrypt: {
    (data: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>;
    (data: Buffer): Promise<Buffer>;
  } = async (data: Buffer, providedSession?: pkcs11js.Handle) => {
    if (!pkcs11 || !isInitialized) {
      throw new Error("PKCS#11 module is not initialized");
    }

    const $performEncryption = (sessionHandle: pkcs11js.Handle) => {
      try {
        const aesKey = $findKey(sessionHandle, HsmKeyType.AES);
        if (!aesKey) {
          throw new Error("HSM: Encryption failed, AES key not found");
        }

        const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC);
        if (!hmacKey) {
          throw new Error("HSM: Encryption failed, HMAC key not found");
        }

        const iv = Buffer.alloc(IV_LENGTH);
        pkcs11.C_GenerateRandom(sessionHandle, iv);

        const encryptMechanism = {
          mechanism: pkcs11js.CKM_AES_CBC_PAD,
          parameter: iv
        };

        pkcs11.C_EncryptInit(sessionHandle, encryptMechanism, aesKey);

        // Calculate max buffer size (input length + potential full block of padding)
        const maxEncryptedLength = Math.ceil(data.length / BLOCK_SIZE) * BLOCK_SIZE + BLOCK_SIZE;

        // Encrypt the data - this returns the encrypted data directly
        const encryptedData = pkcs11.C_Encrypt(sessionHandle, data, Buffer.alloc(maxEncryptedLength));

        // Initialize HMAC
        const hmacMechanism = {
          mechanism: pkcs11js.CKM_SHA256_HMAC
        };

        pkcs11.C_SignInit(sessionHandle, hmacMechanism, hmacKey);

        // Sign the IV and encrypted data
        pkcs11.C_SignUpdate(sessionHandle, iv);
        pkcs11.C_SignUpdate(sessionHandle, encryptedData);

        // Get the HMAC
        const hmac = Buffer.alloc(HMAC_SIZE);
        pkcs11.C_SignFinal(sessionHandle, hmac);

        // Combine encrypted data and HMAC [Encrypted Data | HMAC]
        const finalBuffer = Buffer.alloc(encryptedData.length + hmac.length);
        encryptedData.copy(finalBuffer);
        hmac.copy(finalBuffer, encryptedData.length);

        return Buffer.concat([iv, finalBuffer]);
      } catch (error) {
        logger.error(error, "HSM: Failed to perform encryption");
        throw new Error(`HSM: Encryption failed: ${(error as Error)?.message}`);
      }
    };

    if (providedSession) {
      return $performEncryption(providedSession);
    }

    const result = await $withSession($performEncryption);
    return result;
  };
const decrypt: {
|
||||
(encryptedBlob: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>;
|
||||
(encryptedBlob: Buffer): Promise<Buffer>;
|
||||
} = async (encryptedBlob: Buffer, providedSession?: pkcs11js.Handle) => {
|
||||
if (!pkcs11 || !isInitialized) {
|
||||
throw new Error("PKCS#11 module is not initialized");
|
||||
}
|
||||
|
||||
const $performDecryption = (sessionHandle: pkcs11js.Handle) => {
|
||||
try {
|
||||
// structure is: [IV (16 bytes) | Encrypted Data (N bytes) | HMAC (32 bytes)]
|
||||
const iv = encryptedBlob.subarray(0, IV_LENGTH);
|
||||
const encryptedDataWithHmac = encryptedBlob.subarray(IV_LENGTH);
|
||||
|
||||
// Split encrypted data and HMAC
|
||||
const hmac = encryptedDataWithHmac.subarray(-HMAC_SIZE); // Last 32 bytes are HMAC
|
||||
|
||||
const encryptedData = encryptedDataWithHmac.subarray(0, -HMAC_SIZE); // Everything except last 32 bytes
|
||||
|
||||
// Find the keys
|
||||
const aesKey = $findKey(sessionHandle, HsmKeyType.AES);
|
||||
if (!aesKey) {
|
||||
throw new Error("HSM: Decryption failed, AES key not found");
|
||||
}
|
||||
|
||||
const hmacKey = $findKey(sessionHandle, HsmKeyType.HMAC);
|
||||
if (!hmacKey) {
|
||||
throw new Error("HSM: Decryption failed, HMAC key not found");
|
||||
}
|
||||
|
||||
// Verify HMAC first
|
||||
const hmacMechanism = {
|
||||
mechanism: pkcs11js.CKM_SHA256_HMAC
|
||||
};
|
||||
|
||||
pkcs11.C_VerifyInit(sessionHandle, hmacMechanism, hmacKey);
|
||||
pkcs11.C_VerifyUpdate(sessionHandle, iv);
|
||||
pkcs11.C_VerifyUpdate(sessionHandle, encryptedData);
|
||||
|
||||
try {
|
||||
pkcs11.C_VerifyFinal(sessionHandle, hmac);
|
||||
} catch (error) {
|
||||
logger.error(error, "HSM: HMAC verification failed");
|
||||
throw new Error("HSM: Decryption failed"); // Generic error for failed verification
|
||||
}
|
||||
|
||||
// Only decrypt if verification passed
|
||||
const decryptMechanism = {
|
||||
mechanism: pkcs11js.CKM_AES_CBC_PAD,
|
||||
parameter: iv
|
||||
};
|
||||
|
||||
pkcs11.C_DecryptInit(sessionHandle, decryptMechanism, aesKey);
|
||||
|
||||
const tempBuffer = Buffer.alloc(encryptedData.length);
|
||||
const decryptedData = pkcs11.C_Decrypt(sessionHandle, encryptedData, tempBuffer);
|
||||
|
||||
// Create a new buffer from the decrypted data
|
||||
return Buffer.from(decryptedData);
|
||||
} catch (error) {
|
||||
logger.error(error, "HSM: Failed to perform decryption");
|
||||
throw new Error("HSM: Decryption failed"); // Generic error for failed decryption, to avoid leaking details about why it failed (such as padding related errors)
|
||||
}
|
||||
};
|
||||
|
||||
if (providedSession) {
|
||||
return $performDecryption(providedSession);
|
||||
}
|
||||
|
||||
const result = await $withSession($performDecryption);
|
||||
return result;
|
||||
};
|
||||
|
||||
// We test the core functionality of the PKCS#11 module that we are using throughout Infisical. This is to ensure that the user doesn't configure a faulty or unsupported HSM device.
|
||||
const $testPkcs11Module = async (session: pkcs11js.Handle) => {
|
||||
try {
|
||||
if (!pkcs11 || !isInitialized) {
|
||||
throw new Error("PKCS#11 module is not initialized");
|
||||
}
|
||||
|
||||
if (!session) {
|
||||
throw new Error("HSM: Attempted to run test without a valid session");
|
||||
}
|
||||
|
||||
const randomData = pkcs11.C_GenerateRandom(session, Buffer.alloc(500));
|
||||
|
||||
const encryptedData = await encrypt(randomData, session);
|
||||
const decryptedData = await decrypt(encryptedData, session);
|
||||
|
||||
const randomDataHex = randomData.toString("hex");
|
||||
const decryptedDataHex = decryptedData.toString("hex");
|
||||
|
||||
if (randomDataHex !== decryptedDataHex && Buffer.compare(randomData, decryptedData)) {
|
||||
throw new Error("HSM: Startup test failed. Decrypted data does not match original data");
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error(error, "HSM: Error testing PKCS#11 module");
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
const isActive = async () => {
|
||||
if (!isInitialized || !appCfg.isHsmConfigured) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let pkcs11TestPassed = false;
|
||||
|
||||
try {
|
||||
pkcs11TestPassed = await $withSession($testPkcs11Module);
|
||||
} catch (err) {
|
||||
logger.error(err, "HSM: Error testing PKCS#11 module");
|
||||
}
|
||||
|
||||
return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
|
||||
};
|
||||
|
||||
const startService = async () => {
|
||||
if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;
|
||||
|
||||
try {
|
||||
await $withSession(async (sessionHandle) => {
|
||||
// Check if master key exists, create if not
|
||||
|
||||
const genericAttributes = [
|
||||
{ type: pkcs11js.CKA_TOKEN, value: true }, // Persistent storage
|
||||
{ type: pkcs11js.CKA_EXTRACTABLE, value: false }, // Cannot be extracted
|
||||
{ type: pkcs11js.CKA_SENSITIVE, value: true }, // Sensitive value
|
||||
{ type: pkcs11js.CKA_PRIVATE, value: true } // Requires authentication
|
||||
];
|
||||
|
||||
if (!$keyExists(sessionHandle, HsmKeyType.AES)) {
|
||||
// Template for generating 256-bit AES master key
|
||||
const keyTemplate = [
|
||||
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
|
||||
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
|
||||
{ type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
|
||||
{ type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
|
||||
{ type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
|
||||
{ type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
|
||||
...genericAttributes
|
||||
];
|
||||
|
||||
// Generate the key
|
||||
pkcs11.C_GenerateKey(
|
||||
sessionHandle,
|
||||
{
|
||||
mechanism: pkcs11js.CKM_AES_KEY_GEN
|
||||
},
|
||||
keyTemplate
|
||||
);
|
||||
|
||||
logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
|
||||
}
|
||||
|
||||
// Check if HMAC key exists, create if not
|
||||
if (!$keyExists(sessionHandle, HsmKeyType.HMAC)) {
|
||||
const hmacKeyTemplate = [
|
||||
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
|
||||
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
|
||||
{ type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
|
||||
{ type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
|
||||
{ type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
|
||||
{ type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
|
||||
...genericAttributes
|
||||
];
|
||||
|
||||
// Generate the HMAC key
|
||||
pkcs11.C_GenerateKey(
|
||||
sessionHandle,
|
||||
{
|
||||
mechanism: pkcs11js.CKM_GENERIC_SECRET_KEY_GEN
|
||||
},
|
||||
hmacKeyTemplate
|
||||
);
|
||||
|
||||
logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
|
||||
}
|
||||
|
||||
// Get slot info to check supported mechanisms
|
||||
const slotId = pkcs11.C_GetSessionInfo(sessionHandle).slotID;
|
||||
const mechanisms = pkcs11.C_GetMechanismList(slotId);
|
||||
|
||||
// Check for AES CBC PAD support
|
||||
const hasAesCbc = mechanisms.includes(pkcs11js.CKM_AES_CBC_PAD);
|
||||
|
||||
if (!hasAesCbc) {
|
||||
throw new Error(`Required mechanism CKM_AEC_CBC_PAD not supported by HSM`);
|
||||
}
|
||||
|
||||
// Run test encryption/decryption
|
||||
const testPassed = await $testPkcs11Module(sessionHandle);
|
||||
|
||||
if (!testPassed) {
|
||||
throw new Error("PKCS#11 module test failed. Please ensure that the HSM is correctly configured.");
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(error, "HSM: Error initializing HSM service:");
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
encrypt,
|
||||
startService,
|
||||
isActive,
|
||||
decrypt
|
||||
};
|
||||
};
|
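The functions above implement encrypt-then-MAC: the HMAC is computed over the IV plus the ciphertext, and the final blob is laid out as [IV (16 bytes) | ciphertext | HMAC (32 bytes)]. A minimal sketch of the same layout and verification order using Node's built-in crypto module instead of a PKCS#11 device; the key buffers and constant names here are illustrative assumptions, not the HSM code's actual key material:

import crypto from "node:crypto";

const IV_LEN = 16; // AES block size
const MAC_LEN = 32; // SHA-256 output size

// Encrypt-then-MAC: the MAC covers IV || ciphertext, mirroring the HSM blob layout.
const sealSketch = (data: Buffer, aesKey: Buffer, hmacKey: Buffer): Buffer => {
  const iv = crypto.randomBytes(IV_LEN);
  const cipher = crypto.createCipheriv("aes-256-cbc", aesKey, iv); // PKCS#7 padding by default
  const ciphertext = Buffer.concat([cipher.update(data), cipher.final()]);
  const mac = crypto.createHmac("sha256", hmacKey).update(iv).update(ciphertext).digest();
  return Buffer.concat([iv, ciphertext, mac]); // [IV | ciphertext | HMAC]
};

const openSketch = (blob: Buffer, aesKey: Buffer, hmacKey: Buffer): Buffer => {
  const iv = blob.subarray(0, IV_LEN);
  const ciphertext = blob.subarray(IV_LEN, blob.length - MAC_LEN);
  const mac = blob.subarray(blob.length - MAC_LEN);
  const expected = crypto.createHmac("sha256", hmacKey).update(iv).update(ciphertext).digest();
  // Verify before decrypting, with a constant-time comparison to avoid timing leaks.
  if (!crypto.timingSafeEqual(mac, expected)) throw new Error("Decryption failed");
  const decipher = crypto.createDecipheriv("aes-256-cbc", aesKey, iv);
  return Buffer.concat([decipher.update(ciphertext), decipher.final()]);
};

Verifying the MAC before touching the cipher is what keeps padding errors from leaking, which is also why the HSM code above throws only a generic decryption failure.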
@@ -1,11 +0,0 @@
import pkcs11js from "pkcs11js";

export type HsmModule = {
  pkcs11: pkcs11js.PKCS11;
  isInitialized: boolean;
};

export enum HsmKeyType {
  AES = "AES",
  HMAC = "hmac"
}
@@ -29,7 +29,6 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
  auditLogStreams: false,
  auditLogStreamLimit: 3,
  samlSSO: false,
  hsm: false,
  oidcSSO: false,
  scim: false,
  ldap: false,
@@ -46,7 +46,6 @@ export type TFeatureSet = {
  auditLogStreams: false;
  auditLogStreamLimit: 3;
  samlSSO: false;
  hsm: false;
  oidcSSO: false;
  scim: false;
  ldap: false;
@@ -17,7 +17,7 @@ import {
  infisicalSymmetricDecrypt,
  infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -56,7 +56,7 @@ type TOidcConfigServiceFactoryDep = {
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
  tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
  smtpService: Pick<TSmtpService, "sendMail" | "verify">;
  smtpService: Pick<TSmtpService, "sendMail">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
};
@@ -223,7 +223,6 @@ export const oidcConfigServiceFactory = ({
        let newUser: TUsers | undefined;

        if (serverCfg.trustOidcEmails) {
          // we prioritize getting the most complete user to create the new alias under
          newUser = await userDAL.findOne(
            {
              email,
@@ -231,23 +230,6 @@
            },
            tx
          );

          if (!newUser) {
            // this fetches user entries created via invites
            newUser = await userDAL.findOne(
              {
                username: email
              },
              tx
            );

            if (newUser && !newUser.isEmailVerified) {
              // we automatically mark it as email-verified because we've configured trust for OIDC emails
              newUser = await userDAL.updateById(newUser.id, {
                isEmailVerified: true
              });
            }
          }
        }

        if (!newUser) {
@@ -350,20 +332,14 @@
        userId: user.id
      });

      await smtpService
        .sendMail({
          template: SmtpTemplates.EmailVerification,
          subjectLine: "Infisical confirmation code",
          recipients: [user.email],
          substitutions: {
            code: token
          }
        })
        .catch((err: Error) => {
          throw new OidcAuthError({
            message: `Error sending email confirmation code for user registration - contact the Infisical instance admin. ${err.message}`
          });
        });
      await smtpService.sendMail({
        template: SmtpTemplates.EmailVerification,
        subjectLine: "Infisical confirmation code",
        recipients: [user.email],
        substitutions: {
          code: token
        }
      });
    }

    return { isUserCompleted, providerAuthToken };
@@ -419,18 +395,6 @@
        message: `Organization bot for organization with ID '${org.id}' not found`,
        name: "OrgBotNotFound"
      });

    const serverCfg = await getServerCfg();
    if (isActive && !serverCfg.trustOidcEmails) {
      const isSmtpConnected = await smtpService.verify();
      if (!isSmtpConnected) {
        throw new BadRequestError({
          message:
            "Cannot enable OIDC when there are issues with the instance's SMTP configuration. Bypass this by turning on trust for OIDC emails in the server admin console."
        });
      }
    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: orgBot.encryptedSymmetricKey,
      iv: orgBot.symmetricKeyIV,
@@ -29,18 +29,4 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
  }
}

const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
  const handler = {
    get(target: Record<string, string>, prop: string) {
      if (!(prop in target)) {
        // eslint-disable-next-line no-param-reassign
        target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
      }
      return target[prop];
    }
  };

  return new Proxy(obj, handler);
};

export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
export { isAuthMethodSaml, validateOrgSSO };
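For context on the Proxy trick above: handlebars normally renders a missing key as an empty string, which would silently corrupt the JSON-encoded permission rules, while the Proxy re-emits the original {{identity.metadata.*}} placeholder so it survives for a later pass. A small illustrative sketch (the template string is made up):

import handlebars from "handlebars";

const template = handlebars.compile(`{"secretPath":"/{{identity.metadata.team}}"}`, { data: false });

// Without the Proxy, a missing "team" key renders as an empty string:
template({ identity: { metadata: {} } }); // -> {"secretPath":"/"}

// With the Proxy, the placeholder survives untouched for a later interpolation pass:
template({ identity: { metadata: escapeHandlebarsMissingMetadata({}) } });
// -> {"secretPath":"/{{identity.metadata.team}}"}

The replacement approach on the other side of this diff compiles with strict: true instead, so a missing metadata key now throws at render time rather than rendering silently.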
@@ -21,7 +21,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok

import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
import { TPermissionDALFactory } from "./permission-dal";
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
import { validateOrgSSO } from "./permission-fns";
import { TBuildOrgPermissionDTO, TBuildProjectPermissionDTO } from "./permission-service-types";
import {
  buildServiceTokenProjectPermission,
@@ -227,13 +225,11 @@ export const permissionServiceFactory = ({
      })) || [];

    const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
    const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
    const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
      objectify(
        userProjectPermission.metadata,
        (i) => i.key,
        (i) => i.value
      )
    const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
    const metadataKeyValuePair = objectify(
      userProjectPermission.metadata,
      (i) => i.key,
      (i) => i.value
    );
    const interpolateRules = templatedRules(
      {
@@ -294,15 +292,12 @@ export const permissionServiceFactory = ({
      })) || [];

    const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
    const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
    const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
      objectify(
        identityProjectPermission.metadata,
        (i) => i.key,
        (i) => i.value
      )
    const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false, strict: true });
    const metadataKeyValuePair = objectify(
      identityProjectPermission.metadata,
      (i) => i.key,
      (i) => i.value
    );

    const interpolateRules = templatedRules(
      {
        identity: {
@@ -582,6 +582,11 @@ export const FOLDERS = {
    projectSlug: "The slug of the project where the folder is located.",
    workspaceId: "The ID of the project where the folder is located."
  },
  MOVE: {
    projectId: "The ID of the project to move the folder from.",
    folderId: "The ID of the folder to move.",
    newPath: "The new path of the folder."
  },
  DELETE: {
    folderIdOrName: "The ID or name of the folder to delete.",
    workspaceId: "The ID of the project to delete the folder from.",
@@ -157,37 +157,16 @@ const envSchema = z
    INFISICAL_CLOUD: zodStrBool.default("false"),
    MAINTENANCE_MODE: zodStrBool.default("false"),
    CAPTCHA_SECRET: zpStr(z.string().optional()),

    // TELEMETRY
    OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
    OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()),
    OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
    OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()),
    OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
    OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),

    PLAIN_API_KEY: zpStr(z.string().optional()),
    PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
    DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
    SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"),
    WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string().optional()),
    WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
    ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),

    // HSM
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
    HSM_SLOT: z.coerce.number().optional().default(0)
    ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true")
  })
  // To ensure that basic encryption is always possible.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  .transform((data) => ({
    ...data,

    DB_READ_REPLICAS: data.DB_READ_REPLICAS
      ? databaseReadReplicaSchema.parse(JSON.parse(data.DB_READ_REPLICAS))
      : undefined,
@@ -196,14 +175,10 @@ const envSchema = z
    isRedisConfigured: Boolean(data.REDIS_URL),
    isDevelopmentMode: data.NODE_ENV === "development",
    isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,

    isSecretScanningConfigured:
      Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
      Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
      Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
    isHsmConfigured:
      Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,

    samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
    SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
  }));
@@ -212,11 +187,11 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;

export const getConfig = () => envCfg;
// cannot import the singleton logger directly, as it needs the config to load its various transports
export const initEnvConfig = (logger?: Logger) => {
export const initEnvConfig = (logger: Logger) => {
  const parsedEnv = envSchema.safeParse(process.env);
  if (!parsedEnv.success) {
    (logger ?? console).error("Invalid environment variables. Check the error below");
    (logger ?? console).error(parsedEnv.error.issues);
    logger.error("Invalid environment variables. Check the error below");
    logger.error(parsedEnv.error.issues);
    process.exit(-1);
  }
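The schema above follows a refine-then-transform pattern: refine enforces a cross-field invariant, and transform derives convenience flags such as isHsmConfigured from the raw values. A standalone sketch of the same pattern with illustrative names:

import { z } from "zod";

const cfgSchema = z
  .object({
    ENCRYPTION_KEY: z.string().optional(),
    ROOT_ENCRYPTION_KEY: z.string().optional()
  })
  // Cross-field invariant: at least one key must be present.
  .refine(
    (data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
    "Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
  )
  // Derived flag, same shape as isHsmConfigured above.
  .transform((data) => ({
    ...data,
    isEncryptionConfigured: Boolean(data.ENCRYPTION_KEY || data.ROOT_ENCRYPTION_KEY)
  }));

cfgSchema.parse({}); // throws: neither key is set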
@@ -133,15 +133,3 @@ export class ScimRequestError extends Error {
    this.status = status;
  }
}

export class OidcAuthError extends Error {
  name: string;

  error: unknown;

  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
    super(message || "Something went wrong");
    this.name = name || "OidcAuthError";
    this.error = error;
  }
}
@@ -1,91 +0,0 @@
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { Resource } from "@opentelemetry/resources";
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
import dotenv from "dotenv";

import { initEnvConfig } from "../config/env";

dotenv.config();

const initTelemetryInstrumentation = ({
  exportType,
  otlpURL,
  otlpUser,
  otlpPassword,
  otlpPushInterval
}: {
  exportType?: string;
  otlpURL?: string;
  otlpUser?: string;
  otlpPassword?: string;
  otlpPushInterval?: number;
}) => {
  diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

  const resource = Resource.default().merge(
    new Resource({
      [ATTR_SERVICE_NAME]: "infisical-core",
      [ATTR_SERVICE_VERSION]: "0.1.0"
    })
  );

  const metricReaders = [];
  switch (exportType) {
    case "prometheus": {
      const promExporter = new PrometheusExporter();
      metricReaders.push(promExporter);
      break;
    }
    case "otlp": {
      const otlpExporter = new OTLPMetricExporter({
        url: `${otlpURL}/v1/metrics`,
        headers: {
          Authorization: `Basic ${btoa(`${otlpUser}:${otlpPassword}`)}`
        },
        temporalityPreference: AggregationTemporality.DELTA
      });
      metricReaders.push(
        new PeriodicExportingMetricReader({
          exporter: otlpExporter,
          exportIntervalMillis: otlpPushInterval
        })
      );
      break;
    }
    default:
      throw new Error("Invalid OTEL export type");
  }

  const meterProvider = new MeterProvider({
    resource,
    readers: metricReaders
  });

  opentelemetry.metrics.setGlobalMeterProvider(meterProvider);

  registerInstrumentations({
    instrumentations: [getNodeAutoInstrumentations()]
  });
};

const setupTelemetry = () => {
  const appCfg = initEnvConfig();

  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
    console.log("Initializing telemetry instrumentation");
    initTelemetryInstrumentation({
      otlpURL: appCfg.OTEL_EXPORT_OTLP_ENDPOINT,
      otlpUser: appCfg.OTEL_COLLECTOR_BASIC_AUTH_USERNAME,
      otlpPassword: appCfg.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD,
      otlpPushInterval: appCfg.OTEL_OTLP_PUSH_INTERVAL,
      exportType: appCfg.OTEL_EXPORT_TYPE
    });
  }
};

void setupTelemetry();
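Once setGlobalMeterProvider has run, any module can record metrics through @opentelemetry/api alone, without touching the SDK packages. A small usage sketch; the meter and counter names are illustrative:

import opentelemetry from "@opentelemetry/api";

const meter = opentelemetry.metrics.getMeter("infisical-core");
const loginCounter = meter.createCounter("user_logins_total", {
  description: "Number of successful logins"
});

// Recorded values flow to whichever reader was configured above: the Prometheus
// exporter serves a scrape endpoint (port 9464 by default), while the OTLP
// reader pushes on the configured interval.
loginCounter.add(1, { method: "email" });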
@@ -1,10 +1,6 @@
import "./lib/telemetry/instrumentation";

import dotenv from "dotenv";
import path from "path";

import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";

import { initAuditLogDbConnection, initDbConnection } from "./db";
import { keyStoreFactory } from "./keystore/keystore";
import { formatSmtpConfig, initEnvConfig, IS_PACKAGED } from "./lib/config/env";
@@ -20,7 +16,6 @@ dotenv.config();
const run = async () => {
  const logger = await initLogger();
  const appCfg = initEnvConfig(logger);

  const db = initDbConnection({
    dbConnectionUri: appCfg.DB_CONNECTION_URI,
    dbRootCert: appCfg.DB_ROOT_CERT,
@@ -58,17 +53,13 @@ const run = async () => {
  const queue = queueServiceFactory(appCfg.REDIS_URL);
  const keyStore = keyStoreFactory(appCfg.REDIS_URL);

  const hsmModule = initializeHsmModule();
  hsmModule.initialize();

  const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore });
  const server = await main({ db, auditLogDb, smtp, logger, queue, keyStore });
  const bootstrap = await bootstrapCheck({ db });

  // eslint-disable-next-line
  process.on("SIGINT", async () => {
    await server.close();
    await db.destroy();
    hsmModule.finalize();
    process.exit(0);
  });

@@ -76,7 +67,6 @@ const run = async () => {
  process.on("SIGTERM", async () => {
    await server.close();
    await db.destroy();
    hsmModule.finalize();
    process.exit(0);
  });

@@ -14,7 +14,6 @@ import fastify from "fastify";
import { Knex } from "knex";
import { Logger } from "pino";

import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
import { TQueueServiceFactory } from "@app/queue";
@@ -22,7 +21,6 @@ import { TSmtpService } from "@app/services/smtp/smtp-service";

import { globalRateLimiterCfg } from "./config/rateLimiter";
import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
import { apiMetrics } from "./plugins/api-metrics";
import { fastifyErrHandler } from "./plugins/error-handler";
import { registerExternalNextjs } from "./plugins/external-nextjs";
import { serializerCompiler, validatorCompiler, ZodTypeProvider } from "./plugins/fastify-zod";
@@ -38,19 +36,16 @@ type TMain = {
  logger?: Logger;
  queue: TQueueServiceFactory;
  keyStore: TKeyStoreFactory;
  hsmModule: HsmModule;
};

// Run the server!
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
export const main = async ({ db, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
  const appCfg = getConfig();

  const server = fastify({
    logger: appCfg.NODE_ENV === "test" ? false : logger,
    trustProxy: true,
    connectionTimeout: appCfg.isHsmConfigured ? 90_000 : 30_000,
    ignoreTrailingSlash: true,
    pluginTimeout: 40_000
    connectionTimeout: 30 * 1000,
    ignoreTrailingSlash: true
  }).withTypeProvider<ZodTypeProvider>();

  server.setValidatorCompiler(validatorCompiler);
@@ -87,10 +82,6 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
  // pull ip based on various proxy headers
  await server.register(fastifyIp);

  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
    await server.register(apiMetrics);
  }

  await server.register(fastifySwagger);
  await server.register(fastifyFormBody);
  await server.register(fastifyErrHandler);
@@ -104,7 +95,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key

  await server.register(maintenanceMode);

  await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });
  await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore });

  if (appCfg.isProductionMode) {
    await server.register(registerExternalNextjs, {
@@ -46,10 +46,10 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
  await createTransport(smtpCfg)
    .verify()
    .then(async () => {
      console.info(`SMTP - Verified connection to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
      console.info("SMTP successfully connected");
    })
    .catch((err: Error) => {
      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT} - ${err.message}`);
    .catch((err) => {
      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
      logger.error(err);
    });

@@ -1,21 +0,0 @@
import opentelemetry from "@opentelemetry/api";
import fp from "fastify-plugin";

export const apiMetrics = fp(async (fastify) => {
  const apiMeter = opentelemetry.metrics.getMeter("API");
  const latencyHistogram = apiMeter.createHistogram("API_latency", {
    unit: "ms"
  });

  fastify.addHook("onResponse", async (request, reply) => {
    const { method } = request;
    const route = request.routerPath;
    const { statusCode } = reply;

    latencyHistogram.record(reply.elapsedTime, {
      route,
      method,
      statusCode
    });
  });
});
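A sketch of how this plugin would be exercised in isolation, assuming the global meter provider from the instrumentation file above has already been registered; fastify's built-in inject() drives the onResponse hook without opening a socket:

import fastify from "fastify";

const app = fastify();
await app.register(apiMetrics); // the plugin defined above
app.get("/ping", async () => "pong");

// The onResponse hook records reply.elapsedTime against route/method/statusCode.
await app.inject({ method: "GET", url: "/ping" });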
@@ -10,7 +10,6 @@ import {
  GatewayTimeoutError,
  InternalServerError,
  NotFoundError,
  OidcAuthError,
  RateLimitError,
  ScimRequestError,
  UnauthorizedError
@@ -84,10 +83,7 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
      status: error.status,
      detail: error.detail
    });
  } else if (error instanceof OidcAuthError) {
    void res
      .status(HttpStatusCodes.InternalServerError)
      .send({ statusCode: HttpStatusCodes.InternalServerError, message: error.message, error: error.name });
    // Handle JWT errors and make them more human-readable for the end-user.
  } else if (error instanceof jwt.JsonWebTokenError) {
    const message = (() => {
      if (error.message === JWTErrors.JwtExpired) {
@@ -1,4 +1,5 @@
import { CronJob } from "cron";
// import { Redis } from "ioredis";
import { Knex } from "knex";
import { z } from "zod";

@@ -30,8 +31,6 @@ import { externalKmsServiceFactory } from "@app/ee/services/external-kms/externa
import { groupDALFactory } from "@app/ee/services/group/group-dal";
import { groupServiceFactory } from "@app/ee/services/group/group-service";
import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-dal";
import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { identityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
@@ -201,8 +200,6 @@ import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admi
import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { totpConfigDALFactory } from "@app/services/totp/totp-config-dal";
import { totpServiceFactory } from "@app/services/totp/totp-service";
import { userDALFactory } from "@app/services/user/user-dal";
import { userServiceFactory } from "@app/services/user/user-service";
import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@@ -226,18 +223,10 @@ export const registerRoutes = async (
  {
    auditLogDb,
    db,
    hsmModule,
    smtp: smtpService,
    queue: queueService,
    keyStore
  }: {
    auditLogDb?: Knex;
    db: Knex;
    hsmModule: HsmModule;
    smtp: TSmtpService;
    queue: TQueueServiceFactory;
    keyStore: TKeyStoreFactory;
  }
  }: { auditLogDb?: Knex; db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory }
) => {
  const appCfg = getConfig();
  await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
@@ -350,7 +339,6 @@ export const registerRoutes = async (
  const slackIntegrationDAL = slackIntegrationDALFactory(db);
  const projectSlackConfigDAL = projectSlackConfigDALFactory(db);
  const workflowIntegrationDAL = workflowIntegrationDALFactory(db);
  const totpConfigDAL = totpConfigDALFactory(db);

  const externalGroupOrgRoleMappingDAL = externalGroupOrgRoleMappingDALFactory(db);

@@ -364,21 +352,14 @@ export const registerRoutes = async (
    projectDAL
  });
  const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });

  const hsmService = hsmServiceFactory({
    hsmModule
  });

  const kmsService = kmsServiceFactory({
    kmsRootConfigDAL,
    keyStore,
    kmsDAL,
    internalKmsDAL,
    orgDAL,
    projectDAL,
    hsmService
    projectDAL
  });

  const externalKmsService = externalKmsServiceFactory({
    kmsDAL,
    kmsService,
@@ -514,19 +495,12 @@ export const registerRoutes = async (
    projectMembershipDAL
  });

  const totpService = totpServiceFactory({
    totpConfigDAL,
    userDAL,
    kmsService
  });

  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, totpService });
  const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL });
  const passwordService = authPaswordServiceFactory({
    tokenService,
    smtpService,
    authDAL,
    userDAL,
    totpConfigDAL
    userDAL
  });

  const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL, projectDAL });
@@ -582,7 +556,6 @@ export const registerRoutes = async (
    userDAL,
    authService: loginService,
    serverCfgDAL: superAdminDAL,
    kmsRootConfigDAL,
    orgService,
    keyStore,
    licenseService,
@@ -1288,13 +1261,10 @@ export const registerRoutes = async (
  });

  await superAdminService.initServerCfg();

  //
  // setup the communication with license key server
  await licenseService.init();

  // Start HSM service if it's configured/enabled.
  await hsmService.startService();

  await telemetryQueue.startTelemetryCheck();
  await dailyResourceCleanUp.startCleanUp();
  await dailyExpiringPkiItemAlert.startSendingAlerts();
@@ -1372,15 +1342,13 @@ export const registerRoutes = async (
    secretSharing: secretSharingService,
    userEngagement: userEngagementService,
    externalKms: externalKmsService,
    hsm: hsmService,
    cmek: cmekService,
    orgAdmin: orgAdminService,
    slack: slackService,
    workflowIntegration: workflowIntegrationService,
    migration: migrationService,
    externalGroupOrgRoleMapping: externalGroupOrgRoleMappingService,
    projectTemplate: projectTemplateService,
    totp: totpService
    projectTemplate: projectTemplateService
  });

  const cronJobs: CronJob[] = [];
@@ -7,7 +7,6 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
@@ -196,57 +195,6 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
    }
  });

  server.route({
    method: "GET",
    url: "/encryption-strategies",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.object({
          strategies: z
            .object({
              strategy: z.nativeEnum(RootKeyEncryptionStrategy),
              enabled: z.boolean()
            })
            .array()
        })
      }
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },

    handler: async () => {
      const encryptionDetails = await server.services.superAdmin.getConfiguredEncryptionStrategies();
      return encryptionDetails;
    }
  });

  server.route({
    method: "PATCH",
    url: "/encryption-strategies",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        strategy: z.nativeEnum(RootKeyEncryptionStrategy)
      })
    },
    onRequest: (req, res, done) => {
      verifyAuth([AuthMode.JWT])(req, res, () => {
        verifySuperAdmin(req, res, done);
      });
    },
    handler: async (req) => {
      await server.services.superAdmin.updateRootEncryptionStrategy(req.body.strategy);
    }
  });

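A hedged usage sketch for the two routes above; the /api/v1/admin prefix, the bearer-token variable, and the "HSM" strategy value are assumptions based on the surrounding code rather than confirmed constants:

declare const adminJwt: string; // super-admin JWT, assumed to exist

const headers = { Authorization: `Bearer ${adminJwt}`, "Content-Type": "application/json" };

// List the configured root-key encryption strategies and whether each is enabled.
const { strategies } = await (
  await fetch("/api/v1/admin/encryption-strategies", { headers })
).json();

// Switch how the root KMS key is protected.
await fetch("/api/v1/admin/encryption-strategies", {
  method: "PATCH",
  headers,
  body: JSON.stringify({ strategy: "HSM" }) // must be a RootKeyEncryptionStrategy value
});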
  server.route({
    method: "POST",
    url: "/signup",
@@ -108,8 +108,7 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => {
        tokenVersionId: tokenVersion.id,
        accessVersion: tokenVersion.accessVersion,
        organizationId: decodedToken.organizationId,
        isMfaVerified: decodedToken.isMfaVerified,
        mfaMethod: decodedToken.mfaMethod
        isMfaVerified: decodedToken.isMfaVerified
      },
      appCfg.AUTH_SECRET,
      { expiresIn: appCfg.JWT_AUTH_LIFETIME }
@@ -840,91 +840,4 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
      };
    }
  });

  server.route({
    method: "GET",
    url: "/secrets-by-keys",
    config: {
      rateLimit: secretsLimit
    },
    schema: {
      security: [
        {
          bearerAuth: []
        }
      ],
      querystring: z.object({
        projectId: z.string().trim(),
        environment: z.string().trim(),
        secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
        keys: z.string().trim().transform(decodeURIComponent)
      }),
      response: {
        200: z.object({
          secrets: secretRawSchema
            .extend({
              secretPath: z.string().optional(),
              tags: SecretTagsSchema.pick({
                id: true,
                slug: true,
                color: true
              })
                .extend({ name: z.string() })
                .array()
                .optional()
            })
            .array()
            .optional()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { secretPath, projectId, environment } = req.query;

      const keys = req.query.keys?.split(",").filter((key) => Boolean(key.trim())) ?? [];
      if (!keys.length) throw new BadRequestError({ message: "One or more keys required" });

      const { secrets } = await server.services.secret.getSecretsRaw({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        environment,
        actorAuthMethod: req.permission.authMethod,
        projectId,
        path: secretPath,
        keys
      });

      await server.services.auditLog.createAuditLog({
        projectId,
        ...req.auditLogInfo,
        event: {
          type: EventType.GET_SECRETS,
          metadata: {
            environment,
            secretPath,
            numberOfSecrets: secrets.length
          }
        }
      });

      if (getUserAgentType(req.headers["user-agent"]) !== UserAgentType.K8_OPERATOR) {
        await server.services.telemetry.sendPostHogEvents({
          event: PostHogEventTypes.SecretPulled,
          distinctId: getTelemetryDistinctId(req),
          properties: {
            numberOfSecrets: secrets.length,
            workspaceId: projectId,
            environment,
            secretPath,
            channel: getUserAgentType(req.headers["user-agent"]),
            ...req.auditLogInfo
          }
        });
      }

      return { secrets };
    }
  });
};
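One detail of the removed route worth noting: keys arrives as a single string that the schema passes through decodeURIComponent before the handler splits it on commas, so a client sends one comma-separated, URI-encoded value rather than a repeated query parameter. A hedged client sketch; the URL prefix and token are assumptions:

declare const jwt: string;
declare const projectId: string;

const qs = new URLSearchParams({
  projectId,
  environment: "dev",
  secretPath: "/",
  keys: ["DB_PASSWORD", "STRIPE_KEY"].join(",") // decoded and split server-side
});

const res = await fetch(`/api/v1/dashboard/secrets-by-keys?${qs.toString()}`, {
  headers: { Authorization: `Bearer ${jwt}` }
});
const { secrets } = await res.json();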
@@ -15,7 +15,7 @@ import { AUDIT_LOGS, ORGANIZATIONS } from "@app/lib/api-docs";
import { getLastMidnightDateISO } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode, MfaMethod } from "@app/services/auth/auth-type";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";

import { integrationAuthPubSchema } from "../sanitizedSchemas";

@@ -259,8 +259,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
          message: "Membership role must be a valid slug"
        })
        .optional(),
      enforceMfa: z.boolean().optional(),
      selectedMfaMethod: z.nativeEnum(MfaMethod).optional()
      enforceMfa: z.boolean().optional()
    }),
    response: {
      200: z.object({
@@ -5,7 +5,7 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { FOLDERS } from "@app/lib/api-docs";
import { prefixWithSlash, removeTrailingSlash } from "@app/lib/fn";
import { isValidFolderName } from "@app/lib/validator";
import { readLimit, secretsLimit } from "@app/server/config/rateLimiter";
import { readLimit, secretsLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

@@ -84,6 +84,50 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
    }
  });

  server.route({
    url: "/:folderId/move",
    method: "PATCH",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "Move folder to new path",
      security: [{ bearerAuth: [] }],
      params: z.object({
        folderId: z.string().describe(FOLDERS.MOVE.folderId)
      }),
      body: z.object({
        projectId: z.string().trim().describe(FOLDERS.MOVE.projectId),
        newPath: z
          .string()
          .trim()
          .describe(FOLDERS.MOVE.newPath)
          .transform(prefixWithSlash)
          .transform(removeTrailingSlash)
      }),

      response: {
        200: z.object({
          folder: SecretFoldersSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const folder = await server.services.folder.moveFolder({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        folderId: req.params.folderId,
        projectId: req.body.projectId,
        newPath: req.body.newPath
      });

      return { folder };
    }
  });

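A hedged call sketch for the move route above; the mount prefix and identifiers are illustrative. Note that the two body transforms normalize the path, so "backend/payments" and "/backend/payments/" both arrive as "/backend/payments":

declare const token: string;
declare const folderId: string;
declare const projectId: string;

await fetch(`/api/v1/folders/${folderId}/move`, {
  method: "PATCH",
  headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
  body: JSON.stringify({
    projectId,
    newPath: "backend/payments" // prefixWithSlash + removeTrailingSlash -> "/backend/payments"
  })
});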
  server.route({
    url: "/:folderId",
    method: "PATCH",
@@ -169,103 +169,4 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
      return groupMemberships;
    }
  });

  server.route({
    method: "GET",
    url: "/me/totp",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.object({
          isVerified: z.boolean(),
          recoveryCodes: z.string().array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      return server.services.totp.getUserTotpConfig({
        userId: req.permission.id
      });
    }
  });

  server.route({
    method: "DELETE",
    url: "/me/totp",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      return server.services.totp.deleteUserTotpConfig({
        userId: req.permission.id
      });
    }
  });

  server.route({
    method: "POST",
    url: "/me/totp/register",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      response: {
        200: z.object({
          otpUrl: z.string(),
          recoveryCodes: z.string().array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT], {
      requireOrg: false
    }),
    handler: async (req) => {
      return server.services.totp.registerUserTotp({
        userId: req.permission.id
      });
    }
  });

  server.route({
    method: "POST",
    url: "/me/totp/verify",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        totp: z.string()
      }),
      response: {
        200: z.object({})
      }
    },
    onRequest: verifyAuth([AuthMode.JWT], {
      requireOrg: false
    }),
    handler: async (req) => {
      return server.services.totp.verifyUserTotpConfig({
        userId: req.permission.id,
        totp: req.body.totp
      });
    }
  });

  server.route({
    method: "POST",
    url: "/me/totp/recovery-codes",
    config: {
      rateLimit: writeLimit
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      return server.services.totp.createUserTotpRecoveryCodes({
        userId: req.permission.id
      });
    }
  });
};
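The removed routes above formed a TOTP enrollment flow: register creates a pending config and returns an otpauth URL plus recovery codes, verify confirms the first code, and the GET route reports status afterwards. A hedged client sketch; the /api/v1/user prefix and the helper are assumptions:

declare const jwt: string;

const api = async (method: string, url: string, body?: unknown) =>
  (
    await fetch(url, {
      method,
      headers: { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" },
      body: body ? JSON.stringify(body) : undefined
    })
  ).json();

// 1. Create a pending TOTP config; render otpUrl as a QR code for the user.
const { otpUrl, recoveryCodes } = await api("POST", "/api/v1/user/me/totp/register");

// 2. The user scans it and submits their first 6-digit code to activate it.
await api("POST", "/api/v1/user/me/totp/verify", { totp: "123456" });

// 3. Status checks now report a verified authenticator.
const { isVerified } = await api("GET", "/api/v1/user/me/totp");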
@@ -2,9 +2,8 @@ import jwt from "jsonwebtoken";
import { z } from "zod";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { mfaRateLimit } from "@app/server/config/rateLimiter";
import { AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "@app/services/auth/auth-type";
import { AuthModeMfaJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type";

export const registerMfaRouter = async (server: FastifyZodProvider) => {
  const cfg = getConfig();
@@ -50,38 +49,6 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
    }
  });

  server.route({
    method: "GET",
    url: "/mfa/check/totp",
    config: {
      rateLimit: mfaRateLimit
    },
    schema: {
      response: {
        200: z.object({
          isVerified: z.boolean()
        })
      }
    },
    handler: async (req) => {
      try {
        const totpConfig = await server.services.totp.getUserTotpConfig({
          userId: req.mfa.userId
        });

        return {
          isVerified: Boolean(totpConfig)
        };
      } catch (error) {
        if (error instanceof NotFoundError || error instanceof BadRequestError) {
          return { isVerified: false };
        }

        throw error;
      }
    }
  });

  server.route({
    url: "/mfa/verify",
    method: "POST",
@@ -90,8 +57,7 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
    },
    schema: {
      body: z.object({
        mfaToken: z.string().trim(),
        mfaMethod: z.nativeEnum(MfaMethod).optional().default(MfaMethod.EMAIL)
        mfaToken: z.string().trim()
      }),
      response: {
        200: z.object({
@@ -120,8 +86,7 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
      ip: req.realIp,
      userId: req.mfa.userId,
      orgId: req.mfa.orgId,
      mfaToken: req.body.mfaToken,
      mfaMethod: req.body.mfaMethod
      mfaToken: req.body.mfaToken
    });

    void res.setCookie("jid", token.refresh, {
@@ -27,7 +27,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
      body: z.object({
        emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails),
        usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames),
        roleSlugs: z.string().array().min(1).optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
        roleSlugs: z.string().array().optional().describe(PROJECT_USERS.INVITE_MEMBER.roleSlugs)
      }),
      response: {
        200: z.object({
@@ -49,7 +49,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
      projects: [
        {
          id: req.params.projectId,
          projectRoleSlug: req.body.roleSlugs || [ProjectMembershipRole.Member]
          projectRoleSlug: [ProjectMembershipRole.Member]
        }
      ]
    });
@@ -4,7 +4,7 @@ import { AuthTokenSessionsSchema, OrganizationsSchema, UserEncryptionKeysSchema,
import { ApiKeysSchema } from "@app/db/schemas/api-keys";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMethod, AuthMode, MfaMethod } from "@app/services/auth/auth-type";
import { AuthMethod, AuthMode } from "@app/services/auth/auth-type";

export const registerUserRouter = async (server: FastifyZodProvider) => {
  server.route({
@@ -56,8 +56,7 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
    },
    schema: {
      body: z.object({
        isMfaEnabled: z.boolean().optional(),
        selectedMfaMethod: z.nativeEnum(MfaMethod).optional()
        isMfaEnabled: z.boolean()
      }),
      response: {
        200: z.object({
@@ -67,12 +66,7 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
    },
    preHandler: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
    handler: async (req) => {
      const user = await server.services.user.updateUserMfa({
        userId: req.permission.id,
        isMfaEnabled: req.body.isMfaEnabled,
        selectedMfaMethod: req.body.selectedMfaMethod
      });

      const user = await server.services.user.toggleUserMfa(req.permission.id, req.body.isMfaEnabled);
      return { user };
    }
  });
@@ -48,8 +48,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
      response: {
        200: z.object({
          token: z.string(),
          isMfaEnabled: z.boolean(),
          mfaMethod: z.string().optional()
          isMfaEnabled: z.boolean()
        })
      }
    },
@@ -65,8 +64,7 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
    if (tokens.isMfaEnabled) {
      return {
        token: tokens.mfa as string,
        isMfaEnabled: true,
        mfaMethod: tokens.mfaMethod
        isMfaEnabled: true
      };
    }

@@ -17,7 +17,6 @@ import { TokenType } from "../auth-token/auth-token-types";
|
||||
import { TOrgDALFactory } from "../org/org-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
|
||||
import { LoginMethod } from "../super-admin/super-admin-types";
|
||||
import { TTotpServiceFactory } from "../totp/totp-service";
|
||||
import { TUserDALFactory } from "../user/user-dal";
|
||||
import { enforceUserLockStatus, validateProviderAuthToken } from "./auth-fns";
|
||||
import {
|
||||
@@ -27,14 +26,13 @@ import {
|
||||
TOauthTokenExchangeDTO,
|
||||
TVerifyMfaTokenDTO
|
||||
} from "./auth-login-type";
|
||||
import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "./auth-type";
|
||||
import { AuthMethod, AuthModeJwtTokenPayload, AuthModeMfaJwtTokenPayload, AuthTokenType } from "./auth-type";
|
||||
|
||||
type TAuthLoginServiceFactoryDep = {
|
||||
userDAL: TUserDALFactory;
|
||||
orgDAL: TOrgDALFactory;
|
||||
tokenService: TAuthTokenServiceFactory;
|
||||
smtpService: TSmtpService;
|
||||
totpService: Pick<TTotpServiceFactory, "verifyUserTotp" | "verifyWithUserRecoveryCode">;
|
||||
};
|
||||
|
||||
export type TAuthLoginFactory = ReturnType<typeof authLoginServiceFactory>;
|
||||
@@ -42,8 +40,7 @@ export const authLoginServiceFactory = ({
|
||||
userDAL,
|
||||
tokenService,
|
||||
smtpService,
|
||||
orgDAL,
|
||||
totpService
|
||||
orgDAL
|
||||
}: TAuthLoginServiceFactoryDep) => {
|
||||
/*
|
||||
* Private
|
||||
@@ -103,8 +100,7 @@ export const authLoginServiceFactory = ({
|
||||
userAgent,
|
||||
organizationId,
|
||||
authMethod,
|
||||
isMfaVerified,
|
||||
mfaMethod
|
||||
isMfaVerified
|
||||
}: {
|
||||
user: TUsers;
|
||||
ip: string;
|
||||
@@ -112,7 +108,6 @@ export const authLoginServiceFactory = ({
|
||||
organizationId?: string;
|
||||
authMethod: AuthMethod;
|
||||
isMfaVerified?: boolean;
|
||||
mfaMethod?: MfaMethod;
|
||||
}) => {
|
||||
const cfg = getConfig();
|
||||
await updateUserDeviceSession(user, ip, userAgent);
|
||||
@@ -131,8 +126,7 @@ export const authLoginServiceFactory = ({
|
||||
tokenVersionId: tokenSession.id,
|
||||
accessVersion: tokenSession.accessVersion,
|
||||
organizationId,
|
||||
isMfaVerified,
|
||||
-        mfaMethod
+        isMfaVerified
      },
      cfg.AUTH_SECRET,
      { expiresIn: cfg.JWT_AUTH_LIFETIME }
@@ -146,8 +140,7 @@ export const authLoginServiceFactory = ({
        tokenVersionId: tokenSession.id,
        refreshVersion: tokenSession.refreshVersion,
        organizationId,
-        isMfaVerified,
-        mfaMethod
+        isMfaVerified
      },
      cfg.AUTH_SECRET,
      { expiresIn: cfg.JWT_REFRESH_LIFETIME }
@@ -360,12 +353,8 @@ export const authLoginServiceFactory = ({
      });
    }

-    const shouldCheckMfa = selectedOrg.enforceMfa || user.isMfaEnabled;
-    const orgMfaMethod = selectedOrg.enforceMfa ? selectedOrg.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
-    const userMfaMethod = user.isMfaEnabled ? user.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
-    const mfaMethod = orgMfaMethod ?? userMfaMethod;
-
-    if (shouldCheckMfa && (!decodedToken.isMfaVerified || decodedToken.mfaMethod !== mfaMethod)) {
+    // send multi factor auth token if they it enabled
+    if ((selectedOrg.enforceMfa || user.isMfaEnabled) && user.email && !decodedToken.isMfaVerified) {
      enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);

      const mfaToken = jwt.sign(
@@ -380,14 +369,12 @@ export const authLoginServiceFactory = ({
        }
      );

-      if (mfaMethod === MfaMethod.EMAIL && user.email) {
-        await sendUserMfaCode({
-          userId: user.id,
-          email: user.email
-        });
-      }
+      await sendUserMfaCode({
+        userId: user.id,
+        email: user.email
+      });

-      return { isMfaEnabled: true, mfa: mfaToken, mfaMethod } as const;
+      return { isMfaEnabled: true, mfa: mfaToken } as const;
    }

    const tokens = await generateUserTokens({
@@ -396,8 +383,7 @@ export const authLoginServiceFactory = ({
      userAgent,
      ip: ipAddress,
      organizationId,
-      isMfaVerified: decodedToken.isMfaVerified,
-      mfaMethod: decodedToken.mfaMethod
+      isMfaVerified: decodedToken.isMfaVerified
    });

    return {
@@ -472,39 +458,17 @@ export const authLoginServiceFactory = ({
   * Multi factor authentication verification of code
   * Third step of login in which user completes with mfa
   * */
-  const verifyMfaToken = async ({
-    userId,
-    mfaToken,
-    mfaMethod,
-    mfaJwtToken,
-    ip,
-    userAgent,
-    orgId
-  }: TVerifyMfaTokenDTO) => {
+  const verifyMfaToken = async ({ userId, mfaToken, mfaJwtToken, ip, userAgent, orgId }: TVerifyMfaTokenDTO) => {
    const appCfg = getConfig();
    const user = await userDAL.findById(userId);
    enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd);

    try {
-      if (mfaMethod === MfaMethod.EMAIL) {
-        await tokenService.validateTokenForUser({
-          type: TokenType.TOKEN_EMAIL_MFA,
-          userId,
-          code: mfaToken
-        });
-      } else if (mfaMethod === MfaMethod.TOTP) {
-        if (mfaToken.length === 6) {
-          await totpService.verifyUserTotp({
-            userId,
-            totp: mfaToken
-          });
-        } else {
-          await totpService.verifyWithUserRecoveryCode({
-            userId,
-            recoveryCode: mfaToken
-          });
-        }
-      }
+      await tokenService.validateTokenForUser({
+        type: TokenType.TOKEN_EMAIL_MFA,
+        userId,
+        code: mfaToken
+      });
    } catch (err) {
      const updatedUser = await processFailedMfaAttempt(userId);
      if (updatedUser.isLocked) {
@@ -549,8 +513,7 @@ export const authLoginServiceFactory = ({
      userAgent,
      organizationId: orgId,
      authMethod: decodedToken.authMethod,
-      isMfaVerified: true,
-      mfaMethod
+      isMfaVerified: true
    });

    return { token, user: userEnc };
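The block removed above is where the login flow decided *which* second factor to demand: an org-level enforcement wins over the user's own setting, and both fall back to email. A self-contained restatement of that rule, with the types trimmed down for illustration:

```ts
enum MfaMethod {
  EMAIL = "email",
  TOTP = "totp"
}

const resolveMfaMethod = (
  org: { enforceMfa: boolean; selectedMfaMethod?: MfaMethod },
  user: { isMfaEnabled: boolean; selectedMfaMethod?: MfaMethod }
): MfaMethod | undefined => {
  const orgMethod = org.enforceMfa ? org.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
  const userMethod = user.isMfaEnabled ? user.selectedMfaMethod ?? MfaMethod.EMAIL : undefined;
  return orgMethod ?? userMethod; // org enforcement takes precedence
};

// e.g. the org enforces TOTP while the user prefers email -> TOTP wins
resolveMfaMethod(
  { enforceMfa: true, selectedMfaMethod: MfaMethod.TOTP },
  { isMfaEnabled: true, selectedMfaMethod: MfaMethod.EMAIL }
); // "totp"
```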
@@ -1,4 +1,4 @@
-import { AuthMethod, MfaMethod } from "./auth-type";
+import { AuthMethod } from "./auth-type";

 export type TLoginGenServerPublicKeyDTO = {
   email: string;
@@ -19,7 +19,6 @@ export type TLoginClientProofDTO = {
 export type TVerifyMfaTokenDTO = {
   userId: string;
   mfaToken: string;
-  mfaMethod: MfaMethod;
   mfaJwtToken: string;
   ip: string;
   userAgent: string;
@@ -8,7 +8,6 @@ import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
 import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
 import { TokenType } from "../auth-token/auth-token-types";
 import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
-import { TTotpConfigDALFactory } from "../totp/totp-config-dal";
 import { TUserDALFactory } from "../user/user-dal";
 import { TAuthDALFactory } from "./auth-dal";
 import { TChangePasswordDTO, TCreateBackupPrivateKeyDTO, TResetPasswordViaBackupKeyDTO } from "./auth-password-type";
@@ -19,7 +18,6 @@ type TAuthPasswordServiceFactoryDep = {
   userDAL: TUserDALFactory;
   tokenService: TAuthTokenServiceFactory;
   smtpService: TSmtpService;
-  totpConfigDAL: Pick<TTotpConfigDALFactory, "delete">;
 };

 export type TAuthPasswordFactory = ReturnType<typeof authPaswordServiceFactory>;
@@ -27,8 +25,7 @@ export const authPaswordServiceFactory = ({
   authDAL,
   userDAL,
   tokenService,
-  smtpService,
-  totpConfigDAL
+  smtpService
 }: TAuthPasswordServiceFactoryDep) => {
   /*
   * Pre setup for pass change with srp protocol
@@ -188,12 +185,6 @@ export const authPaswordServiceFactory = ({
       temporaryLockDateEnd: null,
       consecutiveFailedMfaAttempts: 0
     });
-
-    /* we reset the mobile authenticator configs of the user
-    because we want this to be one of the recovery modes from account lockout */
-    await totpConfigDAL.delete({
-      userId
-    });
   };

   /*
@@ -53,7 +53,6 @@ export type AuthModeJwtTokenPayload = {
   accessVersion: number;
   organizationId?: string;
   isMfaVerified?: boolean;
-  mfaMethod?: MfaMethod;
 };

 export type AuthModeMfaJwtTokenPayload = {
@@ -72,7 +71,6 @@ export type AuthModeRefreshJwtTokenPayload = {
   refreshVersion: number;
   organizationId?: string;
   isMfaVerified?: boolean;
-  mfaMethod?: MfaMethod;
 };

 export type AuthModeProviderJwtTokenPayload = {
@@ -87,8 +85,3 @@ export type AuthModeProviderSignUpTokenPayload = {
   authTokenType: AuthTokenType.SIGNUP_TOKEN;
   userId: string;
 };
-
-export enum MfaMethod {
-  EMAIL = "email",
-  TOTP = "totp"
-}
@@ -29,7 +29,7 @@ import {
 } from "./identity-aws-auth-types";

 type TIdentityAwsAuthServiceFactoryDep = {
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
   identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -346,8 +346,6 @@ export const identityAwsAuthServiceFactory = ({

   const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
     const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
-    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AWS_AUTH }, tx);
-
     return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
   });
   return revokedIdentityAwsAuth;
@@ -30,7 +30,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
     "findOne" | "transaction" | "create" | "updateById" | "delete"
   >;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
@@ -70,9 +70,7 @@ export const identityAzureAuthServiceFactory = ({
       .map((servicePrincipalId) => servicePrincipalId.trim())
       .some((servicePrincipalId) => servicePrincipalId === azureIdentity.oid);

-    if (!isServicePrincipalAllowed) {
-      throw new UnauthorizedError({ message: `Service principal '${azureIdentity.oid}' not allowed` });
-    }
+    if (!isServicePrincipalAllowed) throw new UnauthorizedError({ message: "Service principal not allowed" });
   }

   const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {
@@ -319,8 +317,6 @@ export const identityAzureAuthServiceFactory = ({

   const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
     const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
-    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AZURE_AUTH }, tx);
-
     return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
   });
   return revokedIdentityAzureAuth;
@@ -28,7 +28,7 @@ import {
 type TIdentityGcpAuthServiceFactoryDep = {
   identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
 };
@@ -365,8 +365,6 @@ export const identityGcpAuthServiceFactory = ({

   const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
     const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
-    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.GCP_AUTH }, tx);
-
     return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
   });
   return revokedIdentityGcpAuth;
@@ -41,7 +41,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
     TIdentityKubernetesAuthDALFactory,
     "create" | "findOne" | "transaction" | "updateById" | "delete"
   >;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
@@ -622,7 +622,6 @@ export const identityKubernetesAuthServiceFactory = ({

   const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
     const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
-    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.KUBERNETES_AUTH }, tx);
     return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
   });
   return revokedIdentityKubernetesAuth;
@@ -39,7 +39,7 @@ import {
 type TIdentityOidcAuthServiceFactoryDep = {
   identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
   identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
-  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
+  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
@@ -539,8 +539,6 @@ export const identityOidcAuthServiceFactory = ({

   const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
     const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
-    await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.OIDC_AUTH }, tx);
-
     return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
   });

@@ -1,7 +1,5 @@
 import { SymmetricEncryption } from "@app/lib/crypto/cipher";

-export const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
-
 export const getByteLengthForAlgorithm = (encryptionAlgorithm: SymmetricEncryption) => {
   switch (encryptionAlgorithm) {
     case SymmetricEncryption.AES_GCM_128:
@@ -2,14 +2,13 @@ import slugify from "@sindresorhus/slugify";
 import { Knex } from "knex";
 import { z } from "zod";

-import { KmsKeysSchema, TKmsRootConfig } from "@app/db/schemas";
+import { KmsKeysSchema } from "@app/db/schemas";
 import { AwsKmsProviderFactory } from "@app/ee/services/external-kms/providers/aws-kms";
 import {
   ExternalKmsAwsSchema,
   KmsProviders,
   TExternalKmsProviderFns
 } from "@app/ee/services/external-kms/providers/model";
-import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
 import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
 import { getConfig } from "@app/lib/config/env";
 import { randomSecureBytes } from "@app/lib/crypto";
@@ -18,7 +17,7 @@ import { generateHash } from "@app/lib/crypto/encryption";
 import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
-import { getByteLengthForAlgorithm, KMS_ROOT_CONFIG_UUID } from "@app/services/kms/kms-fns";
+import { getByteLengthForAlgorithm } from "@app/services/kms/kms-fns";

 import { TOrgDALFactory } from "../org/org-dal";
 import { TProjectDALFactory } from "../project/project-dal";
@@ -28,7 +27,6 @@ import { TKmsRootConfigDALFactory } from "./kms-root-config-dal";
 import {
   KmsDataKey,
   KmsType,
-  RootKeyEncryptionStrategy,
   TDecryptWithKeyDTO,
   TDecryptWithKmsDTO,
   TEncryptionWithKeyDTO,
@@ -42,14 +40,15 @@ type TKmsServiceFactoryDep = {
   kmsDAL: TKmsKeyDALFactory;
   projectDAL: Pick<TProjectDALFactory, "findById" | "updateById" | "transaction">;
   orgDAL: Pick<TOrgDALFactory, "findById" | "updateById" | "transaction">;
-  kmsRootConfigDAL: Pick<TKmsRootConfigDALFactory, "findById" | "create" | "updateById">;
+  kmsRootConfigDAL: Pick<TKmsRootConfigDALFactory, "findById" | "create">;
   keyStore: Pick<TKeyStoreFactory, "acquireLock" | "waitTillReady" | "setItemWithExpiry">;
   internalKmsDAL: Pick<TInternalKmsDALFactory, "create">;
-  hsmService: THsmServiceFactory;
 };

 export type TKmsServiceFactory = ReturnType<typeof kmsServiceFactory>;

+const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
+
 const KMS_ROOT_CREATION_WAIT_KEY = "wait_till_ready_kms_root_key";
 const KMS_ROOT_CREATION_WAIT_TIME = 10;

@@ -64,8 +63,7 @@ export const kmsServiceFactory = ({
   keyStore,
   internalKmsDAL,
   orgDAL,
-  projectDAL,
-  hsmService
+  projectDAL
 }: TKmsServiceFactoryDep) => {
   let ROOT_ENCRYPTION_KEY = Buffer.alloc(0);

@@ -612,65 +610,6 @@ export const kmsServiceFactory = ({
     }
   };

-  const $getBasicEncryptionKey = () => {
-    const appCfg = getConfig();
-
-    const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
-    const isBase64 = !appCfg.ENCRYPTION_KEY;
-    if (!encryptionKey)
-      throw new Error(
-        "Root encryption key not found for KMS service. Did you set the ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY environment variables?"
-      );
-
-    const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
-
-    return encryptionKeyBuffer;
-  };
-
-  const $decryptRootKey = async (kmsRootConfig: TKmsRootConfig) => {
-    // case 1: root key is encrypted with HSM
-    if (kmsRootConfig.encryptionStrategy === RootKeyEncryptionStrategy.HSM) {
-      const hsmIsActive = await hsmService.isActive();
-      if (!hsmIsActive) {
-        throw new Error("Unable to decrypt root KMS key. HSM service is inactive. Did you configure the HSM?");
-      }
-
-      const decryptedKey = await hsmService.decrypt(kmsRootConfig.encryptedRootKey);
-      return decryptedKey;
-    }
-
-    // case 2: root key is encrypted with software encryption
-    if (kmsRootConfig.encryptionStrategy === RootKeyEncryptionStrategy.Software) {
-      const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
-      const encryptionKeyBuffer = $getBasicEncryptionKey();
-
-      return cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
-    }
-
-    throw new Error(`Invalid root key encryption strategy: ${kmsRootConfig.encryptionStrategy}`);
-  };
-
-  const $encryptRootKey = async (plainKeyBuffer: Buffer, strategy: RootKeyEncryptionStrategy) => {
-    if (strategy === RootKeyEncryptionStrategy.HSM) {
-      const hsmIsActive = await hsmService.isActive();
-      if (!hsmIsActive) {
-        throw new Error("Unable to encrypt root KMS key. HSM service is inactive. Did you configure the HSM?");
-      }
-      const encrypted = await hsmService.encrypt(plainKeyBuffer);
-      return encrypted;
-    }
-
-    if (strategy === RootKeyEncryptionStrategy.Software) {
-      const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
-      const encryptionKeyBuffer = $getBasicEncryptionKey();
-
-      return cipher.encrypt(plainKeyBuffer, encryptionKeyBuffer);
-    }
-
-    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-    throw new Error(`Invalid root key encryption strategy: ${strategy}`);
-  };
-
   // by keeping the decrypted data key in inner scope
   // none of the entities outside can interact directly or expose the data key
   // NOTICE: If changing here update migrations/utils/kms
@@ -832,6 +771,7 @@ export const kmsServiceFactory = ({
       },
       tx
     );
+
     return kmsDAL.findByIdWithAssociatedKms(key.id, tx);
   });

@@ -854,6 +794,14 @@ export const kmsServiceFactory = ({

   // akhilmhdh: a copy of this is made in migrations/utils/kms
   const startService = async () => {
+    const appCfg = getConfig();
+    // This will switch to a seal process and HMS flow in future
+    const encryptionKey = appCfg.ENCRYPTION_KEY || appCfg.ROOT_ENCRYPTION_KEY;
+    // if root key its base64 encoded
+    const isBase64 = !appCfg.ENCRYPTION_KEY;
+    if (!encryptionKey) throw new Error("Root encryption key not found for KMS service.");
+    const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
+
     const lock = await keyStore.acquireLock([`KMS_ROOT_CFG_LOCK`], 3000, { retryCount: 3 }).catch(() => null);
     if (!lock) {
       await keyStore.waitTillReady({
@@ -865,69 +813,31 @@ export const kmsServiceFactory = ({

     // check if KMS root key was already generated and saved in DB
     const kmsRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);

-    // case 1: a root key already exists in the DB
+    const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
     if (kmsRootConfig) {
       if (lock) await lock.release();
-      logger.info(`KMS: Encrypted ROOT Key found from DB. Decrypting. [strategy=${kmsRootConfig.encryptionStrategy}]`);
-
-      const decryptedRootKey = await $decryptRootKey(kmsRootConfig);
-
-      // set the flag so that other instance nodes can start
+      logger.info("KMS: Encrypted ROOT Key found from DB. Decrypting.");
+      const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
+      // set the flag so that other instancen nodes can start
       await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
       logger.info("KMS: Loading ROOT Key into Memory.");
       ROOT_ENCRYPTION_KEY = decryptedRootKey;
       return;
     }

-    // case 2: no config is found, so we create a new root key with basic encryption
-    logger.info("KMS: Generating new ROOT Key");
+    logger.info("KMS: Generating ROOT Key");
     const newRootKey = randomSecureBytes(32);
-    const encryptedRootKey = await $encryptRootKey(newRootKey, RootKeyEncryptionStrategy.Software).catch((err) => {
-      logger.error({ hsmEnabled: hsmService.isActive() }, "KMS: Failed to encrypt ROOT Key");
-      throw err;
-    });
-
-    await kmsRootConfigDAL.create({
-      // @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
-      id: KMS_ROOT_CONFIG_UUID,
-      encryptedRootKey,
-      encryptionStrategy: RootKeyEncryptionStrategy.Software
-    });
-
-    // set the flag so that other instance nodes can start
+    const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
+    // @ts-expect-error id is kept as fixed for idempotence and to avoid race condition
+    await kmsRootConfigDAL.create({ encryptedRootKey, id: KMS_ROOT_CONFIG_UUID });
+
+    // set the flag so that other instancen nodes can start
     await keyStore.setItemWithExpiry(KMS_ROOT_CREATION_WAIT_KEY, KMS_ROOT_CREATION_WAIT_TIME, "true");
     logger.info("KMS: Saved and loaded ROOT Key into memory");
     if (lock) await lock.release();
     ROOT_ENCRYPTION_KEY = newRootKey;
   };

-  const updateEncryptionStrategy = async (strategy: RootKeyEncryptionStrategy) => {
-    const kmsRootConfig = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
-    if (!kmsRootConfig) {
-      throw new NotFoundError({ message: "KMS root config not found" });
-    }
-
-    if (kmsRootConfig.encryptionStrategy === strategy) {
-      return;
-    }
-
-    const decryptedRootKey = await $decryptRootKey(kmsRootConfig);
-    const encryptedRootKey = await $encryptRootKey(decryptedRootKey, strategy);
-
-    if (!encryptedRootKey) {
-      logger.error("KMS: Failed to re-encrypt ROOT Key with selected strategy");
-      throw new Error("Failed to re-encrypt ROOT Key with selected strategy");
-    }
-
-    await kmsRootConfigDAL.updateById(KMS_ROOT_CONFIG_UUID, {
-      encryptedRootKey,
-      encryptionStrategy: strategy
-    });
-
-    ROOT_ENCRYPTION_KEY = decryptedRootKey;
-  };
-
   return {
     startService,
     generateKmsKey,
@@ -939,7 +849,6 @@ export const kmsServiceFactory = ({
     encryptWithRootKey,
     decryptWithRootKey,
     getOrgKmsKeyId,
-    updateEncryptionStrategy,
     getProjectSecretManagerKmsKeyId,
     updateProjectSecretManagerKmsKey,
     getProjectKeyBackup,
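The `updateEncryptionStrategy` removed above is effectively a re-wrap: decrypt the root key under the old strategy, encrypt it under the new one, persist, and keep the plaintext in memory. A condensed sketch of that flow, reusing the helper names from the removed code (not a drop-in implementation):

```ts
// assumes $decryptRootKey / $encryptRootKey and kmsRootConfigDAL as in the removed code
const rewrapRootKey = async (strategy: RootKeyEncryptionStrategy) => {
  const cfg = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
  if (!cfg) throw new Error("KMS root config not found");
  if (cfg.encryptionStrategy === strategy) return; // already on the requested strategy

  const plainRootKey = await $decryptRootKey(cfg); // unwrap with the old strategy
  const encryptedRootKey = await $encryptRootKey(plainRootKey, strategy); // wrap with the new one

  await kmsRootConfigDAL.updateById(KMS_ROOT_CONFIG_UUID, { encryptedRootKey, encryptionStrategy: strategy });
  ROOT_ENCRYPTION_KEY = plainRootKey; // keep serving requests without a restart
};
```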
@@ -56,8 +56,3 @@ export type TUpdateProjectSecretManagerKmsKeyDTO = {
   projectId: string;
   kms: { type: KmsType.Internal } | { type: KmsType.External; kmsId: string };
 };
-
-export enum RootKeyEncryptionStrategy {
-  Software = "SOFTWARE",
-  HSM = "HSM"
-}
@@ -268,7 +268,7 @@ export const orgServiceFactory = ({
     actorOrgId,
     actorAuthMethod,
     orgId,
-    data: { name, slug, authEnforced, scimEnabled, defaultMembershipRoleSlug, enforceMfa, selectedMfaMethod }
+    data: { name, slug, authEnforced, scimEnabled, defaultMembershipRoleSlug, enforceMfa }
   }: TUpdateOrgDTO) => {
     const appCfg = getConfig();
     const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@@ -333,8 +333,7 @@ export const orgServiceFactory = ({
       authEnforced,
       scimEnabled,
       defaultMembershipRole,
-      enforceMfa,
-      selectedMfaMethod
+      enforceMfa
     });
     if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
     return org;
@@ -1,6 +1,6 @@
 import { TOrgPermission } from "@app/lib/types";

-import { ActorAuthMethod, ActorType, MfaMethod } from "../auth/auth-type";
+import { ActorAuthMethod, ActorType } from "../auth/auth-type";

 export type TUpdateOrgMembershipDTO = {
   userId: string;
@@ -65,7 +65,6 @@ export type TUpdateOrgDTO = {
     scimEnabled: boolean;
     defaultMembershipRoleSlug: string;
     enforceMfa: boolean;
-    selectedMfaMethod: MfaMethod;
   }>;
 } & TOrgPermission;

@@ -19,6 +19,7 @@ import {
   TGetFolderDTO,
   TGetFoldersDeepByEnvsDTO,
   TUpdateFolderDTO,
+  TUpdateFolderPathDTO,
   TUpdateManyFoldersDTO
 } from "./secret-folder-types";
 import { TSecretFolderVersionDALFactory } from "./secret-folder-version-dal";
@@ -329,6 +330,145 @@ export const secretFolderServiceFactory = ({
     return { folder: newFolder, old: folder };
   };

+  const moveFolder = async ({
+    folderId,
+    newPath: secretPath,
+    projectId,
+    actor,
+    actorAuthMethod,
+    actorId,
+    actorOrgId
+  }: TUpdateFolderPathDTO) => {
+    const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]);
+
+    if (!folder) {
+      throw new NotFoundError({ message: `Folder with ID '${folderId}' not found` });
+    }
+    const { permission } = await permissionService.getProjectPermission(
+      actor,
+      actorId,
+      projectId,
+      actorAuthMethod,
+      actorOrgId
+    );
+
+    // Has permission for source folder
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.SecretFolders, { environment: folder.environmentSlug, secretPath: folder.path })
+    );
+
+    // Has permission for destination folder
+    ForbiddenError.from(permission).throwUnlessCan(
+      ProjectPermissionActions.Edit,
+      subject(ProjectPermissionSub.SecretFolders, { environment: folder.environmentSlug, secretPath })
+    );
+
+    if (!folder.parentId) {
+      throw new BadRequestError({ message: `Cannot move root folder` });
+    }
+
+    const parentFolder = await folderDAL.findById(folder.parentId);
+
+    if (!parentFolder) {
+      throw new NotFoundError({ message: `Parent folder with ID '${folder.parentId}' not found` });
+    }
+
+    if (secretPath !== "/") {
+      const newPathSegments = secretPath.split("/").filter(Boolean);
+
+      if (newPathSegments.length === 0) {
+        throw new BadRequestError({ message: `Invalid new path '${secretPath}'` });
+      }
+    }
+
+    if (secretPath === folder.path) {
+      return folder;
+    }
+
+    const existingFolder = await folderDAL.findBySecretPath(
+      projectId,
+      folder.environmentSlug,
+      `${secretPath}/${folder.name}`
+    );
+
+    if (existingFolder) {
+      throw new BadRequestError({
+        message: `Folder with name '${existingFolder.name}' already exists in the new path '${secretPath}'`
+      });
+    }
+
+    const movedFolder = await folderDAL.transaction(async (tx) => {
+      const newParentFolder = await folderDAL.findClosestFolder(projectId, folder.environmentSlug, secretPath, tx);
+
+      if (!newParentFolder) {
+        throw new NotFoundError({ message: `Parent folder with path '${secretPath}' not found` });
+      }
+
+      let parentFolderId = newParentFolder.id;
+
+      // create any missing intermediate folders in the new path
+      if (newParentFolder.path !== secretPath) {
+        const missingSegments = secretPath.substring(newParentFolder.path.length).split("/").filter(Boolean);
+        if (missingSegments.length) {
+          const newFolders: Array<TSecretFoldersInsert & { id: string }> = missingSegments.map((segment) => {
+            const newFolder = {
+              name: segment,
+              parentId: parentFolderId,
+              id: uuidv4(),
+              envId: folder.envId,
+              version: 1
+            };
+            parentFolderId = newFolder.id;
+            return newFolder;
+          });
+
+          parentFolderId = newFolders.at(-1)?.id as string;
+          const docs = await folderDAL.insertMany(newFolders, tx);
+          await folderVersionDAL.insertMany(
+            docs.map((doc) => ({
+              name: doc.name,
+              envId: doc.envId,
+              version: doc.version,
+              folderId: doc.id
+            })),
+            tx
+          );
+        }
+      }
+
+      if (parentFolderId === folder.id) {
+        throw new BadRequestError({ message: `Cannot move folder into itself` });
+      }
+
+      // update the folder's parent id to point to the new location
+      const [updatedFolder] = await folderDAL.update(
+        { id: folder.id },
+        {
+          parentId: parentFolderId,
+          version: (folder.version || 0) + 1
+        },
+        tx
+      );
+
+      // create a new version record
+      await folderVersionDAL.create(
+        {
+          name: updatedFolder.name,
+          envId: updatedFolder.envId,
+          version: updatedFolder.version,
+          folderId: updatedFolder.id
+        },
+        tx
+      );
+
+      return updatedFolder;
+    });
+
+    await snapshotService.performSnapshot(movedFolder.id);
+    return movedFolder;
+  };
+
   const deleteFolder = async ({
     projectId,
     actor,
@@ -540,6 +680,7 @@ export const secretFolderServiceFactory = ({
     createFolder,
     updateFolder,
     updateManyFolders,
+    moveFolder,
     deleteFolder,
     getFolders,
     getFolderById,
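Once wired into a route handler, the new `moveFolder` is invoked like the other folder operations; a hypothetical call (the service handle and actor fields depend on how the caller is wired up):

```ts
// moves the folder; missing intermediate folders in the new path are created on the way
const movedFolder = await secretFolderService.moveFolder({
  projectId,
  folderId,
  newPath: "/backend/payments", // destination path inside the same environment
  actor,
  actorId,
  actorAuthMethod,
  actorOrgId
});
```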
@@ -18,6 +18,11 @@ export type TUpdateFolderDTO = {
   name: string;
 } & TProjectPermission;

+export type TUpdateFolderPathDTO = {
+  folderId: string;
+  newPath: string;
+} & TProjectPermission;
+
 export type TUpdateManyFoldersDTO = {
   projectSlug: string;
   folders: {
@@ -361,10 +361,6 @@ export const secretV2BridgeDALFactory = (db: TDbClient) => {
           void bd.whereILike(`${TableName.SecretV2}.key`, `%${filters?.search}%`);
         }
       }
-
-      if (filters?.keys) {
-        void bd.whereIn(`${TableName.SecretV2}.key`, filters.keys);
-      }
     })
     .where((bd) => {
       void bd.whereNull(`${TableName.SecretV2}.userId`).orWhere({ userId: userId || null });
@@ -518,10 +518,7 @@ export const expandSecretReferencesFactory = ({
       }

       if (referencedSecretValue) {
-        expandedValue = expandedValue.replaceAll(
-          interpolationSyntax,
-          () => referencedSecretValue // prevents special characters from triggering replacement patterns
-        );
+        expandedValue = expandedValue.replaceAll(interpolationSyntax, referencedSecretValue);
       }
     }
   }
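The left-hand side wraps the replacement in a function for a subtle reason: when `String.prototype.replaceAll` is given a replacement *string*, sequences like `$&` inside the secret's value are treated as substitution patterns, while a function replacer's return value is inserted verbatim. A quick self-contained illustration (the values are made up):

```ts
const template = "url=${DB_HOST}/app";
const secretValue = "prod$&cluster"; // a value that happens to contain "$&"

// String replacement: "$&" expands to the matched substring "${DB_HOST}".
template.replaceAll("${DB_HOST}", secretValue);
// -> "url=prod${DB_HOST}cluster/app"

// Function replacer: the value is inserted verbatim.
template.replaceAll("${DB_HOST}", () => secretValue);
// -> "url=prod$&cluster/app"
```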
@@ -150,13 +150,9 @@ export const secretV2BridgeServiceFactory = ({
     }
   });

-  if (
-    referredSecrets.length !==
-    new Set(references.map(({ secretKey, secretPath, environment }) => `${secretKey}.${secretPath}.${environment}`))
-      .size // only count unique references
-  )
+  if (referredSecrets.length !== references.length)
     throw new BadRequestError({
-      message: `Referenced secret(s) not found: ${diff(
+      message: `Referenced secret not found. Found only ${diff(
        references.map((el) => el.secretKey),
        referredSecrets.map((el) => el.key)
      ).join(",")}`
@@ -414,13 +410,12 @@ export const secretV2BridgeServiceFactory = ({
      type: KmsDataKey.SecretManager,
      projectId
    });
-    const encryptedValue =
-      typeof secretValue === "string"
-        ? {
-            encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
-            references: getAllSecretReferences(secretValue).nestedReferences
-          }
-        : {};
+    const encryptedValue = secretValue
+      ? {
+          encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
+          references: getAllSecretReferences(secretValue).nestedReferences
+        }
+      : {};

    if (secretValue) {
      const { nestedReferences, localReferences } = getAllSecretReferences(secretValue);
@@ -1166,7 +1161,7 @@ export const secretV2BridgeServiceFactory = ({
    const newSecrets = await secretDAL.transaction(async (tx) =>
      fnSecretBulkInsert({
        inputSecrets: inputSecrets.map((el) => {
-          const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;
+          const references = secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences;

          return {
            version: 1,
@@ -1373,7 +1368,7 @@ export const secretV2BridgeServiceFactory = ({
          typeof el.secretValue !== "undefined"
            ? {
                encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
-                references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
+                references: secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences
              }
            : {};

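The right-hand side of the first hunk compares `referredSecrets.length` against `references.length`, which miscounts when the same secret is referenced more than once; the left-hand side de-duplicates by building a composite key per reference. A minimal illustration of the counting difference:

```ts
const references = [
  { secretKey: "DB_URL", secretPath: "/", environment: "dev" },
  { secretKey: "DB_URL", secretPath: "/", environment: "dev" } // same secret referenced twice
];

const uniqueReferences = new Set(
  references.map(({ secretKey, secretPath, environment }) => `${secretKey}.${secretPath}.${environment}`)
);

console.log(references.length); // 2 - would demand two found secrets and raise a false error
console.log(uniqueReferences.size); // 1 - matches the single secret the lookup returns
```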
@@ -33,7 +33,6 @@ export type TGetSecretsDTO = {
   offset?: number;
   limit?: number;
   search?: string;
-  keys?: string[];
 } & TProjectPermission;

 export type TGetASecretDTO = {
@@ -295,7 +294,6 @@ export type TFindSecretsByFolderIdsFilter = {
   search?: string;
   tagSlugs?: string[];
   includeTagsInSearch?: boolean;
-  keys?: string[];
 };

 export type TGetSecretsRawByFolderMappingsDTO = {
@@ -185,7 +185,6 @@ export type TGetSecretsRawDTO = {
   offset?: number;
   limit?: number;
   search?: string;
-  keys?: string[];
 } & TProjectPermission;

 export type TGetASecretRawDTO = {
@@ -77,21 +77,5 @@ export const smtpServiceFactory = (cfg: TSmtpConfig) => {
     }
   };

-  const verify = async () => {
-    const isConnected = smtp
-      .verify()
-      .then(async () => {
-        logger.info("SMTP connected");
-        return true;
-      })
-      .catch((err: Error) => {
-        logger.error("SMTP error");
-        logger.error(err);
-        return false;
-      });
-
-    return isConnected;
-  };
-
-  return { sendMail, verify };
+  return { sendMail };
 };
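For reference, the removed `verify` helper is a thin wrapper over nodemailer's built-in connection check. A minimal sketch, assuming a nodemailer transport configured elsewhere (host and port below are placeholders):

```ts
import nodemailer from "nodemailer";

const smtp = nodemailer.createTransport({ host: "smtp.example.com", port: 587 });

// transporter.verify() opens a connection and runs the handshake;
// mapping the outcome to a boolean mirrors the deleted helper.
const verify = async (): Promise<boolean> =>
  smtp
    .verify()
    .then(() => true)
    .catch(() => false);
```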
@@ -10,10 +10,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";

 import { TAuthLoginFactory } from "../auth/auth-login-service";
 import { AuthMethod } from "../auth/auth-type";
-import { KMS_ROOT_CONFIG_UUID } from "../kms/kms-fns";
-import { TKmsRootConfigDALFactory } from "../kms/kms-root-config-dal";
 import { TKmsServiceFactory } from "../kms/kms-service";
-import { RootKeyEncryptionStrategy } from "../kms/kms-types";
 import { TOrgServiceFactory } from "../org/org-service";
 import { TUserDALFactory } from "../user/user-dal";
 import { TSuperAdminDALFactory } from "./super-admin-dal";
@@ -23,8 +20,7 @@ type TSuperAdminServiceFactoryDep = {
   serverCfgDAL: TSuperAdminDALFactory;
   userDAL: TUserDALFactory;
   authService: Pick<TAuthLoginFactory, "generateUserTokens">;
-  kmsService: Pick<TKmsServiceFactory, "encryptWithRootKey" | "decryptWithRootKey" | "updateEncryptionStrategy">;
-  kmsRootConfigDAL: TKmsRootConfigDALFactory;
+  kmsService: Pick<TKmsServiceFactory, "encryptWithRootKey" | "decryptWithRootKey">;
   orgService: Pick<TOrgServiceFactory, "createOrganization">;
   keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry" | "deleteItem">;
   licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
@@ -51,7 +47,6 @@ export const superAdminServiceFactory = ({
   authService,
   orgService,
   keyStore,
-  kmsRootConfigDAL,
   kmsService,
   licenseService
 }: TSuperAdminServiceFactoryDep) => {
@@ -293,70 +288,12 @@ export const superAdminServiceFactory = ({
     };
   };

-  const getConfiguredEncryptionStrategies = async () => {
-    const appCfg = getConfig();
-
-    const kmsRootCfg = await kmsRootConfigDAL.findById(KMS_ROOT_CONFIG_UUID);
-
-    if (!kmsRootCfg) {
-      throw new NotFoundError({ name: "KmsRootConfig", message: "KMS root configuration not found" });
-    }
-
-    const selectedStrategy = kmsRootCfg.encryptionStrategy;
-    const enabledStrategies: { enabled: boolean; strategy: RootKeyEncryptionStrategy }[] = [];
-
-    if (appCfg.ROOT_ENCRYPTION_KEY || appCfg.ENCRYPTION_KEY) {
-      const basicStrategy = RootKeyEncryptionStrategy.Software;
-
-      enabledStrategies.push({
-        enabled: selectedStrategy === basicStrategy,
-        strategy: basicStrategy
-      });
-    }
-    if (appCfg.isHsmConfigured) {
-      const hsmStrategy = RootKeyEncryptionStrategy.HSM;
-
-      enabledStrategies.push({
-        enabled: selectedStrategy === hsmStrategy,
-        strategy: hsmStrategy
-      });
-    }
-
-    return {
-      strategies: enabledStrategies
-    };
-  };
-
-  const updateRootEncryptionStrategy = async (strategy: RootKeyEncryptionStrategy) => {
-    if (!licenseService.onPremFeatures.hsm) {
-      throw new BadRequestError({
-        message: "Failed to update encryption strategy due to plan restriction. Upgrade to Infisical's Enterprise plan."
-      });
-    }
-
-    const configuredStrategies = await getConfiguredEncryptionStrategies();
-
-    const foundStrategy = configuredStrategies.strategies.find((s) => s.strategy === strategy);
-
-    if (!foundStrategy) {
-      throw new BadRequestError({ message: "Invalid encryption strategy" });
-    }
-
-    if (foundStrategy.enabled) {
-      throw new BadRequestError({ message: "The selected encryption strategy is already enabled" });
-    }
-
-    await kmsService.updateEncryptionStrategy(strategy);
-  };
-
   return {
     initServerCfg,
     updateServerCfg,
     adminSignUp,
     getUsers,
     deleteUser,
-    getAdminSlackConfig,
-    updateRootEncryptionStrategy,
-    getConfiguredEncryptionStrategies
+    getAdminSlackConfig
   };
 };
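The removed `getConfiguredEncryptionStrategies` returned one entry per strategy the deployment could use, flagging the currently active one. A sketch of that shape with illustrative values (the literal strategy strings follow the enum removed from kms-types above):

```ts
type EncryptionStrategyStatus = { enabled: boolean; strategy: "SOFTWARE" | "HSM" };

// e.g. software encryption active, HSM configured but not selected
const response: { strategies: EncryptionStrategyStatus[] } = {
  strategies: [
    { enabled: true, strategy: "SOFTWARE" },
    { enabled: false, strategy: "HSM" }
  ]
};
```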
@@ -1,11 +0,0 @@
-import { TDbClient } from "@app/db";
-import { TableName } from "@app/db/schemas";
-import { ormify } from "@app/lib/knex";
-
-export type TTotpConfigDALFactory = ReturnType<typeof totpConfigDALFactory>;
-
-export const totpConfigDALFactory = (db: TDbClient) => {
-  const totpConfigDal = ormify(db, TableName.TotpConfig);
-
-  return totpConfigDal;
-};
@@ -1,3 +0,0 @@
-import crypto from "node:crypto";
-
-export const generateRecoveryCode = () => String(crypto.randomInt(10 ** 7, 10 ** 8 - 1));
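The deleted helper relies on `crypto.randomInt(min, max)`, which samples from the half-open range `[min, max)`; with these bounds every code stringifies to exactly 8 digits, though the exclusive upper bound means 99999999 itself can never be drawn:

```ts
import crypto from "node:crypto";

// draws from [10_000_000, 99_999_998]: always 8 digits once stringified
const generateRecoveryCode = () => String(crypto.randomInt(10 ** 7, 10 ** 8 - 1));

console.log(generateRecoveryCode()); // e.g. "48319027"
```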
@@ -1,270 +0,0 @@
-import { authenticator } from "otplib";
-
-import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
-
-import { TKmsServiceFactory } from "../kms/kms-service";
-import { TUserDALFactory } from "../user/user-dal";
-import { TTotpConfigDALFactory } from "./totp-config-dal";
-import { generateRecoveryCode } from "./totp-fns";
-import {
-  TCreateUserTotpRecoveryCodesDTO,
-  TDeleteUserTotpConfigDTO,
-  TGetUserTotpConfigDTO,
-  TRegisterUserTotpDTO,
-  TVerifyUserTotpConfigDTO,
-  TVerifyUserTotpDTO,
-  TVerifyWithUserRecoveryCodeDTO
-} from "./totp-types";
-
-type TTotpServiceFactoryDep = {
-  userDAL: TUserDALFactory;
-  totpConfigDAL: TTotpConfigDALFactory;
-  kmsService: TKmsServiceFactory;
-};
-
-export type TTotpServiceFactory = ReturnType<typeof totpServiceFactory>;
-
-const MAX_RECOVERY_CODE_LIMIT = 10;
-
-export const totpServiceFactory = ({ totpConfigDAL, kmsService, userDAL }: TTotpServiceFactoryDep) => {
-  const getUserTotpConfig = async ({ userId }: TGetUserTotpConfigDTO) => {
-    const totpConfig = await totpConfigDAL.findOne({
-      userId
-    });
-
-    if (!totpConfig) {
-      throw new NotFoundError({
-        message: "TOTP configuration not found"
-      });
-    }
-
-    if (!totpConfig.isVerified) {
-      throw new BadRequestError({
-        message: "TOTP configuration has not been verified"
-      });
-    }
-
-    const decryptWithRoot = kmsService.decryptWithRootKey();
-    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
-
-    return {
-      isVerified: totpConfig.isVerified,
-      recoveryCodes
-    };
-  };
-
-  const registerUserTotp = async ({ userId }: TRegisterUserTotpDTO) => {
-    const totpConfig = await totpConfigDAL.transaction(async (tx) => {
-      const verifiedTotpConfig = await totpConfigDAL.findOne(
-        {
-          userId,
-          isVerified: true
-        },
-        tx
-      );
-
-      if (verifiedTotpConfig) {
-        throw new BadRequestError({
-          message: "TOTP configuration for user already exists"
-        });
-      }
-
-      const unverifiedTotpConfig = await totpConfigDAL.findOne({
-        userId,
-        isVerified: false
-      });
-
-      if (unverifiedTotpConfig) {
-        return unverifiedTotpConfig;
-      }
-
-      const encryptWithRoot = kmsService.encryptWithRootKey();
-
-      // create new TOTP configuration
-      const secret = authenticator.generateSecret();
-      const encryptedSecret = encryptWithRoot(Buffer.from(secret));
-      const recoveryCodes = Array.from({ length: MAX_RECOVERY_CODE_LIMIT }).map(generateRecoveryCode);
-      const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(recoveryCodes.join(",")));
-      const newTotpConfig = await totpConfigDAL.create({
-        userId,
-        encryptedRecoveryCodes,
-        encryptedSecret
-      });
-
-      return newTotpConfig;
-    });
-
-    const user = await userDAL.findById(userId);
-    const decryptWithRoot = kmsService.decryptWithRootKey();
-
-    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
-    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
-    const otpUrl = authenticator.keyuri(user.username, "Infisical", secret);
-
-    return {
-      otpUrl,
-      recoveryCodes
-    };
-  };
-
-  const verifyUserTotpConfig = async ({ userId, totp }: TVerifyUserTotpConfigDTO) => {
-    const totpConfig = await totpConfigDAL.findOne({
-      userId
-    });
-
-    if (!totpConfig) {
-      throw new NotFoundError({
-        message: "TOTP configuration not found"
-      });
-    }
-
-    if (totpConfig.isVerified) {
-      throw new BadRequestError({
-        message: "TOTP configuration has already been verified"
-      });
-    }
-
-    const decryptWithRoot = kmsService.decryptWithRootKey();
-    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
-    const isValid = authenticator.verify({
-      token: totp,
-      secret
-    });
-
-    if (isValid) {
-      await totpConfigDAL.updateById(totpConfig.id, {
-        isVerified: true
-      });
-    } else {
-      throw new BadRequestError({
-        message: "Invalid TOTP token"
-      });
-    }
-  };
-
-  const verifyUserTotp = async ({ userId, totp }: TVerifyUserTotpDTO) => {
-    const totpConfig = await totpConfigDAL.findOne({
-      userId
-    });
-
-    if (!totpConfig) {
-      throw new NotFoundError({
-        message: "TOTP configuration not found"
-      });
-    }
-
-    if (!totpConfig.isVerified) {
-      throw new BadRequestError({
-        message: "TOTP configuration has not been verified"
-      });
-    }
-
-    const decryptWithRoot = kmsService.decryptWithRootKey();
-    const secret = decryptWithRoot(totpConfig.encryptedSecret).toString();
-    const isValid = authenticator.verify({
-      token: totp,
-      secret
-    });
-
-    if (!isValid) {
-      throw new ForbiddenRequestError({
-        message: "Invalid TOTP"
-      });
-    }
-  };
-
-  const verifyWithUserRecoveryCode = async ({ userId, recoveryCode }: TVerifyWithUserRecoveryCodeDTO) => {
-    const totpConfig = await totpConfigDAL.findOne({
-      userId
-    });
-
-    if (!totpConfig) {
-      throw new NotFoundError({
-        message: "TOTP configuration not found"
-      });
-    }
-
-    if (!totpConfig.isVerified) {
-      throw new BadRequestError({
-        message: "TOTP configuration has not been verified"
-      });
-    }
-
-    const decryptWithRoot = kmsService.decryptWithRootKey();
-    const encryptWithRoot = kmsService.encryptWithRootKey();
-
-    const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
-    const matchingCode = recoveryCodes.find((code) => recoveryCode === code);
-    if (!matchingCode) {
-      throw new ForbiddenRequestError({
-        message: "Invalid TOTP recovery code"
-      });
-    }
-
-    const updatedRecoveryCodes = recoveryCodes.filter((code) => code !== matchingCode);
-    const encryptedRecoveryCodes = encryptWithRoot(Buffer.from(updatedRecoveryCodes.join(",")));
-    await totpConfigDAL.updateById(totpConfig.id, {
-      encryptedRecoveryCodes
-    });
-  };
-
-  const deleteUserTotpConfig = async ({ userId }: TDeleteUserTotpConfigDTO) => {
-    const totpConfig = await totpConfigDAL.findOne({
-      userId
-    });
-
-    if (!totpConfig) {
-      throw new NotFoundError({
-        message: "TOTP configuration not found"
-      });
-    }
-
-    await totpConfigDAL.deleteById(totpConfig.id);
-  };
-
-  const createUserTotpRecoveryCodes = async ({ userId }: TCreateUserTotpRecoveryCodesDTO) => {
-    const decryptWithRoot = kmsService.decryptWithRootKey();
-    const encryptWithRoot = kmsService.encryptWithRootKey();
-
-    return totpConfigDAL.transaction(async (tx) => {
-      const totpConfig = await totpConfigDAL.findOne(
-        {
-          userId,
-          isVerified: true
-        },
-        tx
-      );
-
-      if (!totpConfig) {
-        throw new NotFoundError({
-          message: "Valid TOTP configuration not found"
-        });
-      }
-
-      const recoveryCodes = decryptWithRoot(totpConfig.encryptedRecoveryCodes).toString().split(",");
-      if (recoveryCodes.length >= MAX_RECOVERY_CODE_LIMIT) {
-        throw new BadRequestError({
-          message: `Cannot have more than ${MAX_RECOVERY_CODE_LIMIT} recovery codes at a time`
-        });
-      }
-
-      const toGenerateCount = MAX_RECOVERY_CODE_LIMIT - recoveryCodes.length;
-      const newRecoveryCodes = Array.from({ length: toGenerateCount }).map(generateRecoveryCode);
-      const encryptedRecoveryCodes = encryptWithRoot(Buffer.from([...recoveryCodes, ...newRecoveryCodes].join(",")));
-
-      await totpConfigDAL.updateById(totpConfig.id, {
-        encryptedRecoveryCodes
-      });
-    });
-  };
-
-  return {
-    registerUserTotp,
-    verifyUserTotpConfig,
-    getUserTotpConfig,
-    verifyUserTotp,
-    verifyWithUserRecoveryCode,
-    deleteUserTotpConfig,
-    createUserTotpRecoveryCodes
-  };
-};
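The deleted service is built on otplib's `authenticator`; the core enrolment and verification cycle it wraps looks like this (the account name and issuer are illustrative):

```ts
import { authenticator } from "otplib";

const secret = authenticator.generateSecret();

// otpauth:// URL the frontend renders as a QR code for the authenticator app
const otpUrl = authenticator.keyuri("jane@example.com", "Infisical", secret);

// later: check a 6-digit code typed in by the user
const token = authenticator.generate(secret); // stand-in for user input
const isValid = authenticator.verify({ token, secret }); // true within the current time step
```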
@@ -1,30 +0,0 @@
-export type TRegisterUserTotpDTO = {
-  userId: string;
-};
-
-export type TVerifyUserTotpConfigDTO = {
-  userId: string;
-  totp: string;
-};
-
-export type TGetUserTotpConfigDTO = {
-  userId: string;
-};
-
-export type TVerifyUserTotpDTO = {
-  userId: string;
-  totp: string;
-};
-
-export type TVerifyWithUserRecoveryCodeDTO = {
-  userId: string;
-  recoveryCode: string;
-};
-
-export type TDeleteUserTotpConfigDTO = {
-  userId: string;
-};
-
-export type TCreateUserTotpRecoveryCodesDTO = {
-  userId: string;
-};
@@ -15,7 +15,7 @@ import { AuthMethod } from "../auth/auth-type";
 import { TGroupProjectDALFactory } from "../group-project/group-project-dal";
 import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
 import { TUserDALFactory } from "./user-dal";
-import { TListUserGroupsDTO, TUpdateUserMfaDTO } from "./user-types";
+import { TListUserGroupsDTO } from "./user-types";

 type TUserServiceFactoryDep = {
   userDAL: Pick<
@@ -171,24 +171,15 @@ export const userServiceFactory = ({
     });
   };

-  const updateUserMfa = async ({ userId, isMfaEnabled, selectedMfaMethod }: TUpdateUserMfaDTO) => {
+  const toggleUserMfa = async (userId: string, isMfaEnabled: boolean) => {
     const user = await userDAL.findById(userId);

     if (!user || !user.email) throw new BadRequestError({ name: "Failed to toggle MFA" });

-    let mfaMethods;
-    if (isMfaEnabled === undefined) {
-      mfaMethods = undefined;
-    } else {
-      mfaMethods = isMfaEnabled ? ["email"] : [];
-    }
-
     const updatedUser = await userDAL.updateById(userId, {
       isMfaEnabled,
-      mfaMethods,
-      selectedMfaMethod
+      mfaMethods: isMfaEnabled ? ["email"] : []
     });

     return updatedUser;
   };

@@ -336,7 +327,7 @@ export const userServiceFactory = ({
   return {
     sendEmailVerificationCode,
     verifyEmailVerificationCode,
-    updateUserMfa,
+    toggleUserMfa,
     updateUserName,
     updateAuthMethods,
     deleteUser,
@@ -1,7 +1,5 @@
 import { TOrgPermission } from "@app/lib/types";

-import { MfaMethod } from "../auth/auth-type";
-
 export type TListUserGroupsDTO = {
   username: string;
 } & Omit<TOrgPermission, "orgId">;
@@ -10,9 +8,3 @@ export enum UserEncryption {
   V1 = 1,
   V2 = 2
 }
-
-export type TUpdateUserMfaDTO = {
-  userId: string;
-  isMfaEnabled?: boolean;
-  selectedMfaMethod?: MfaMethod;
-};
@@ -136,9 +136,8 @@ type GetOrganizationsResponse struct {
 }

 type SelectOrganizationResponse struct {
-	Token      string `json:"token"`
-	MfaEnabled bool   `json:"isMfaEnabled"`
-	MfaMethod  string `json:"mfaMethod"`
+	Token      string `json:"token"`
+	MfaEnabled bool   `json:"isMfaEnabled"`
 }

 type SelectOrganizationRequest struct {
@@ -261,9 +260,8 @@ type GetLoginTwoV2Response struct {
 }

 type VerifyMfaTokenRequest struct {
-	Email     string `json:"email"`
-	MFAToken  string `json:"mfaToken"`
-	MFAMethod string `json:"mfaMethod"`
+	Email    string `json:"email"`
+	MFAToken string `json:"mfaToken"`
 }

 type VerifyMfaTokenResponse struct {
@@ -79,14 +79,13 @@ var initCmd = &cobra.Command{
 		if tokenResponse.MfaEnabled {
 			i := 1
 			for i < 6 {
-				mfaVerifyCode := askForMFACode(tokenResponse.MfaMethod)
+				mfaVerifyCode := askForMFACode()

 				httpClient := resty.New()
 				httpClient.SetAuthToken(tokenResponse.Token)
 				verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
-					Email:     userCreds.UserCredentials.Email,
-					MFAToken:  mfaVerifyCode,
-					MFAMethod: tokenResponse.MfaMethod,
+					Email:    userCreds.UserCredentials.Email,
+					MFAToken: mfaVerifyCode,
 				})
 				if requestError != nil {
 					util.HandleError(err)
@@ -100,7 +99,7 @@ var initCmd = &cobra.Command{
 						break
 					}
 				}
-
+
 				if mfaErrorResponse.Context.Code == "mfa_expired" {
 					util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
 					break
@@ -343,7 +343,7 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
 	if loginTwoResponse.MfaEnabled {
 		i := 1
 		for i < 6 {
-			mfaVerifyCode := askForMFACode("email")
+			mfaVerifyCode := askForMFACode()

 			httpClient := resty.New()
 			httpClient.SetAuthToken(loginTwoResponse.Token)
@@ -532,7 +532,7 @@ func askForDomain() error {
 	const (
 		INFISICAL_CLOUD_US = "Infisical Cloud (US Region)"
 		INFISICAL_CLOUD_EU = "Infisical Cloud (EU Region)"
-		SELF_HOSTING       = "Self-Hosting or Dedicated Instance"
+		SELF_HOSTING       = "Self-Hosting"
 		ADD_NEW_DOMAIN     = "Add a new domain"
 	)

@@ -756,14 +756,13 @@ func GetJwtTokenWithOrganizationId(oldJwtToken string, email string) string {
 	if selectedOrgRes.MfaEnabled {
 		i := 1
 		for i < 6 {
-			mfaVerifyCode := askForMFACode(selectedOrgRes.MfaMethod)
+			mfaVerifyCode := askForMFACode()

 			httpClient := resty.New()
 			httpClient.SetAuthToken(selectedOrgRes.Token)
 			verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
-				Email:     email,
-				MFAToken:  mfaVerifyCode,
-				MFAMethod: selectedOrgRes.MfaMethod,
+				Email:    email,
+				MFAToken: mfaVerifyCode,
 			})
 			if requestError != nil {
 				util.HandleError(err)
@@ -818,15 +817,9 @@ func generateFromPassword(password string, salt []byte, p *params) (hash []byte
 	return hash, nil
 }

-func askForMFACode(mfaMethod string) string {
-	var label string
-	if mfaMethod == "totp" {
-		label = "Enter the verification code from your mobile authenticator app or use a recovery code"
-	} else {
-		label = "Enter the 2FA verification code sent to your email"
-	}
+func askForMFACode() string {
 	mfaCodePromptUI := promptui.Prompt{
-		Label: label,
+		Label: "Enter the 2FA verification code sent to your email",
 	}

 	mfaVerifyCode, err := mfaCodePromptUI.Run()
@@ -1,28 +0,0 @@
----
-title: "Compensation"
-sidebarTitle: "Compensation"
-description: "This guide explains how various compensation processes work at Infisical."
----
-
-## Probation period
-
-We are fully committed to ensuring that you are set up for success, but also understand that it may take some time to determine whether or not there is a long term fit between you and Infisical.
-
-The first 3 months of your employment with Infisical is a probation period. During this time, you can choose to end your contract with 1 week's notice. If we chose to end your contract, Infisical will pay you 4 weeks' pay, but usually ask you to finish on the same day.
-
-People in sales roles, such as Account Executives, have a 6 month probation period - this is to account for the fact that it can be difficult to establish whether or not someone is able to close contracts within their first 3 months, given sales cycles.
-
-Your manager is responsible for monitoring and specifically reviewing your performance throughout this initial period. If under-performance is a concern, or if there is any hesitation regarding the future at Infisical, this should be discussed immediately with you and your manager.
-
-
-## Severance
-
-At Infisical, average performance gets a generous severance.
-
-If Infisical decides to end your contract after the first 3 months of employment have been completed, we will give you 10 weeks' pay. It is likely we will ask you to stop working immediately.
-
-If the decision to leave is yours, then we just require 1 month of notice.
-
-We have structured notice in this way as we believe it is in neither Infisical's nor your interest to lock you into a role that is no longer right for you due to financial considerations. This extended notice period only applies in the case of under-performance or a change in business needs - if your contract is terminated due to gross misconduct then you may be dismissed without notice. If this policy conflicts with the requirements of your local jurisdiction, then those local laws will take priority.
-
@@ -58,7 +58,6 @@
       "pages": [
         "handbook/onboarding",
         "handbook/spending-money",
-        "handbook/compensation",
        "handbook/time-off",
        "handbook/hiring",
        "handbook/meetings",
@@ -86,7 +86,6 @@ services:
       - .env
     ports:
       - 4000:4000
-      - 9464:9464 # for OTEL collection of Prometheus metrics
     environment:
       - NODE_ENV=development
       - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
@@ -96,42 +95,6 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"

-  prometheus:
-    image: prom/prometheus
-    volumes:
-      - ./prometheus.dev.yml:/etc/prometheus/prometheus.yml
-    ports:
-      - "9090:9090"
-    command:
-      - "--config.file=/etc/prometheus/prometheus.yml"
-    profiles: [metrics]
-
-  otel-collector:
-    image: otel/opentelemetry-collector-contrib
-    volumes:
-      - ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml
-    ports:
-      - 1888:1888 # pprof extension
-      - 8888:8888 # Prometheus metrics exposed by the Collector
-      - 8889:8889 # Prometheus exporter metrics
-      - 13133:13133 # health_check extension
-      - 4317:4317 # OTLP gRPC receiver
-      - 4318:4318 # OTLP http receiver
-      - 55679:55679 # zpages extension
-    profiles: [metrics-otel]
-
-  grafana:
-    image: grafana/grafana
-    container_name: grafana
-    restart: unless-stopped
-    environment:
-      - GF_LOG_LEVEL=debug
-    ports:
-      - "3005:3000"
-    volumes:
-      - "grafana_storage:/var/lib/grafana"
-    profiles: [metrics]
-
   frontend:
     container_name: infisical-dev-frontend
     restart: unless-stopped
@@ -203,4 +166,3 @@ volumes:
     driver: local
   ldap_data:
   ldap_config:
-  grafana_storage:
@@ -69,4 +69,4 @@ volumes:
     driver: local

 networks:
-  infisical:
+  infisical:
\ No newline at end of file
@@ -3,3 +3,6 @@ title: "Bulk Create"
 openapi: "POST /api/v3/secrets/batch/raw"
 ---

+<Tip>
+  This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
+</Tip>
@@ -3,3 +3,6 @@ title: "Create"
 openapi: "POST /api/v3/secrets/raw/{secretName}"
 ---

+<Tip>
+  This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
+</Tip>
@@ -3,3 +3,6 @@ title: "Bulk Delete"
 openapi: "DELETE /api/v3/secrets/batch/raw"
 ---

+<Tip>
+  This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
+</Tip>
@@ -3,3 +3,6 @@ title: "Delete"
 openapi: "DELETE /api/v3/secrets/raw/{secretName}"
 ---

+<Tip>
+  This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
+</Tip>
@@ -2,3 +2,7 @@
 title: "List"
 openapi: "GET /api/v3/secrets/raw"
 ---
+
+<Tip>
+  This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
+</Tip>
@@ -3,3 +3,6 @@ title: "Retrieve"
 openapi: "GET /api/v3/secrets/raw/{secretName}"
 ---

+<Tip>
+  This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
+</Tip>
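For readers landing on these documentation pages, a sketch of calling the List endpoint referenced above; the query parameters and bearer-token auth follow the general API conventions and should be checked against the full reference (the identifiers below are placeholders):

```ts
const token = process.env.INFISICAL_TOKEN; // placeholder auth token

const res = await fetch(
  "https://app.infisical.com/api/v3/secrets/raw?workspaceId=<project-id>&environment=dev",
  { headers: { Authorization: `Bearer ${token}` } }
);
const { secrets } = await res.json();
```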