Mirror of https://github.com/Infisical/infisical.git
Synced 2025-08-03 20:23:35 +00:00

Compare commits: 133 commits
infisical-… ... daniel/vie…
SHA1
df21a1fb81
bdbb6346cb
ea9da6d2a8
3c2c70912f
b607429b99
16c1516979
f5dbbaf1fd
2a292455ef
4d040706a9
5183f76397
4b3efb43b0
96046726b2
a86a951acc
5e70860160
abbd427ee2
8fd5fdbc6a
77e1ccc8d7
711cc438f6
8447190bf8
12b447425b
9cb1a31287
b00413817d
2a8bd74e88
f28f4f7561
f0b05c683b
3e8f02a4f9
50ee60a3ea
21bdecdf2a
bf09461416
1ff615913c
281cedf1a2
a8d847f139
2a0c0590f1
2e6d525d27
7fd4249d00
90cfc44592
8c403780c2
b69c091f2f
4a66395ce6
8c18753e3f
85c5d69c36
94fe577046
a0a579834c
b5575f4c20
f98f212ecf
b331a4a708
e351a16b5a
2cfca823f2
a8398a7009
8c054cedfc
24d4f8100c
08f23e2d3c
d1ad605ac4
9dd5857ff5
babbacdc96
76427f43f7
3badcea95b
1a4c0fe8d9
04f6864abc
fcbe0f59d2
e95b6fdeaa
5391bcd3b2
48fd9e2a56
7b5926d865
034123bcdf
f3786788fd
c406f6d78d
eb66295dd4
798215e84c
53f7491441
53f6ab118b
0f5a1b13a6
5c606fe45f
bbf60169eb
e004be22e3
016cb4a7ba
9bfc2a5dd2
72dbef97fb
f376eaae13
026f883d21
e42f860261
08ec8c9b73
1512d4f496
9f7b42ad91
3045477c32
be4adc2759
4eba80905a
b023bc7442
a0029ab469
53605c3880
e5bca5b5df
4091bc19e9
23bd048bb9
17a4674821
ec9631107d
3fa450b9a7
3b9c62c366
cb3d171d48
c29841fbcf
fcccf1bd8d
4382825162
f80ef1dcc8
7abf3e3642
82ef35bd08
4eb668b5a5
18edea9f26
787c091948
ff269b1063
ca0636cb25
b995358b7e
7aaf0f4ed3
68646bcdf8
9989ceb6d1
95d7ba5f22
2aa6fdf983
be5a32a5d6
f009cd329b
e2778864e2
ea7375b2c6
d42566c335
45cbd9f006
8580602ea7
7ff75cdfab
bd8c8871c0
d5aa13b277
428dc5d371
f1facf1f2c
31dc36d4e2
51f29e5357
30f0f174d1
3e7110f334
ce4c5d8ea1
1c2b4e91ba
@@ -112,4 +112,11 @@ INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
@@ -32,10 +32,23 @@ jobs:
        run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
      - name: Start the server
        run: |
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
          docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
          echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
          echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
          echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env

          echo "Examining built image:"
          docker image inspect infisical-api | grep -A 5 "Entrypoint"

          docker run --name infisical-api -d -p 4000:4000 \
            -e DB_CONNECTION_URI=$DB_CONNECTION_URI \
            -e REDIS_URL=$REDIS_URL \
            -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
            -e ENCRYPTION_KEY=$ENCRYPTION_KEY \
            --env-file .env \
            infisical-api

          echo "Container status right after creation:"
          docker ps -a | grep infisical-api
        env:
          REDIS_URL: redis://172.17.0.1:6379
          DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -43,27 +56,39 @@ jobs:
          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
      - uses: actions/setup-go@v5
        with:
          go-version: '1.21.5'
          go-version: "1.21.5"
      - name: Wait for container to be stable and check logs
        run: |
          SECONDS=0
          HEALTHY=0
          while [ $SECONDS -lt 60 ]; do
            if docker ps | grep infisical-api | grep -q healthy; then
              echo "Container is healthy."
              HEALTHY=1
            # Check if container is running
            if docker ps | grep infisical-api; then
              # Try to access the API endpoint
              if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
                echo "API endpoint is responding. Container seems healthy."
                HEALTHY=1
                break
              fi
            else
              echo "Container is not running!"
              docker ps -a | grep infisical-api
              break
            fi

            echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"

            docker logs infisical-api

            sleep 2
            SECONDS=$((SECONDS+2))
            sleep 5
            SECONDS=$((SECONDS+5))
          done

          if [ $HEALTHY -ne 1 ]; then
            echo "Container did not become healthy in time"
            echo "Container status:"
            docker ps -a | grep infisical-api
            echo "Container logs (if any):"
            docker logs infisical-api || echo "No logs available"
            echo "Container inspection:"
            docker inspect infisical-api | grep -A 5 "State"
            exit 1
          fi
      - name: Install openapi-diff
@@ -71,7 +96,8 @@ jobs:
      - name: Running OpenAPI Spec diff action
        run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
      - name: cleanup
        if: always()
        run: |
          docker compose -f "docker-compose.dev.yml" down
          docker stop infisical-api
          docker remove infisical-api
          docker stop infisical-api || true
          docker rm infisical-api || true
@@ -26,7 +26,7 @@ jobs:
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-20.04
    runs-on: ubuntu-latest
    env:
      working-directory: ./npm
    needs:
@@ -83,7 +83,7 @@ jobs:
      NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-20.04
    runs-on: ubuntu-latest
    needs: [cli-integration-tests]
    steps:
      - uses: actions/checkout@v3
@@ -103,11 +103,12 @@ jobs:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: libssl1.1 => libssl1.0-dev for OSXCross
      - name: Setup for libssl1.0-dev
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt update && apt-cache policy libssl1.0-dev
          sudo apt-get install libssl1.0-dev
          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
          sudo apt update
          sudo apt-get install -y libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
@@ -161,6 +161,9 @@ COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
@@ -3,13 +3,10 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key

FROM node:20-alpine AS base
FROM node:20-slim AS base

FROM base AS frontend-dependencies

# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat

WORKDIR /app

COPY frontend/package.json frontend/package-lock.json ./
@@ -45,8 +42,8 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user

COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./

@@ -56,21 +53,23 @@ USER non-root-user
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

WORKDIR /app

# Install all required dependencies for build
RUN apk --update add \
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    freetds-bin \
    unixodbc-dev \
    libc-dev \
    freetds-dev
    freetds-dev \
    && rm -rf /var/lib/apt/lists/*

RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user

COPY backend/package*.json ./
RUN npm ci --only-production
@@ -86,18 +85,19 @@ FROM base AS backend-runner
WORKDIR /app

# Install all required dependencies for runtime
RUN apk --update add \
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    freetds-bin \
    unixodbc-dev \
    libc-dev \
    freetds-dev
    freetds-dev \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production
@@ -109,34 +109,36 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production

RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.31.1 && apk add --no-cache git

WORKDIR /

# Install all required runtime dependencies
RUN apk --update add \
RUN apt-get update && apt-get install -y \
    ca-certificates \
    bash \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    freetds-bin \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    bash \
    curl \
    git \
    openssh
    wget \
    openssh-client \
    && rm -rf /var/lib/apt/lists/*

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs \
    && useradd --system --uid 1001 --gid nodejs non-root-user

# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@@ -154,11 +156,11 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

COPY --from=backend-runner /app /backend

COPY --from=frontend-runner /app ./backend/frontend-build

ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION

ENV PORT 8080
ENV HOST=0.0.0.0
@@ -166,6 +168,7 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true

WORKDIR /backend

ENV TELEMETRY_ENABLED true
@@ -1,23 +1,22 @@
# Build stage
FROM node:20-alpine AS build
FROM node:20-slim AS build

WORKDIR /app

# Required for pkcs11js
RUN apk --update add \
    python3 \
    make \
    g++ \
    openssh
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    openssh-client

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    freetds \
    freetds-bin \
    freetds-dev \
    unixodbc-dev \
    libc-dev \
    freetds-dev

    libc-dev

COPY package*.json ./
RUN npm ci --only-production
@@ -26,36 +25,36 @@ COPY . .
RUN npm run build

# Production stage
FROM node:20-alpine
FROM node:20-slim
WORKDIR /app

ENV npm_config_cache /home/node/.npm

COPY package*.json ./

RUN apk --update add \
    python3 \
    make \
    g++
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    freetds \
    freetds-bin \
    freetds-dev \
    unixodbc-dev \
    libc-dev \
    freetds-dev
    libc-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .

RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.8.1 && apk add --no-cache git
# Install Infisical CLI
RUN apt-get install -y curl bash && \
    curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && apt-get install -y infisical=0.8.1 git

HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
    CMD node healthcheck.js
@@ -1,4 +1,4 @@
FROM node:20-alpine
FROM node:20-slim

# ? Setup a test SoftHSM module. In production a real HSM is used.

@@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
    alpine-sdk \
    autoconf \
    automake \
    git \
    libtool \
    openssl-dev \
    python3 \
    make \
    g++ \
    openssh
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
    build-essential \
    autoconf \
    automake \
    git \
    libtool \
    libssl-dev \
    python3 \
    make \
    g++ \
    openssh-client \
    curl \
    pkg-config

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev
    freetds-dev \
    freetds-bin \
    tdsodbc

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# build and install SoftHSM2

# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

@@ -45,16 +45,18 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}

# install pkcs11-tool
RUN apk --update add opensc
# Install pkcs11-tool
RUN apt-get install -y opensc

RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
RUN mkdir -p /etc/softhsm2/tokens && \
    softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000

# ? App setup

RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.8.1 && apk add --no-cache git
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
    apt-get update && \
    apt-get install -y infisical=0.8.1

WORKDIR /app
@@ -6,7 +6,7 @@ import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric12

const createServiceToken = async (
  scopes: { environment: string; secretPath: string }[],
  permissions: ("read" | "write")[]
  permissions: ("read" | "write" | "readValue")[]
) => {
  const projectKeyRes = await testServer.inject({
    method: "GET",
@@ -139,7 +139,7 @@ describe("Service token secret ops", async () => {
  beforeAll(async () => {
    serviceToken = await createServiceToken(
      [{ secretPath: "/**", environment: seedData1.environment.slug }],
      ["read", "write"]
      ["read", "write", "readValue"]
    );

    // this is to ensure cli service token decryption is working fine
@@ -496,7 +496,7 @@ describe("Service token fail cases", async () => {
  test("Unauthorized secret path access", async () => {
    const serviceToken = await createServiceToken(
      [{ secretPath: "/", environment: seedData1.environment.slug }],
      ["read", "write"]
      ["read", "readValue", "write"]
    );
    const fetchSecrets = await testServer.inject({
      method: "GET",
@@ -518,7 +518,7 @@ describe("Service token fail cases", async () => {
  test("Unauthorized secret environment access", async () => {
    const serviceToken = await createServiceToken(
      [{ secretPath: "/", environment: seedData1.environment.slug }],
      ["read", "write"]
      ["read", "readValue", "write"]
    );
    const fetchSecrets = await testServer.inject({
      method: "GET",
@@ -540,7 +540,7 @@ describe("Service token fail cases", async () => {
  test("Unauthorized write operation", async () => {
    const serviceToken = await createServiceToken(
      [{ secretPath: "/", environment: seedData1.environment.slug }],
      ["read"]
      ["read", "readValue"]
    );
    const writeSecrets = await testServer.inject({
      method: "POST",
@@ -120,4 +120,3 @@ export default {
  };
}
};
backend/package-lock.json (generated, 2239 lines changed)
File diff suppressed because it is too large.
@@ -60,6 +60,13 @@
    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
    "migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
    "migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
    "migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
    "migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
    "migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
    "migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
    "migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
    "migrate:org": "tsx ./scripts/migrate-organization.ts",
    "seed:new": "tsx ./scripts/create-seed-file.ts",
    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
@@ -138,6 +145,7 @@
    "@fastify/swagger": "^8.14.0",
    "@fastify/swagger-ui": "^2.1.0",
    "@google-cloud/kms": "^4.5.0",
    "@infisical/quic": "^1.0.8",
    "@node-saml/passport-saml": "^4.0.4",
    "@octokit/auth-app": "^7.1.1",
    "@octokit/plugin-retry": "^5.0.5",
@@ -145,10 +153,10 @@
    "@octokit/webhooks-types": "^7.3.1",
    "@octopusdeploy/api-client": "^3.4.1",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
    "@opentelemetry/exporter-prometheus": "^0.55.0",
    "@opentelemetry/instrumentation": "^0.55.0",
    "@opentelemetry/instrumentation-http": "^0.57.2",
    "@opentelemetry/resources": "^1.28.0",
    "@opentelemetry/sdk-metrics": "^1.28.0",
    "@opentelemetry/semantic-conventions": "^1.27.0",
@@ -169,6 +177,7 @@
    "cassandra-driver": "^4.7.2",
    "connect-redis": "^7.1.1",
    "cron": "^3.1.7",
    "dd-trace": "^5.40.0",
    "dotenv": "^16.4.1",
    "fastify": "^4.28.1",
    "fastify-plugin": "^4.5.1",
@@ -177,6 +186,7 @@
    "handlebars": "^4.7.8",
    "hdb": "^0.19.10",
    "ioredis": "^5.3.2",
    "isomorphic-dompurify": "^2.22.0",
    "jmespath": "^0.16.0",
    "jsonwebtoken": "^9.0.2",
    "jsrp": "^0.2.4",
@@ -39,7 +39,7 @@ export default {
    },
    migrations: {
      tableName: "infisical_migrations",
      loadExtensions: [".mjs"]
      loadExtensions: [".mjs", ".ts"]
    }
  },
  production: {
@@ -64,7 +64,7 @@ export default {
    },
    migrations: {
      tableName: "infisical_migrations",
      loadExtensions: [".mjs"]
      loadExtensions: [".mjs", ".ts"]
    }
  }
} as Knex.Config;
@@ -0,0 +1,313 @@
import { MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { Knex } from "knex";
import { z } from "zod";

import { selectAllTableCols } from "@app/lib/knex";

import { TableName } from "../schemas";

enum ProjectPermissionSub {
  Secrets = "secrets"
}

enum SecretActions {
  Read = "read",
  ReadValue = "readValue"
}

const UnpackedPermissionSchema = z.object({
  subject: z
    .union([z.string().min(1), z.string().array()])
    .transform((el) => (typeof el !== "string" ? el[0] : el))
    .optional(),
  action: z.union([z.string().min(1), z.string().array()]).transform((el) => (typeof el === "string" ? [el] : el)),
  conditions: z.unknown().optional(),
  inverted: z.boolean().optional()
});

const $unpackPermissions = (permissions: unknown) =>
  UnpackedPermissionSchema.array().parse(unpackRules((permissions || []) as PackRule<RawRuleOf<MongoAbility>>[]));

const $updatePermissionsUp = (permissions: unknown) => {
  const parsedPermissions = $unpackPermissions(permissions);
  let shouldUpdate = false;

  for (let i = 0; i < parsedPermissions.length; i += 1) {
    const parsedPermission = parsedPermissions[i];
    const { subject, action } = parsedPermission;

    if (subject === ProjectPermissionSub.Secrets) {
      if (action.includes(SecretActions.Read) && !action.includes(SecretActions.ReadValue)) {
        action.push(SecretActions.ReadValue);
        parsedPermissions[i] = { ...parsedPermission, action };
        shouldUpdate = true;
      }
    }
  }

  return {
    parsedPermissions,
    shouldUpdate
  };
};

const $updatePermissionsDown = (permissions: unknown) => {
  const parsedPermissions = $unpackPermissions(permissions);

  let shouldUpdate = false;
  for (let i = 0; i < parsedPermissions.length; i += 1) {
    const parsedPermission = parsedPermissions[i];

    const { subject, action } = parsedPermission;

    if (subject === ProjectPermissionSub.Secrets) {
      const readValueIndex = action.indexOf(SecretActions.ReadValue);

      if (action.includes(SecretActions.ReadValue) && readValueIndex !== -1) {
        action.splice(readValueIndex, 1);
        parsedPermissions[i] = { ...parsedPermission, action };

        shouldUpdate = true;
      }
    }
  }

  const repackedPermissions = packRules(parsedPermissions);

  return {
    repackedPermissions,
    shouldUpdate
  };
};

const CHUNK_SIZE = 1000;

export async function up(knex: Knex): Promise<void> {
  const projectRoles = await knex(TableName.ProjectRoles).select(selectAllTableCols(TableName.ProjectRoles));
  const projectIdentityAdditionalPrivileges = await knex(TableName.IdentityProjectAdditionalPrivilege).select(
    selectAllTableCols(TableName.IdentityProjectAdditionalPrivilege)
  );
  const projectUserAdditionalPrivileges = await knex(TableName.ProjectUserAdditionalPrivilege).select(
    selectAllTableCols(TableName.ProjectUserAdditionalPrivilege)
  );

  const serviceTokens = await knex(TableName.ServiceToken).select(selectAllTableCols(TableName.ServiceToken));

  const updatedServiceTokens = serviceTokens.reduce<typeof serviceTokens>((acc, serviceToken) => {
    const { permissions } = serviceToken; // Service tokens are special, and include an array of actions only.

    if (permissions.includes(SecretActions.Read) && !permissions.includes(SecretActions.ReadValue)) {
      permissions.push(SecretActions.ReadValue);
      acc.push({
        ...serviceToken,
        permissions
      });
    }
    return acc;
  }, []);

  if (updatedServiceTokens.length > 0) {
    for (let i = 0; i < updatedServiceTokens.length; i += CHUNK_SIZE) {
      const chunk = updatedServiceTokens.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.ServiceToken)
        .whereIn(
          "id",
          chunk.map((t) => t.id)
        )
        .update({
          // @ts-expect-error -- raw query
          permissions: knex.raw(
            `CASE id
            ${chunk.map((t) => `WHEN '${t.id}' THEN ?::text[]`).join(" ")}
            END`,
            chunk.map((t) => t.permissions)
          )
        });
    }
  }

  const updatedRoles = projectRoles.reduce<typeof projectRoles>((acc, projectRole) => {
    const { shouldUpdate, parsedPermissions } = $updatePermissionsUp(projectRole.permissions);

    if (shouldUpdate) {
      acc.push({
        ...projectRole,
        permissions: JSON.stringify(packRules(parsedPermissions))
      });
    }
    return acc;
  }, []);

  const updatedIdentityAdditionalPrivileges = projectIdentityAdditionalPrivileges.reduce<
    typeof projectIdentityAdditionalPrivileges
  >((acc, identityAdditionalPrivilege) => {
    const { shouldUpdate, parsedPermissions } = $updatePermissionsUp(identityAdditionalPrivilege.permissions);

    if (shouldUpdate) {
      acc.push({
        ...identityAdditionalPrivilege,
        permissions: JSON.stringify(packRules(parsedPermissions))
      });
    }
    return acc;
  }, []);

  const updatedUserAdditionalPrivileges = projectUserAdditionalPrivileges.reduce<
    typeof projectUserAdditionalPrivileges
  >((acc, userAdditionalPrivilege) => {
    const { shouldUpdate, parsedPermissions } = $updatePermissionsUp(userAdditionalPrivilege.permissions);

    if (shouldUpdate) {
      acc.push({
        ...userAdditionalPrivilege,
        permissions: JSON.stringify(packRules(parsedPermissions))
      });
    }
    return acc;
  }, []);

  if (updatedRoles.length > 0) {
    for (let i = 0; i < updatedRoles.length; i += CHUNK_SIZE) {
      const chunk = updatedRoles.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.ProjectRoles).insert(chunk).onConflict("id").merge(["permissions"]);
    }
  }

  if (updatedIdentityAdditionalPrivileges.length > 0) {
    for (let i = 0; i < updatedIdentityAdditionalPrivileges.length; i += CHUNK_SIZE) {
      const chunk = updatedIdentityAdditionalPrivileges.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.IdentityProjectAdditionalPrivilege).insert(chunk).onConflict("id").merge(["permissions"]);
    }
  }

  if (updatedUserAdditionalPrivileges.length > 0) {
    for (let i = 0; i < updatedUserAdditionalPrivileges.length; i += CHUNK_SIZE) {
      const chunk = updatedUserAdditionalPrivileges.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.ProjectUserAdditionalPrivilege).insert(chunk).onConflict("id").merge(["permissions"]);
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  const projectRoles = await knex(TableName.ProjectRoles).select(selectAllTableCols(TableName.ProjectRoles));
  const identityAdditionalPrivileges = await knex(TableName.IdentityProjectAdditionalPrivilege).select(
    selectAllTableCols(TableName.IdentityProjectAdditionalPrivilege)
  );
  const userAdditionalPrivileges = await knex(TableName.ProjectUserAdditionalPrivilege).select(
    selectAllTableCols(TableName.ProjectUserAdditionalPrivilege)
  );
  const serviceTokens = await knex(TableName.ServiceToken).select(selectAllTableCols(TableName.ServiceToken));

  const updatedServiceTokens = serviceTokens.reduce<typeof serviceTokens>((acc, serviceToken) => {
    const { permissions } = serviceToken;

    if (permissions.includes(SecretActions.ReadValue)) {
      permissions.splice(permissions.indexOf(SecretActions.ReadValue), 1);
      acc.push({
        ...serviceToken,
        permissions
      });
    }
    return acc;
  }, []);

  if (updatedServiceTokens.length > 0) {
    for (let i = 0; i < updatedServiceTokens.length; i += CHUNK_SIZE) {
      const chunk = updatedServiceTokens.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.ServiceToken)
        .whereIn(
          "id",
          chunk.map((t) => t.id)
        )
        .update({
          // @ts-expect-error -- raw query
          permissions: knex.raw(
            `CASE id
            ${chunk.map((t) => `WHEN '${t.id}' THEN ?::text[]`).join(" ")}
            END`,
            chunk.map((t) => t.permissions)
          )
        });
    }
  }

  const updatedRoles = projectRoles.reduce<typeof projectRoles>((acc, projectRole) => {
    const { shouldUpdate, repackedPermissions } = $updatePermissionsDown(projectRole.permissions);

    if (shouldUpdate) {
      acc.push({
        ...projectRole,
        permissions: JSON.stringify(repackedPermissions)
      });
    }
    return acc;
  }, []);

  const updatedIdentityAdditionalPrivileges = identityAdditionalPrivileges.reduce<typeof identityAdditionalPrivileges>(
    (acc, identityAdditionalPrivilege) => {
      const { shouldUpdate, repackedPermissions } = $updatePermissionsDown(identityAdditionalPrivilege.permissions);

      if (shouldUpdate) {
        acc.push({
          ...identityAdditionalPrivilege,
          permissions: JSON.stringify(repackedPermissions)
        });
      }
      return acc;
    },
    []
  );

  const updatedUserAdditionalPrivileges = userAdditionalPrivileges.reduce<typeof userAdditionalPrivileges>(
    (acc, userAdditionalPrivilege) => {
      const { shouldUpdate, repackedPermissions } = $updatePermissionsDown(userAdditionalPrivilege.permissions);

      if (shouldUpdate) {
        acc.push({
          ...userAdditionalPrivilege,
          permissions: JSON.stringify(repackedPermissions)
        });
      }
      return acc;
    },
    []
  );

  if (updatedRoles.length > 0) {
    for (let i = 0; i < updatedRoles.length; i += CHUNK_SIZE) {
      const chunk = updatedRoles.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.ProjectRoles).insert(chunk).onConflict("id").merge(["permissions"]);
    }
  }

  if (updatedIdentityAdditionalPrivileges.length > 0) {
    for (let i = 0; i < updatedIdentityAdditionalPrivileges.length; i += CHUNK_SIZE) {
      const chunk = updatedIdentityAdditionalPrivileges.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.IdentityProjectAdditionalPrivilege).insert(chunk).onConflict("id").merge(["permissions"]);
    }
  }

  if (updatedUserAdditionalPrivileges.length > 0) {
    for (let i = 0; i < updatedUserAdditionalPrivileges.length; i += CHUNK_SIZE) {
      const chunk = updatedUserAdditionalPrivileges.slice(i, i + CHUNK_SIZE);

      // eslint-disable-next-line no-await-in-loop
      await knex(TableName.ProjectUserAdditionalPrivilege).insert(chunk).onConflict("id").merge(["permissions"]);
    }
  }
}
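The migration above operates on permissions stored in CASL's packed-rule format, so each rule set has to be unpacked, patched, and repacked. A minimal standalone sketch of that transformation, using @casl/ability's packRules/unpackRules with made-up illustrative rules rather than the migration's real rows:

import { packRules, unpackRules } from "@casl/ability/extra";

// Illustrative packed rules, roughly as they might sit in a `permissions` JSON column.
const stored = packRules([
  { action: ["read", "edit"], subject: "secrets" },
  { action: "read", subject: "secret-folders" }
]);

// Mirror of the "up" direction: every "secrets" rule that grants "read" also gains
// "readValue", so pre-existing roles keep being able to reveal secret values.
const patched = unpackRules(stored).map((rule) => {
  const actions = Array.isArray(rule.action) ? rule.action : [rule.action];
  const needsReadValue =
    rule.subject === "secrets" && actions.includes("read") && !actions.includes("readValue");
  return needsReadValue ? { ...rule, action: [...actions, "readValue"] } : rule;
});

// Repack before writing back, just as the migration stringifies packRules(...).
console.log(JSON.stringify(packRules(patched)));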
backend/src/db/migrations/20250226021631_secret-requests.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import { Knex } from "knex";

import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");

  await knex.schema.alterTable(TableName.SecretSharing, (table) => {
    if (!hasSharingTypeColumn) {
      table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
    }
  });
}

export async function down(knex: Knex): Promise<void> {
  const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");

  await knex.schema.alterTable(TableName.SecretSharing, (table) => {
    if (hasSharingTypeColumn) {
      table.dropColumn("type");
    }
  });
}
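For context on the column added here: the SecretSharing table now holds both classic shared secrets and the new secret requests (see the CREATE_SECRET_REQUEST audit event later in this diff), distinguished by `type`. A hedged sketch of how the discriminator can be queried; the enum values and table name string below are assumptions for illustration, the real enum lives in secret-sharing-types.ts:

import { Knex } from "knex";

// Assumed values; the actual enum is defined in
// backend/src/services/secret-sharing/secret-sharing-types.ts.
enum SecretSharingType {
  Share = "share",
  Request = "request"
}

// Existing rows default to Share via the migration above, so only rows created
// through the new secret-request flow carry the Request discriminator.
async function listSecretRequests(knex: Knex) {
  return knex("secret_sharing").where({ type: SecretSharingType.Request }).select("*");
}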
@@ -0,0 +1,31 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
  const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
  if (await knex.schema.hasTable(TableName.SuperAdmin)) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      if (!hasAuthConsentContentCol) {
        t.text("authConsentContent");
      }
      if (!hasPageFrameContentCol) {
        t.text("pageFrameContent");
      }
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
  const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
  await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
    if (hasAuthConsentContentCol) {
      t.dropColumn("authConsentContent");
    }
    if (hasPageFrameContentCol) {
      t.dropColumn("pageFrameContent");
    }
  });
}
@@ -0,0 +1,35 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  for await (const tableName of [
    TableName.SecretV2,
    TableName.SecretVersionV2,
    TableName.SecretApprovalRequestSecretV2
  ]) {
    const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");

    if (hasReminderNoteCol) {
      await knex.schema.alterTable(tableName, (t) => {
        t.string("reminderNote", 1024).alter();
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  for await (const tableName of [
    TableName.SecretV2,
    TableName.SecretVersionV2,
    TableName.SecretApprovalRequestSecretV2
  ]) {
    const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");

    if (hasReminderNoteCol) {
      await knex.schema.alterTable(tableName, (t) => {
        t.string("reminderNote").alter();
      });
    }
  }
}
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.SecretFolder, (t) => {
      t.dropColumn("description");
    });
  }
}
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.string("comment");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
    await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
      t.dropColumn("comment");
    });
  }
}
@@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
  requestId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  reviewerUserId: z.string().uuid()
  reviewerUserId: z.string().uuid(),
  comment: z.string().nullable().optional()
});

export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
@@ -15,7 +15,8 @@ export const SecretFoldersSchema = z.object({
  updatedAt: z.date(),
  envId: z.string().uuid(),
  parentId: z.string().uuid().nullable().optional(),
  isReserved: z.boolean().default(false).nullable().optional()
  isReserved: z.boolean().default(false).nullable().optional(),
  description: z.string().nullable().optional()
});

export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
@@ -12,6 +12,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
  id: z.string().uuid(),
  encryptedValue: z.string().nullable().optional(),
  type: z.string(),
  iv: z.string().nullable().optional(),
  tag: z.string().nullable().optional(),
  hashedHex: z.string().nullable().optional(),
@@ -23,7 +23,9 @@ export const SuperAdminSchema = z.object({
  defaultAuthOrgId: z.string().uuid().nullable().optional(),
  enabledLoginMethods: z.string().array().nullable().optional(),
  encryptedSlackClientId: zodBuffer.nullable().optional(),
  encryptedSlackClientSecret: zodBuffer.nullable().optional()
  encryptedSlackClientSecret: zodBuffer.nullable().optional(),
  authConsentContent: z.string().nullable().optional(),
  pageFrameContent: z.string().nullable().optional()
});

export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -1,16 +1,11 @@
import { z } from "zod";

import {
  SecretApprovalRequestsReviewersSchema,
  SecretApprovalRequestsSchema,
  SecretTagsSchema,
  UsersSchema
} from "@app/db/schemas";
import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";

@@ -159,7 +154,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
        id: z.string()
      }),
      body: z.object({
        status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
        status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
        comment: z.string().optional()
      }),
      response: {
        200: z.object({
@@ -175,8 +171,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        approvalId: req.params.id,
        status: req.body.status
        status: req.body.status,
        comment: req.body.comment
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        projectId: review.projectId,
        event: {
          type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
          metadata: {
            secretApprovalRequestId: review.requestId,
            reviewedBy: review.reviewerUserId,
            status: review.status as ApprovalStatus,
            comment: review.comment || ""
          }
        }
      });

      return { review };
    }
  });
@@ -232,15 +245,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
    }
  });

  const tagSchema = SecretTagsSchema.pick({
    id: true,
    slug: true,
    name: true,
    color: true
  })
    .array()
    .optional();

  server.route({
    method: "GET",
    url: "/:id",
@@ -268,13 +272,13 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
            environment: z.string(),
            statusChangedByUser: approvalRequestUser.optional(),
            committerUser: approvalRequestUser,
            reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
            reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
            secretPath: z.string(),
            commits: secretRawSchema
              .omit({ _id: true, environment: true, workspace: true, type: true, version: true })
              .extend({
                op: z.string(),
                tags: tagSchema,
                tags: SanitizedTagSchema.array().optional(),
                secretMetadata: ResourceMetadataSchema.nullish(),
                secret: z
                  .object({
@@ -293,7 +297,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
                    secretKey: z.string(),
                    secretValue: z.string().optional(),
                    secretComment: z.string().optional(),
                    tags: tagSchema,
                    tags: SanitizedTagSchema.array().optional(),
                    secretMetadata: ResourceMetadataSchema.nullish()
                  })
                  .optional()
@@ -1,6 +1,6 @@
import z from "zod";

import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
@@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";

const AccessListEntrySchema = z
  .object({
    allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
    allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
    id: z.string(),
    membershipId: z.string(),
    name: z.string()
@@ -22,7 +22,11 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
      }),
      response: {
        200: z.object({
          secretVersions: secretRawSchema.array()
          secretVersions: secretRawSchema
            .extend({
              secretValueHidden: z.boolean()
            })
            .array()
        })
      }
    },
@@ -37,6 +41,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
        offset: req.query.offset,
        secretId: req.params.secretId
      });

      return { secretVersions };
    }
  });
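The `secretValueHidden` flag added to the version response pairs with the describe/readValue permission split introduced elsewhere in this diff: a caller allowed to list secrets but not to read their values gets versions with the value withheld and the flag set. A rough sketch of that shape (the real masking happens in the service layer; the schema subset and helper below are illustrative):

import { z } from "zod";

// Illustrative subset of secretRawSchema extended the same way as in the route above.
const secretVersionSchema = z.object({
  id: z.string(),
  secretKey: z.string(),
  secretValue: z.string().optional(),
  secretValueHidden: z.boolean()
});

// Sketch of the masking idea: without the "readValue" action the value is dropped
// and the flag is set, so clients can render a placeholder instead of an empty value.
const presentVersion = (
  version: { id: string; secretKey: string; secretValue: string },
  canReadValue: boolean
) =>
  secretVersionSchema.parse({
    ...version,
    secretValue: canReadValue ? version.secretValue : undefined,
    secretValueHidden: !canReadValue
  });

console.log(presentVersion({ id: "1", secretKey: "DB_URL", secretValue: "postgres://example" }, false));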
@@ -1,10 +1,10 @@
import { z } from "zod";

import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
import { SecretSnapshotsSchema } from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
@@ -31,13 +31,9 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
            secretVersions: secretRawSchema
              .omit({ _id: true, environment: true, workspace: true, type: true })
              .extend({
                secretValueHidden: z.boolean(),
                secretId: z.string(),
                tags: SecretTagsSchema.pick({
                  id: true,
                  slug: true,
                  name: true,
                  color: true
                }).array()
                tags: SanitizedTagSchema.array()
              })
              .array(),
            folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
@@ -56,6 +52,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
        actorOrgId: req.permission.orgId,
        id: req.params.secretSnapshotId
      });

      return { secretSnapshot };
    }
  });
@@ -22,6 +22,7 @@ import {
} from "@app/services/secret-sync/secret-sync-types";

import { KmipPermission } from "../kmip/kmip-enum";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";

export type TListProjectAuditLogDTO = {
  filter: {
@@ -165,6 +166,7 @@ export enum EventType {
  SECRET_APPROVAL_REQUEST = "secret-approval-request",
  SECRET_APPROVAL_CLOSED = "secret-approval-closed",
  SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
  SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
  SIGN_SSH_KEY = "sign-ssh-key",
  ISSUE_SSH_CREDS = "issue-ssh-creds",
  CREATE_SSH_CA = "create-ssh-certificate-authority",
@@ -250,6 +252,7 @@ export enum EventType {
  UPDATE_APP_CONNECTION = "update-app-connection",
  DELETE_APP_CONNECTION = "delete-app-connection",
  CREATE_SHARED_SECRET = "create-shared-secret",
  CREATE_SECRET_REQUEST = "create-secret-request",
  DELETE_SHARED_SECRET = "delete-shared-secret",
  READ_SHARED_SECRET = "read-shared-secret",
  GET_SECRET_SYNCS = "get-secret-syncs",
@@ -1141,6 +1144,7 @@ interface CreateFolderEvent {
    folderId: string;
    folderName: string;
    folderPath: string;
    description?: string;
  };
}

@@ -1312,6 +1316,16 @@ interface SecretApprovalRequest {
  };
}

interface SecretApprovalRequestReview {
  type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
  metadata: {
    secretApprovalRequestId: string;
    reviewedBy: string;
    status: ApprovalStatus;
    comment: string;
  };
}

interface SignSshKey {
  type: EventType.SIGN_SSH_KEY;
  metadata: {
@@ -2020,6 +2034,15 @@ interface CreateSharedSecretEvent {
  };
}

interface CreateSecretRequestEvent {
  type: EventType.CREATE_SECRET_REQUEST;
  metadata: {
    id: string;
    accessType: string;
    name?: string;
  };
}

interface DeleteSharedSecretEvent {
  type: EventType.DELETE_SHARED_SECRET;
  metadata: {
@@ -2470,4 +2493,6 @@ export type Event =
  | KmipOperationActivateEvent
  | KmipOperationRevokeEvent
  | KmipOperationLocateEvent
  | KmipOperationRegisterEvent;
  | KmipOperationRegisterEvent
  | CreateSecretRequestEvent
  | SecretApprovalRequestReview;
@@ -51,7 +51,6 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
user: providerInputs.username,
|
||||
password: providerInputs.password,
|
||||
ssl,
|
||||
pool: { min: 0, max: 1 },
|
||||
// @ts-expect-error this is because of knexjs type signature issue. This is directly passed to driver
|
||||
// https://github.com/knex/knex/blob/b6507a7129d2b9fafebf5f831494431e64c6a8a0/lib/dialects/mssql/index.js#L66
|
||||
// https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19
|
||||
@@ -87,7 +86,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
@@ -106,10 +105,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
};
|
||||
|
||||
if (providerInputs.projectGatewayId) {
|
||||
console.log(">>>>>> inside gateway");
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
console.log(">>>>>> outside gateway");
|
||||
await gatewayCallback();
|
||||
}
|
||||
return isConnected;
|
||||
@@ -121,24 +118,27 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
const password = generatePassword(providerInputs.client);
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
try {
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
await db.destroy();
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
@@ -154,16 +154,18 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
const { database } = providerInputs;
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
|
||||
await db.destroy();
|
||||
try {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
@@ -187,18 +189,19 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
try {
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
|
||||
await db.destroy();
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
|
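The hunks above wrap each Knex session in try/finally so that db.destroy() always runs, even when one of the compiled SQL statements throws. A minimal standalone sketch of that pattern follows; the helper name, the connection config, and the raw SQL input are placeholders, not the provider's real inputs.

import knex, { Knex } from "knex";

// Hypothetical helper mirroring the try/finally pattern above: run a set of
// semicolon-separated statements in one transaction, then always tear the
// connection down, whether the transaction succeeded or failed.
const runStatementsOnce = async (connection: Knex.Config, rawSql: string) => {
  const db = knex(connection); // stands in for $getClient(...)
  const queries = rawSql.split(";").filter(Boolean);
  try {
    await db.transaction(async (tx) => {
      for (const query of queries) {
        // eslint-disable-next-line no-await-in-loop
        await tx.raw(query);
      }
    });
  } finally {
    await db.destroy(); // released on success and failure alike
  }
};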
@@ -474,7 +474,7 @@ export const gatewayServiceFactory = ({
relayHost,
relayPort: Number(relayPort),
tlsOptions: {
key: privateKey,
key: privateKey.toString(),
ca: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
cert: clientCert.toString("pem")
},

@@ -17,6 +17,14 @@ export enum ProjectPermissionActions {
Delete = "delete"
}

export enum ProjectPermissionSecretActions {
DescribeSecret = "read",
ReadValue = "readValue",
Create = "create",
Edit = "edit",
Delete = "delete"
}
|
||||
export enum ProjectPermissionCmekActions {
|
||||
Read = "read",
|
||||
Create = "create",
|
||||
@@ -115,7 +123,7 @@ export type IdentityManagementSubjectFields = {
|
||||
|
||||
export type ProjectPermissionSet =
|
||||
| [
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SecretSubjectFields)
|
||||
]
|
||||
| [
|
||||
@@ -429,6 +437,7 @@ const GeneralPermissionSchema = [
|
||||
})
|
||||
];
|
||||
|
||||
// Do not update this schema anymore, as it's kept purely for backwards compatability. Update V2 schema only.
|
||||
export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
|
||||
@@ -460,7 +469,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
|
||||
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
),
|
||||
conditions: SecretConditionV2Schema.describe(
|
||||
@@ -517,7 +526,6 @@ const buildAdminPermissionRules = () => {
|
||||
|
||||
// Admins get full access to everything
|
||||
[
|
||||
ProjectPermissionSub.Secrets,
|
||||
ProjectPermissionSub.SecretFolders,
|
||||
ProjectPermissionSub.SecretImports,
|
||||
ProjectPermissionSub.SecretApproval,
|
||||
@@ -550,10 +558,21 @@ const buildAdminPermissionRules = () => {
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Delete
|
||||
],
|
||||
el as ProjectPermissionSub
|
||||
el
|
||||
);
|
||||
});
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionSecretActions.Delete
|
||||
],
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionDynamicSecretActions.ReadRootCredential,
|
||||
@@ -613,10 +632,11 @@ const buildMemberPermissionRules = () => {
|
||||
|
||||
can(
|
||||
[
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Delete
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Delete
|
||||
],
|
||||
ProjectPermissionSub.Secrets
|
||||
);
|
||||
@@ -788,7 +808,8 @@ export const projectMemberPermissions = buildMemberPermissionRules();
|
||||
const buildViewerPermissionRules = () => {
|
||||
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
|
||||
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionSecretActions.ReadValue, ProjectPermissionSub.Secrets);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
|
||||
can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
|
||||
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
|
||||
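The viewer rules above now grant DescribeSecret and ReadValue separately instead of a single Read action. A hedged sketch of how such CASL rules behave when checked, using @casl/ability's public API with the raw action strings from the enum above; the variable names are illustrative only.

import { AbilityBuilder, createMongoAbility } from "@casl/ability";

// Illustrative only: a cut-down viewer ability with the two new secret actions.
const { can, build } = new AbilityBuilder(createMongoAbility);
can("read", "Secrets");      // ProjectPermissionSecretActions.DescribeSecret
can("readValue", "Secrets"); // ProjectPermissionSecretActions.ReadValue
const viewerAbility = build();

viewerAbility.can("readValue", "Secrets"); // true - viewers may reveal values
viewerAbility.can("edit", "Secrets");      // false - no write-type action granted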
@@ -831,6 +852,8 @@ export const buildServiceTokenProjectPermission = (
|
||||
) => {
|
||||
const canWrite = permission.includes("write");
|
||||
const canRead = permission.includes("read");
|
||||
const canReadValue = permission.includes("readValue");
|
||||
|
||||
const { can, build } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
|
||||
scopes.forEach(({ secretPath, environment }) => {
|
||||
[ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
|
||||
@@ -860,6 +883,14 @@ export const buildServiceTokenProjectPermission = (
|
||||
environment
|
||||
});
|
||||
}
|
||||
|
||||
if (subject === ProjectPermissionSub.Secrets && canReadValue) {
|
||||
// @ts-expect-error type
|
||||
can(ProjectPermissionSecretActions.ReadValue, subject as ProjectPermissionSub.Secrets, {
|
||||
secretPath: { $glob: secretPath },
|
||||
environment
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
@@ -916,7 +947,17 @@ export const backfillPermissionV1SchemaToV2Schema = (
|
||||
subject: ProjectPermissionSub.SecretImports as const
|
||||
}));
|
||||
|
||||
const secretPolicies = secretSubjects.map(({ subject, ...el }) => ({
|
||||
subject: ProjectPermissionSub.Secrets as const,
|
||||
...el,
|
||||
action:
|
||||
el.action.includes(ProjectPermissionActions.Read) && !el.action.includes(ProjectPermissionSecretActions.ReadValue)
|
||||
? el.action.concat(ProjectPermissionSecretActions.ReadValue)
|
||||
: el.action
|
||||
}));
|
||||
|
||||
const secretFolderPolicies = secretSubjects
|
||||
|
||||
.map(({ subject, ...el }) => ({
|
||||
...el,
|
||||
// read permission is not needed anymore
|
||||
@@ -958,6 +999,7 @@ export const backfillPermissionV1SchemaToV2Schema = (
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
secretImportPolicies,
|
||||
secretPolicies,
|
||||
dynamicSecretPolicies,
|
||||
hasReadOnlyFolder.length ? [] : secretFolderPolicies
|
||||
);
|
||||
|
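The backfill above appends ReadValue whenever a legacy V1 policy only carried Read on secrets. A small sketch of that mapping in isolation; the function name is made up for illustration, the string literals mirror the enum values shown earlier.

// Minimal sketch of the V1 -> V2 action backfill: legacy "read" implies both
// describe ("read") and "readValue" in the V2 secret policy.
const backfillSecretActions = (actions: string[]): string[] =>
  actions.includes("read") && !actions.includes("readValue")
    ? actions.concat("readValue")
    : actions;

backfillSecretActions(["read", "edit"]);      // ["read", "edit", "readValue"]
backfillSecretActions(["readValue", "read"]); // unchanged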
@@ -100,6 +100,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
|
||||
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
|
||||
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
|
||||
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
|
||||
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
|
||||
@@ -162,8 +163,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
reviewerEmail: email,
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName
|
||||
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
|
||||
reviewerFirstName: firstName,
|
||||
reviewerComment: comment
|
||||
}) =>
|
||||
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
|
||||
},
|
||||
{
|
||||
key: "approverUserId",
|
||||
|
@@ -58,7 +58,7 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
|
||||
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
|
||||
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
|
||||
@@ -88,7 +88,12 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
secretDAL: TSecretDALFactory;
|
||||
secretTagDAL: Pick<
|
||||
TSecretTagDALFactory,
|
||||
"findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "saveTagsToSecretV2" | "deleteTagsToSecretV2"
|
||||
| "findManyTagsById"
|
||||
| "saveTagsToSecret"
|
||||
| "deleteTagsManySecret"
|
||||
| "saveTagsToSecretV2"
|
||||
| "deleteTagsToSecretV2"
|
||||
| "find"
|
||||
>;
|
||||
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
|
||||
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
|
||||
@@ -106,7 +111,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
|
||||
secretV2BridgeDAL: Pick<
|
||||
TSecretV2BridgeDALFactory,
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany"
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
|
||||
>;
|
||||
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
|
||||
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
|
||||
@@ -320,6 +325,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
approvalId,
|
||||
actor,
|
||||
status,
|
||||
comment,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
@@ -372,15 +378,18 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
return secretApprovalRequestReviewerDAL.create(
|
||||
{
|
||||
status,
|
||||
comment,
|
||||
requestId: secretApprovalRequest.id,
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
|
||||
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
|
||||
});
|
||||
return reviewStatus;
|
||||
|
||||
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
|
||||
};
|
||||
|
||||
const updateApprovalStatus = async ({
|
||||
@@ -910,7 +919,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
@@ -997,6 +1006,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
: keyName2BlindIndex[secretName];
|
||||
// add tags
|
||||
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
|
||||
|
||||
return {
|
||||
...latestSecretVersions[secretId],
|
||||
...el,
|
||||
@@ -1294,7 +1304,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
secretMetadata
|
||||
}) => {
|
||||
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
|
||||
if (tagIds?.length) commitTagIds[secretKey] = tagIds;
|
||||
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
|
||||
return {
|
||||
...latestSecretVersions[secretId],
|
||||
secretMetadata,
|
||||
@@ -1359,9 +1369,9 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
const tagsGroupById = groupBy(tags, (i) => i.id);
|
||||
|
||||
commits.forEach((commit) => {
|
||||
let action = ProjectPermissionActions.Create;
|
||||
if (commit.op === SecretOperations.Update) action = ProjectPermissionActions.Edit;
|
||||
if (commit.op === SecretOperations.Delete) action = ProjectPermissionActions.Delete;
|
||||
let action = ProjectPermissionSecretActions.Create;
|
||||
if (commit.op === SecretOperations.Update) action = ProjectPermissionSecretActions.Edit;
|
||||
if (commit.op === SecretOperations.Delete) action = ProjectPermissionSecretActions.Delete;
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
action,
|
||||
|
@@ -80,6 +80,7 @@ export type TStatusChangeDTO = {
|
||||
export type TReviewRequestDTO = {
|
||||
approvalId: string;
|
||||
status: ApprovalStatus;
|
||||
comment?: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TApprovalRequestCountDTO = TProjectPermission;
|
||||
|
@@ -265,6 +265,7 @@ export const secretReplicationServiceFactory = ({
|
||||
folderDAL,
|
||||
secretImportDAL,
|
||||
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
|
||||
viewSecretValue: true,
|
||||
hasSecretAccess: () => true
|
||||
});
|
||||
// secrets that gets replicated across imports
|
||||
|
@@ -15,7 +15,11 @@ import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "../permission/project-permission";
|
||||
import { TSecretRotationDALFactory } from "./secret-rotation-dal";
|
||||
import { TSecretRotationQueueFactory } from "./secret-rotation-queue";
|
||||
import { TSecretRotationEncData } from "./secret-rotation-queue/secret-rotation-queue-types";
|
||||
@@ -106,7 +110,7 @@ export const secretRotationServiceFactory = ({
|
||||
});
|
||||
}
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
|
@@ -22,7 +22,11 @@ import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/se
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "../permission/project-permission";
|
||||
import {
|
||||
TGetSnapshotDataDTO,
|
||||
TProjectSnapshotCountDTO,
|
||||
@@ -34,6 +38,7 @@ import { TSnapshotFolderDALFactory } from "./snapshot-folder-dal";
|
||||
import { TSnapshotSecretDALFactory } from "./snapshot-secret-dal";
|
||||
import { TSnapshotSecretV2DALFactory } from "./snapshot-secret-v2-dal";
|
||||
import { getFullFolderPath } from "./snapshot-service-fns";
|
||||
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "@app/services/secret/secret-fns";
|
||||
|
||||
type TSecretSnapshotServiceFactoryDep = {
|
||||
snapshotDAL: TSnapshotDALFactory;
|
||||
@@ -97,7 +102,7 @@ export const secretSnapshotServiceFactory = ({
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -134,7 +139,7 @@ export const secretSnapshotServiceFactory = ({
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -161,6 +166,7 @@ export const secretSnapshotServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
|
||||
const shouldUseBridge = snapshot.projectVersion === 3;
|
||||
let snapshotDetails;
|
||||
if (shouldUseBridge) {
|
||||
@@ -169,68 +175,110 @@ export const secretSnapshotServiceFactory = ({
|
||||
projectId: snapshot.projectId
|
||||
});
|
||||
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotV2DataById(id);
|
||||
|
||||
const fullFolderPath = await getFullFolderPath({
|
||||
folderDAL,
|
||||
folderId: encryptedSnapshotDetails.folderId,
|
||||
envId: encryptedSnapshotDetails.environment.id
|
||||
});
|
||||
|
||||
snapshotDetails = {
|
||||
...encryptedSnapshotDetails,
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
|
||||
...el,
|
||||
secretKey: el.key,
|
||||
secretValue: el.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
|
||||
: "",
|
||||
secretComment: el.encryptedComment
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
|
||||
: ""
|
||||
}))
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
|
||||
const canReadValue = permission.can(
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: encryptedSnapshotDetails.environment.slug,
|
||||
secretPath: fullFolderPath,
|
||||
secretName: el.key,
|
||||
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
|
||||
})
|
||||
);
|
||||
|
||||
let secretValue = "";
|
||||
if (canReadValue) {
|
||||
secretValue = el.encryptedValue
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
|
||||
: "";
|
||||
} else {
|
||||
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
|
||||
}
|
||||
|
||||
return {
|
||||
...el,
|
||||
secretKey: el.key,
|
||||
secretValueHidden: !canReadValue,
|
||||
secretValue,
|
||||
secretComment: el.encryptedComment
|
||||
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
|
||||
: ""
|
||||
};
|
||||
})
|
||||
};
|
||||
} else {
|
||||
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotDataById(id);
|
||||
|
||||
const fullFolderPath = await getFullFolderPath({
|
||||
folderDAL,
|
||||
folderId: encryptedSnapshotDetails.folderId,
|
||||
envId: encryptedSnapshotDetails.environment.id
|
||||
});
|
||||
|
||||
const { botKey } = await projectBotService.getBotKey(snapshot.projectId);
|
||||
if (!botKey)
|
||||
throw new NotFoundError({ message: `Project bot key not found for project with ID '${snapshot.projectId}'` });
|
||||
snapshotDetails = {
|
||||
...encryptedSnapshotDetails,
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
|
||||
...el,
|
||||
secretKey: decryptSymmetric128BitHexKeyUTF8({
|
||||
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretKeyCiphertext,
|
||||
iv: el.secretKeyIV,
|
||||
tag: el.secretKeyTag,
|
||||
key: botKey
|
||||
}),
|
||||
secretValue: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag,
|
||||
key: botKey
|
||||
}),
|
||||
secretComment:
|
||||
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
|
||||
? decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretCommentCiphertext,
|
||||
iv: el.secretCommentIV,
|
||||
tag: el.secretCommentTag,
|
||||
key: botKey
|
||||
})
|
||||
: ""
|
||||
}))
|
||||
});
|
||||
|
||||
const canReadValue = permission.can(
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: encryptedSnapshotDetails.environment.slug,
|
||||
secretPath: fullFolderPath,
|
||||
secretName: secretKey,
|
||||
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
|
||||
})
|
||||
);
|
||||
|
||||
let secretValue = "";
|
||||
|
||||
if (canReadValue) {
|
||||
secretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretValueCiphertext,
|
||||
iv: el.secretValueIV,
|
||||
tag: el.secretValueTag,
|
||||
key: botKey
|
||||
});
|
||||
} else {
|
||||
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
|
||||
}
|
||||
|
||||
return {
|
||||
...el,
|
||||
secretKey,
|
||||
secretValueHidden: !canReadValue,
|
||||
secretValue,
|
||||
secretComment:
|
||||
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
|
||||
? decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: el.secretCommentCiphertext,
|
||||
iv: el.secretCommentIV,
|
||||
tag: el.secretCommentTag,
|
||||
key: botKey
|
||||
})
|
||||
: ""
|
||||
};
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
const fullFolderPath = await getFullFolderPath({
|
||||
folderDAL,
|
||||
folderId: snapshotDetails.folderId,
|
||||
envId: snapshotDetails.environment.id
|
||||
});
|
||||
|
||||
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: snapshotDetails.environment.slug,
|
||||
secretPath: fullFolderPath
|
||||
})
|
||||
);
|
||||
|
||||
return snapshotDetails;
|
||||
};
|
||||
|
||||
|
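Both snapshot branches above follow the same "mask the value unless the caller can read it" rule. A compact sketch of that guard, assuming a simplified permission and subject shape; the mask constant corresponds to the imported INFISICAL_SECRET_VALUE_HIDDEN_MASK.

import { subject } from "@casl/ability";

// Simplified version of the per-version guard used in both snapshot branches.
const resolveSecretValue = (
  permission: { can: (action: string, sub: unknown) => boolean },
  scope: { environment: string; secretPath: string; secretName: string },
  decryptedValue: string,
  hiddenMask: string // INFISICAL_SECRET_VALUE_HIDDEN_MASK in the service above
) => {
  const canReadValue = permission.can("readValue", subject("Secrets", scope));
  return {
    secretValueHidden: !canReadValue,
    secretValue: canReadValue ? decryptedValue : hiddenMask
  };
};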
@@ -6,7 +6,6 @@ export const sanitizedSshCertificate = SshCertificatesSchema.pick({
|
||||
sshCertificateTemplateId: true,
|
||||
serialNumber: true,
|
||||
certType: true,
|
||||
publicKey: true,
|
||||
principals: true,
|
||||
keyId: true,
|
||||
notBefore: true,
|
||||
|
@@ -638,7 +638,8 @@ export const FOLDERS = {
|
||||
environment: "The slug of the environment to create the folder in.",
|
||||
name: "The name of the folder to create.",
|
||||
path: "The path of the folder to create.",
|
||||
directory: "The directory of the folder to create. (Deprecated in favor of path)"
|
||||
directory: "The directory of the folder to create. (Deprecated in favor of path)",
|
||||
description: "An optional description label for the folder."
|
||||
},
|
||||
UPDATE: {
|
||||
folderId: "The ID of the folder to update.",
|
||||
@@ -647,7 +648,8 @@ export const FOLDERS = {
|
||||
path: "The path of the folder to update.",
|
||||
directory: "The new directory of the folder to update. (Deprecated in favor of path)",
|
||||
projectSlug: "The slug of the project where the folder is located.",
|
||||
workspaceId: "The ID of the project where the folder is located."
|
||||
workspaceId: "The ID of the project where the folder is located.",
|
||||
description: "An optional description label for the folder."
|
||||
},
|
||||
DELETE: {
|
||||
folderIdOrName: "The ID or name of the folder to delete.",
|
||||
@@ -664,6 +666,7 @@ export const SECRETS = {
|
||||
secretPath: "The path of the secret to attach tags to.",
|
||||
type: "The type of the secret to attach tags to. (shared/personal)",
|
||||
environment: "The slug of the environment where the secret is located",
|
||||
viewSecretValue: "Whether or not to retrieve the secret value.",
|
||||
projectSlug: "The slug of the project where the secret is located.",
|
||||
tagSlugs: "An array of existing tag slugs to attach to the secret."
|
||||
},
|
||||
@@ -687,6 +690,7 @@ export const RAW_SECRETS = {
|
||||
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
|
||||
environment: "The slug of the environment to list secrets from.",
|
||||
secretPath: "The secret path to list secrets from.",
|
||||
viewSecretValue: "Whether or not to retrieve the secret value.",
|
||||
includeImports: "Weather to include imported secrets or not.",
|
||||
tagSlugs: "The comma separated tag slugs to filter secrets.",
|
||||
metadataFilter:
|
||||
@@ -715,6 +719,7 @@ export const RAW_SECRETS = {
|
||||
secretPath: "The path of the secret to get.",
|
||||
version: "The version of the secret to get.",
|
||||
type: "The type of the secret to get.",
|
||||
viewSecretValue: "Whether or not to retrieve the secret value.",
|
||||
includeImports: "Weather to include imported secrets or not."
|
||||
},
|
||||
UPDATE: {
|
||||
|
@@ -24,6 +24,7 @@ const databaseReadReplicaSchema = z

const envSchema = z
.object({
INFISICAL_PLATFORM_VERSION: zpStr(z.string().optional()),
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
DISABLE_SECRET_SCANNING: z
.enum(["true", "false"])
@@ -216,6 +217,13 @@ const envSchema = z
INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),

// datadog
SHOULD_USE_DATADOG_TRACER: zodStrBool.default("false"),
DATADOG_PROFILING_ENABLED: zodStrBool.default("false"),
DATADOG_ENV: zpStr(z.string().optional().default("prod")),
DATADOG_SERVICE: zpStr(z.string().optional().default("infisical-core")),
DATADOG_HOSTNAME: zpStr(z.string().optional()),

/* CORS ----------------------------------------------------------------------------- */

CORS_ALLOWED_ORIGINS: zpStr(

@@ -1,4 +1,5 @@
|
||||
/* eslint-disable max-classes-per-file */
|
||||
|
||||
export class DatabaseError extends Error {
|
||||
name: string;
|
||||
|
||||
|
@@ -1,6 +1,8 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import crypto from "node:crypto";
|
||||
import net from "node:net";
|
||||
import tls from "node:tls";
|
||||
|
||||
import * as quic from "@infisical/quic";
|
||||
|
||||
import { BadRequestError } from "../errors";
|
||||
import { logger } from "../logger";
|
||||
@@ -8,34 +10,71 @@ import { logger } from "../logger";
|
||||
const DEFAULT_MAX_RETRIES = 3;
|
||||
const DEFAULT_RETRY_DELAY = 1000; // 1 second
|
||||
|
||||
const createTLSConnection = (relayHost: string, relayPort: number, tlsOptions: tls.TlsOptions = {}) => {
|
||||
return new Promise<tls.TLSSocket>((resolve, reject) => {
|
||||
// @ts-expect-error this is resolved in next connect
|
||||
const socket = new tls.TLSSocket(null, {
|
||||
rejectUnauthorized: true,
|
||||
...tlsOptions
|
||||
});
|
||||
|
||||
const cleanup = () => {
|
||||
socket.removeAllListeners();
|
||||
socket.end();
|
||||
};
|
||||
|
||||
socket.once("error", (err) => {
|
||||
cleanup();
|
||||
reject(err);
|
||||
});
|
||||
|
||||
socket.connect(relayPort, relayHost, () => {
|
||||
resolve(socket);
|
||||
});
|
||||
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
const [key, value] = el.split("=");
values[key.trim()] = value.trim();
});
return values;
};
|
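parseSubjectDetails above turns Node's newline-separated X.509 subject string into a lookup object. A quick illustration of the expected input and output; the values are placeholders.

// Node's X509Certificate#subject renders one "key=value" pair per line, e.g.:
const subjectText = "CN=<gateway-identity-id>\nO=<org-id>\nOU=Gateway";

const details = parseSubjectDetails(subjectText);
// details -> { CN: "<gateway-identity-id>", O: "<org-id>", OU: "Gateway" }
// which is what the verifyCallback below compares against identityId and orgId.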
||||
type TTlsOption = { ca: string; cert: string; key: string };
|
||||
|
||||
const createQuicConnection = async (
|
||||
relayHost: string,
|
||||
relayPort: number,
|
||||
tlsOptions: TTlsOption,
|
||||
identityId: string,
|
||||
orgId: string
|
||||
) => {
|
||||
const client = await quic.QUICClient.createQUICClient({
|
||||
host: relayHost,
|
||||
port: relayPort,
|
||||
config: {
|
||||
ca: tlsOptions.ca,
|
||||
cert: tlsOptions.cert,
|
||||
key: tlsOptions.key,
|
||||
applicationProtos: ["infisical-gateway"],
|
||||
verifyPeer: true,
|
||||
verifyCallback: async (certs) => {
|
||||
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
|
||||
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
|
||||
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
|
||||
const isValidServerCertificate = serverCertificate.checkIssued(caCertificate);
|
||||
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
|
||||
|
||||
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
|
||||
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
|
||||
return quic.native.CryptoError.CertificateUnknown;
|
||||
}
|
||||
|
||||
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
|
||||
return quic.native.CryptoError.CertificateExpired;
|
||||
}
|
||||
|
||||
const formatedRelayHost =
|
||||
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
|
||||
if (!serverCertificate.checkIP(formatedRelayHost)) return quic.native.CryptoError.BadCertificate;
|
||||
},
|
||||
maxIdleTimeout: 90000,
|
||||
keepAliveIntervalTime: 30000
|
||||
},
|
||||
crypto: {
|
||||
ops: {
|
||||
randomBytes: async (data) => {
|
||||
crypto.getRandomValues(new Uint8Array(data));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
return client;
|
||||
};
|
||||
|
||||
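The verifyCallback above re-implements the gateway certificate checks by hand over the raw QUIC peer certificate. A hedged standalone restatement of those same checks using node:crypto, reusing parseSubjectDetails from this file; the function name and argument shapes are illustrative.

import crypto from "node:crypto";

// Returns true only when the presented certificate was issued by the expected
// CA, is inside its validity window, and names the expected identity and org.
const isValidGatewayCert = (certDer: Uint8Array, caPem: string, identityId: string, orgId: string) => {
  const cert = new crypto.X509Certificate(Buffer.from(certDer));
  const ca = new crypto.X509Certificate(caPem);

  if (!cert.checkIssued(ca)) return false;

  const now = new Date();
  if (now > new Date(cert.validTo) || now < new Date(cert.validFrom)) return false;

  const subjectDetails = parseSubjectDetails(cert.subject);
  return subjectDetails.OU === "Gateway" && subjectDetails.CN === identityId && subjectDetails.O === orgId;
};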
type TPingGatewayAndVerifyDTO = {
|
||||
relayHost: string;
|
||||
relayPort: number;
|
||||
tlsOptions: tls.TlsOptions;
|
||||
tlsOptions: TTlsOption;
|
||||
maxRetries?: number;
|
||||
identityId: string;
|
||||
orgId: string;
|
||||
@@ -44,56 +83,44 @@ type TPingGatewayAndVerifyDTO = {
|
||||
export const pingGatewayAndVerify = async ({
|
||||
relayHost,
|
||||
relayPort,
|
||||
tlsOptions = {},
|
||||
tlsOptions,
|
||||
maxRetries = DEFAULT_MAX_RETRIES,
|
||||
identityId,
|
||||
orgId
|
||||
}: TPingGatewayAndVerifyDTO) => {
|
||||
let lastError: Error | null = null;
|
||||
|
||||
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
|
||||
throw new BadRequestError({
|
||||
error: err as Error
|
||||
});
|
||||
});
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
|
||||
try {
|
||||
const socket = await createTLSConnection(relayHost, relayPort, tlsOptions);
|
||||
socket.setTimeout(2000);
|
||||
const stream = quicClient.connection.newStream("bidi");
|
||||
const pingWriter = stream.writable.getWriter();
|
||||
await pingWriter.write(Buffer.from("PING\n"));
|
||||
pingWriter.releaseLock();
|
||||
|
||||
const pingResult = await new Promise((resolve, reject) => {
|
||||
socket.once("timeout", () => {
|
||||
socket.destroy();
|
||||
reject(new Error("Timeout"));
|
||||
// Read PONG response
|
||||
const reader = stream.readable.getReader();
|
||||
const { value, done } = await reader.read();
|
||||
|
||||
if (done) {
|
||||
throw new BadRequestError({
|
||||
message: "Gateway closed before receiving PONG"
|
||||
});
|
||||
socket.once("close", () => {
|
||||
socket.destroy();
|
||||
}
|
||||
|
||||
const response = Buffer.from(value).toString();
|
||||
|
||||
if (response !== "PONG\n" && response !== "PONG") {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to Ping. Unexpected response: ${response}`
|
||||
});
|
||||
}
|
||||
|
||||
socket.once("end", () => {
|
||||
socket.destroy();
|
||||
});
|
||||
socket.once("error", (err) => {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
socket.write(Buffer.from("PING\n"), () => {
|
||||
socket.once("data", (data) => {
|
||||
const response = (data as string).toString();
|
||||
const certificate = socket.getPeerCertificate();
|
||||
|
||||
if (certificate.subject.CN !== identityId || certificate.subject.O !== orgId) {
|
||||
throw new BadRequestError({
|
||||
message: `Invalid gateway. Certificate not found for ${identityId} in organization ${orgId}`
|
||||
});
|
||||
}
|
||||
|
||||
if (response === "PONG") {
|
||||
resolve(true);
|
||||
} else {
|
||||
reject(new Error(`Unexpected response: ${response}`));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
socket.end();
|
||||
return pingResult;
|
||||
reader.releaseLock();
|
||||
return;
|
||||
} catch (err) {
|
||||
lastError = err as Error;
|
||||
|
||||
@@ -102,6 +129,8 @@ export const pingGatewayAndVerify = async ({
|
||||
setTimeout(resolve, DEFAULT_RETRY_DELAY);
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
await quicClient.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,76 +143,125 @@ export const pingGatewayAndVerify = async ({
|
||||
interface TProxyServer {
|
||||
server: net.Server;
|
||||
port: number;
|
||||
cleanup: () => void;
|
||||
cleanup: () => Promise<void>;
|
||||
}
|
||||
|
||||
const setupProxyServer = ({
|
||||
const setupProxyServer = async ({
|
||||
targetPort,
|
||||
targetHost,
|
||||
tlsOptions = {},
|
||||
tlsOptions,
|
||||
relayHost,
|
||||
relayPort
|
||||
relayPort,
|
||||
identityId,
|
||||
orgId
|
||||
}: {
|
||||
targetHost: string;
|
||||
targetPort: number;
|
||||
relayPort: number;
|
||||
relayHost: string;
|
||||
tlsOptions: tls.TlsOptions;
|
||||
tlsOptions: TTlsOption;
|
||||
identityId: string;
|
||||
orgId: string;
|
||||
}): Promise<TProxyServer> => {
|
||||
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
|
||||
throw new BadRequestError({
|
||||
error: err as Error
|
||||
});
|
||||
});
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||
server.on("connection", async (clientSocket) => {
|
||||
server.on("connection", async (clientConn) => {
|
||||
try {
|
||||
const targetSocket = await createTLSConnection(relayHost, relayPort, tlsOptions);
|
||||
clientConn.setKeepAlive(true, 30000); // 30 seconds
|
||||
clientConn.setNoDelay(true);
|
||||
|
||||
targetSocket.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`), () => {
|
||||
clientSocket.on("data", (data) => {
|
||||
const flushed = targetSocket.write(data);
|
||||
if (!flushed) {
|
||||
clientSocket.pause();
|
||||
targetSocket.once("drain", () => {
|
||||
clientSocket.resume();
|
||||
});
|
||||
}
|
||||
});
|
||||
const stream = quicClient.connection.newStream("bidi");
|
||||
// Send FORWARD-TCP command
|
||||
const forwardWriter = stream.writable.getWriter();
|
||||
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
|
||||
forwardWriter.releaseLock();
|
||||
/* eslint-disable @typescript-eslint/no-misused-promises */
|
||||
// Set up bidirectional copy
|
||||
const setupCopy = async () => {
|
||||
// Client to QUIC
|
||||
// eslint-disable-next-line
|
||||
(async () => {
|
||||
try {
|
||||
const writer = stream.writable.getWriter();
|
||||
|
||||
targetSocket.on("data", (data) => {
|
||||
const flushed = clientSocket.write(data as string);
|
||||
if (!flushed) {
|
||||
targetSocket.pause();
|
||||
clientSocket.once("drain", () => {
|
||||
targetSocket.resume();
|
||||
// Create a handler for client data
|
||||
clientConn.on("data", async (chunk) => {
|
||||
await writer.write(chunk);
|
||||
});
|
||||
|
||||
// Handle client connection close
|
||||
clientConn.on("end", async () => {
|
||||
await writer.close();
|
||||
});
|
||||
|
||||
clientConn.on("error", async (err) => {
|
||||
await writer.abort(err);
|
||||
});
|
||||
} catch (err) {
|
||||
clientConn.destroy();
|
||||
}
|
||||
});
|
||||
})();
|
||||
|
||||
// QUIC to Client
|
||||
void (async () => {
|
||||
try {
|
||||
const reader = stream.readable.getReader();
|
||||
|
||||
let reading = true;
|
||||
while (reading) {
|
||||
const { value, done } = await reader.read();
|
||||
|
||||
if (done) {
|
||||
reading = false;
|
||||
clientConn.end(); // Close client connection when QUIC stream ends
|
||||
break;
|
||||
}
|
||||
|
||||
// Write data to TCP client
|
||||
const canContinue = clientConn.write(Buffer.from(value));
|
||||
|
||||
// Handle backpressure
|
||||
if (!canContinue) {
|
||||
await new Promise((res) => {
|
||||
clientConn.once("drain", res);
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
clientConn.destroy();
|
||||
}
|
||||
})();
|
||||
};
|
||||
await setupCopy();
|
||||
//
|
||||
// Handle connection closure
|
||||
clientConn.on("close", async () => {
|
||||
await stream.destroy();
|
||||
});
|
||||
|
||||
const cleanup = () => {
|
||||
clientSocket?.unpipe();
|
||||
clientSocket?.end();
|
||||
targetSocket?.unpipe();
|
||||
targetSocket?.end();
|
||||
const cleanup = async () => {
|
||||
clientConn?.destroy();
|
||||
await stream.destroy();
|
||||
};
|
||||
|
||||
clientSocket.on("error", (err) => {
|
||||
clientConn.on("error", (err) => {
|
||||
logger.error(err, "Client socket error");
|
||||
cleanup();
|
||||
void cleanup();
|
||||
reject(err);
|
||||
});
|
||||
|
||||
targetSocket.on("error", (err) => {
|
||||
logger.error(err, "Target socket error");
|
||||
cleanup();
|
||||
reject(err);
|
||||
});
|
||||
|
||||
clientSocket.on("end", cleanup);
|
||||
targetSocket.on("end", cleanup);
|
||||
clientConn.on("end", cleanup);
|
||||
} catch (err) {
|
||||
logger.error(err, "Failed to establish target connection:");
|
||||
clientSocket.end();
|
||||
clientConn.end();
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
@@ -192,6 +270,12 @@ const setupProxyServer = ({
|
||||
reject(err);
|
||||
});
|
||||
|
||||
server.on("close", async () => {
|
||||
await quicClient?.destroy();
|
||||
});
|
||||
|
||||
/* eslint-enable */
|
||||
|
||||
server.listen(0, () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
@@ -204,8 +288,9 @@ const setupProxyServer = ({
|
||||
resolve({
|
||||
server,
|
||||
port: address.port,
|
||||
cleanup: () => {
|
||||
cleanup: async () => {
|
||||
server.close();
|
||||
await quicClient?.destroy();
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -217,8 +302,7 @@ interface ProxyOptions {
|
||||
targetPort: number;
|
||||
relayHost: string;
|
||||
relayPort: number;
|
||||
tlsOptions?: tls.TlsOptions;
|
||||
maxRetries?: number;
|
||||
tlsOptions: TTlsOption;
|
||||
identityId: string;
|
||||
orgId: string;
|
||||
}
|
||||
@@ -227,30 +311,19 @@ export const withGatewayProxy = async (
|
||||
callback: (port: number) => Promise<void>,
|
||||
options: ProxyOptions
|
||||
): Promise<void> => {
|
||||
const {
|
||||
relayHost,
|
||||
relayPort,
|
||||
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
|
||||
|
||||
// Setup the proxy server
|
||||
const { port, cleanup } = await setupProxyServer({
|
||||
targetHost,
|
||||
targetPort,
|
||||
tlsOptions = {},
|
||||
maxRetries = DEFAULT_MAX_RETRIES,
|
||||
identityId,
|
||||
orgId
|
||||
} = options;
|
||||
|
||||
// First, try to ping the gateway
|
||||
await pingGatewayAndVerify({
|
||||
relayHost,
|
||||
relayPort,
|
||||
relayHost,
|
||||
tlsOptions,
|
||||
maxRetries,
|
||||
identityId,
|
||||
orgId
|
||||
});
|
||||
|
||||
// Setup the proxy server
|
||||
const { port, cleanup } = await setupProxyServer({ targetHost, targetPort, relayPort, relayHost, tlsOptions });
|
||||
|
||||
try {
|
||||
// Execute the callback with the allocated port
|
||||
await callback(port);
|
||||
@@ -259,6 +332,6 @@ export const withGatewayProxy = async (
|
||||
throw new BadRequestError({ message: (err as Error)?.message });
|
||||
} finally {
|
||||
// Ensure cleanup happens regardless of success or failure
|
||||
cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
|
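withGatewayProxy above verifies the relay over QUIC, allocates a local proxy port, hands it to the callback, and cleans up in the finally block. A hedged usage sketch, called from an async context; the hosts, ports, PEM values, and runHealthCheck helper are placeholders.

// Hypothetical caller: reach an internal Postgres host through the relay.
await withGatewayProxy(
  async (port) => {
    // Anything that speaks TCP to 127.0.0.1:<port> now reaches targetHost:targetPort.
    await runHealthCheck({ host: "127.0.0.1", port }); // placeholder helper
  },
  {
    targetHost: "10.0.0.12", // internal service behind the gateway
    targetPort: 5432,
    relayHost: "relay.example.com",
    relayPort: 8443,
    identityId: "<gateway-identity-id>",
    orgId: "<org-id>",
    tlsOptions: { ca: caPem, cert: clientCertPem, key: clientKeyPem } // PEM strings issued by the gateway service
  }
);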
@@ -1,11 +1,12 @@
|
||||
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
|
||||
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
|
||||
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
|
||||
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
|
||||
import { registerInstrumentations } from "@opentelemetry/instrumentation";
|
||||
import { HttpInstrumentation } from "@opentelemetry/instrumentation-http";
|
||||
import { Resource } from "@opentelemetry/resources";
|
||||
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
|
||||
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
|
||||
import tracer from "dd-trace";
|
||||
import dotenv from "dotenv";
|
||||
|
||||
import { initEnvConfig } from "../config/env";
|
||||
@@ -69,7 +70,7 @@ const initTelemetryInstrumentation = ({
|
||||
opentelemetry.metrics.setGlobalMeterProvider(meterProvider);
|
||||
|
||||
registerInstrumentations({
|
||||
instrumentations: [getNodeAutoInstrumentations()]
|
||||
instrumentations: [new HttpInstrumentation()]
|
||||
});
|
||||
};
|
||||
|
||||
@@ -86,6 +87,17 @@ const setupTelemetry = () => {
exportType: appCfg.OTEL_EXPORT_TYPE
});
}

if (appCfg.SHOULD_USE_DATADOG_TRACER) {
console.log("Initializing Datadog tracer");
tracer.init({
profiling: appCfg.DATADOG_PROFILING_ENABLED,
version: appCfg.INFISICAL_PLATFORM_VERSION,
env: appCfg.DATADOG_ENV,
service: appCfg.DATADOG_SERVICE,
hostname: appCfg.DATADOG_HOSTNAME
});
}
};

void setupTelemetry();

@@ -1096,7 +1096,9 @@ export const registerRoutes = async (
|
||||
permissionService,
|
||||
secretSharingDAL,
|
||||
orgDAL,
|
||||
kmsService
|
||||
kmsService,
|
||||
smtpService,
|
||||
userDAL
|
||||
});
|
||||
|
||||
const accessApprovalPolicyService = accessApprovalPolicyServiceFactory({
|
||||
|
@@ -7,6 +7,7 @@ import {
|
||||
ProjectRolesSchema,
|
||||
ProjectsSchema,
|
||||
SecretApprovalPoliciesSchema,
|
||||
SecretTagsSchema,
|
||||
UsersSchema
|
||||
} from "@app/db/schemas";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
@@ -232,3 +233,11 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
|
||||
kmsCertificateKeyId: true,
|
||||
auditLogsRetentionDays: true
|
||||
});
|
||||
|
||||
export const SanitizedTagSchema = SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
}).extend({
|
||||
name: z.string()
|
||||
});
|
||||
|
@@ -1,3 +1,4 @@
|
||||
import DOMPurify from "isomorphic-dompurify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { OrganizationsSchema, SuperAdminSchema, UsersSchema } from "@app/db/schemas";
|
||||
@@ -72,7 +73,21 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
message: "At least one login method should be enabled."
|
||||
}),
|
||||
slackClientId: z.string().optional(),
|
||||
slackClientSecret: z.string().optional()
|
||||
slackClientSecret: z.string().optional(),
|
||||
authConsentContent: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((content) => DOMPurify.sanitize(content) === content, {
|
||||
message: "Auth consent content contains unsafe HTML."
|
||||
})
|
||||
.optional(),
|
||||
pageFrameContent: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((content) => DOMPurify.sanitize(content) === content, {
|
||||
message: "Page frame content contains unsafe HTML."
|
||||
})
|
||||
.optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
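The refine above accepts content only when DOMPurify leaves it unchanged. A small illustration of why that catches unsafe HTML; the sample strings are placeholders.

import DOMPurify from "isomorphic-dompurify";

const plain = "By signing in you accept the acceptable-use policy.";
DOMPurify.sanitize(plain) === plain; // true - nothing to strip, refine passes

const unsafe = `<img src="x" onerror="alert(1)">`;
DOMPurify.sanitize(unsafe) === unsafe; // false - the onerror handler is stripped, refine rejects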
@@ -196,6 +211,27 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/user-management/users/:userId/admin-access",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
userId: z.string()
|
||||
})
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async (req) => {
|
||||
await server.services.superAdmin.grantServerAdminAccessToUser(req.params.userId);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/encryption-strategies",
|
||||
|
@@ -1,10 +1,11 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ActionProjectType, SecretFoldersSchema, SecretImportsSchema, SecretTagsSchema } from "@app/db/schemas";
|
||||
import { ActionProjectType, SecretFoldersSchema, SecretImportsSchema } from "@app/db/schemas";
|
||||
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import {
|
||||
ProjectPermissionDynamicSecretActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { DASHBOARD } from "@app/lib/api-docs";
|
||||
@@ -15,7 +16,7 @@ import { secretsLimit } from "@app/server/config/rateLimiter";
|
||||
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
|
||||
import { getUserAgentType } from "@app/server/plugins/audit-log";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedDynamicSecretSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { SanitizedDynamicSecretSchema, SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
|
||||
import { SecretsOrderBy } from "@app/services/secret/secret-types";
|
||||
@@ -116,16 +117,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
dynamicSecrets: SanitizedDynamicSecretSchema.extend({ environment: z.string() }).array().optional(),
|
||||
secrets: secretRawSchema
|
||||
.extend({
|
||||
secretValueHidden: z.boolean(),
|
||||
secretPath: z.string().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional()
|
||||
tags: SanitizedTagSchema.array().optional()
|
||||
})
|
||||
.array()
|
||||
.optional(),
|
||||
@@ -294,6 +289,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
if (remainingLimit > 0 && totalSecretCount > adjustedOffset) {
|
||||
secrets = await server.services.secret.getSecretsRawMultiEnv({
|
||||
viewSecretValue: true,
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
@@ -393,6 +389,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
.optional(),
|
||||
search: z.string().trim().describe(DASHBOARD.SECRET_DETAILS_LIST.search).optional(),
|
||||
tags: z.string().trim().transform(decodeURIComponent).describe(DASHBOARD.SECRET_DETAILS_LIST.tags).optional(),
|
||||
viewSecretValue: booleanSchema.default(true),
|
||||
includeSecrets: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeSecrets),
|
||||
includeFolders: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeFolders),
|
||||
includeDynamicSecrets: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeDynamicSecrets),
|
||||
@@ -410,16 +407,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
dynamicSecrets: SanitizedDynamicSecretSchema.array().optional(),
|
||||
secrets: secretRawSchema
|
||||
.extend({
|
||||
secretValueHidden: z.boolean(),
|
||||
secretPath: z.string().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional()
|
||||
tags: SanitizedTagSchema.array().optional()
|
||||
})
|
||||
.array()
|
||||
.optional(),
|
||||
@@ -601,23 +592,25 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
if (remainingLimit > 0 && totalSecretCount > adjustedOffset) {
|
||||
const secretsRaw = await server.services.secret.getSecretsRaw({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
environment,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
projectId,
|
||||
path: secretPath,
|
||||
orderBy,
|
||||
orderDirection,
|
||||
search,
|
||||
limit: remainingLimit,
|
||||
offset: adjustedOffset,
|
||||
tagSlugs: tags
|
||||
});
|
||||
|
||||
secrets = secretsRaw.secrets;
|
||||
secrets = (
|
||||
await server.services.secret.getSecretsRaw({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
viewSecretValue: req.query.viewSecretValue,
|
||||
throwOnMissingReadValuePermission: false,
|
||||
actorOrgId: req.permission.orgId,
|
||||
environment,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
projectId,
|
||||
path: secretPath,
|
||||
orderBy,
|
||||
orderDirection,
|
||||
search,
|
||||
limit: remainingLimit,
|
||||
offset: adjustedOffset,
|
||||
tagSlugs: tags
|
||||
})
|
||||
).secrets;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
projectId,
|
||||
@@ -696,16 +689,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
.optional(),
|
||||
secrets: secretRawSchema
|
||||
.extend({
|
||||
secretValueHidden: z.boolean(),
|
||||
secretPath: z.string().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional()
|
||||
tags: SanitizedTagSchema.array().optional()
|
||||
})
|
||||
.array()
|
||||
.optional()
|
||||
@@ -749,6 +736,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
|
||||
const secrets = await server.services.secret.getSecretsRawByFolderMappings(
|
||||
{
|
||||
filterByAction: ProjectPermissionSecretActions.DescribeSecret,
|
||||
projectId,
|
||||
folderMappings,
|
||||
filters: {
|
||||
@@ -862,22 +850,17 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
projectId: z.string().trim(),
|
||||
environment: z.string().trim(),
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
keys: z.string().trim().transform(decodeURIComponent)
|
||||
keys: z.string().trim().transform(decodeURIComponent),
|
||||
viewSecretValue: booleanSchema.default(false)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secrets: secretRawSchema
|
||||
.extend({
|
||||
secretValueHidden: z.boolean(),
|
||||
secretPath: z.string().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional()
|
||||
tags: SanitizedTagSchema.array().optional()
|
||||
})
|
||||
.array()
|
||||
.optional()
|
||||
@@ -886,7 +869,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const { secretPath, projectId, environment } = req.query;
|
||||
const { secretPath, projectId, environment, viewSecretValue } = req.query;
|
||||
|
||||
const keys = req.query.keys?.split(",").filter((key) => Boolean(key.trim())) ?? [];
|
||||
if (!keys.length) throw new BadRequestError({ message: "One or more keys required" });
|
||||
@@ -895,6 +878,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
viewSecretValue,
|
||||
environment,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
projectId,
|
||||
|
@@ -37,6 +37,7 @@ import { registerProjectMembershipRouter } from "./project-membership-router";
|
||||
import { registerProjectRouter } from "./project-router";
|
||||
import { registerSecretFolderRouter } from "./secret-folder-router";
|
||||
import { registerSecretImportRouter } from "./secret-import-router";
|
||||
import { registerSecretRequestsRouter } from "./secret-requests-router";
|
||||
import { registerSecretSharingRouter } from "./secret-sharing-router";
|
||||
import { registerSecretTagRouter } from "./secret-tag-router";
|
||||
import { registerSlackRouter } from "./slack-router";
|
||||
@@ -110,7 +111,15 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
|
||||
await server.register(registerIntegrationAuthRouter, { prefix: "/integration-auth" });
|
||||
await server.register(registerWebhookRouter, { prefix: "/webhooks" });
|
||||
await server.register(registerIdentityRouter, { prefix: "/identities" });
|
||||
await server.register(registerSecretSharingRouter, { prefix: "/secret-sharing" });
|
||||
|
||||
await server.register(
|
||||
async (secretSharingRouter) => {
|
||||
await secretSharingRouter.register(registerSecretSharingRouter, { prefix: "/shared" });
|
||||
await secretSharingRouter.register(registerSecretRequestsRouter, { prefix: "/requests" });
|
||||
},
|
||||
{ prefix: "/secret-sharing" }
|
||||
);
|
||||
|
||||
await server.register(registerUserEngagementRouter, { prefix: "/user-engagement" });
|
||||
await server.register(registerDashboardRouter, { prefix: "/dashboard" });
|
||||
await server.register(registerCmekRouter, { prefix: "/kms" });
|
||||
|
@@ -47,7 +47,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
.default("/")
|
||||
.transform(prefixWithSlash)
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(FOLDERS.CREATE.directory)
|
||||
.describe(FOLDERS.CREATE.directory),
|
||||
description: z.string().optional().nullable().describe(FOLDERS.CREATE.description)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -65,7 +66,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
projectId: req.body.workspaceId,
|
||||
path
|
||||
path,
|
||||
description: req.body.description
|
||||
});
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
@@ -76,7 +78,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
environment: req.body.environment,
|
||||
folderId: folder.id,
|
||||
folderName: folder.name,
|
||||
folderPath: path
|
||||
folderPath: path,
|
||||
...(req.body.description ? { description: req.body.description } : {})
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -125,7 +128,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
.default("/")
|
||||
.transform(prefixWithSlash)
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(FOLDERS.UPDATE.directory)
|
||||
.describe(FOLDERS.UPDATE.directory),
|
||||
description: z.string().optional().nullable().describe(FOLDERS.UPDATE.description)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -196,7 +200,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
.default("/")
|
||||
.transform(prefixWithSlash)
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(FOLDERS.UPDATE.path)
|
||||
.describe(FOLDERS.UPDATE.path),
|
||||
description: z.string().optional().nullable().describe(FOLDERS.UPDATE.description)
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
|
backend/src/server/routes/v1/secret-requests-router.ts (new file, 270 lines)
@@ -0,0 +1,270 @@
import { z } from "zod";

import { SecretSharingSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SecretSharingAccessType } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

export const registerSecretRequestsRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
}).extend({
isSecretValueSet: z.boolean(),
requester: z.object({
organizationName: z.string(),
firstName: z.string().nullish(),
lastName: z.string().nullish(),
username: z.string()
})
})
})
}
},
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.getSecretRequestById({
id: req.params.id,
actorOrgId: req.permission?.orgId,
actor: req.permission?.type,
actorId: req.permission?.id,
actorAuthMethod: req.permission?.authMethod
});

return { secretRequest };
}
});

server.route({
method: "POST",
url: "/:id/set-value",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
secretValue: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
})
})
}
},
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.setSecretRequestValue({
id: req.params.id,
actorOrgId: req.permission?.orgId,
actor: req.permission?.type,
actorId: req.permission?.id,
actorAuthMethod: req.permission?.authMethod,
secretValue: req.body.secretValue
});

return { secretRequest };
}
});

server.route({
method: "POST",
url: "/:id/reveal-value",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
}).extend({
secretValue: z.string()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.revealSecretRequestValue({
id: req.params.id,
actorOrgId: req.permission.orgId,
orgId: req.permission.orgId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod
});

return { secretRequest };
}
});

server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
secretRequest: SecretSharingSchema.omit({
encryptedSecret: true,
tag: true,
iv: true,
encryptedValue: true
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const secretRequest = await req.server.services.secretSharing.deleteSharedSecretById({
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
sharedSecretId: req.params.id,
orgId: req.permission.orgId,
actor: req.permission.type,
type: SecretSharingType.Request
});

await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretRequestDeleted,
distinctId: getTelemetryDistinctId(req),
properties: {
secretRequestId: req.params.id,
organizationId: req.permission.orgId,
...req.auditLogInfo
}
});
return { secretRequest };
}
});

server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0),
limit: z.coerce.number().min(1).max(100).default(25)
}),
response: {
200: z.object({
secrets: z.array(SecretSharingSchema),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { secrets, totalCount } = await req.server.services.secretSharing.getSharedSecrets({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
type: SecretSharingType.Request,
...req.query
});

return {
secrets,
totalCount
};
}
});

server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
name: z.string().max(50).optional(),
expiresAt: z.string(),
accessType: z.nativeEnum(SecretSharingAccessType).default(SecretSharingAccessType.Organization)
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const shareRequest = await req.server.services.secretSharing.createSecretRequest({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});

await server.services.auditLog.createAuditLog({
orgId: req.permission.orgId,
...req.auditLogInfo,
event: {
type: EventType.CREATE_SECRET_REQUEST,
metadata: {
accessType: req.body.accessType,
name: req.body.name,
id: shareRequest.id
}
}
});

await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretRequestCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
secretRequestId: shareRequest.id,
organizationId: req.permission.orgId,
secretRequestName: req.body.name,
...req.auditLogInfo
}
});

return { id: shareRequest.id };
}
});
};
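Since the v1 index mounts this new router under `/secret-sharing/requests`, the end-to-end flow from a client could look roughly like the sketch below. The `/api/v1` prefix, host, and JWT handling are assumptions drawn from the usual Infisical API layout rather than from this diff:

```ts
// Rough client-side sketch of the secret-request flow introduced by the new router.
const jwt = process.env.INFISICAL_JWT;
const base = "https://app.infisical.com/api/v1/secret-sharing/requests";
const headers = { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" };

// 1. The requester creates a request (JWT-authenticated route).
const { id } = await (
  await fetch(base, {
    method: "POST",
    headers,
    body: JSON.stringify({
      name: "Prod DB password",
      expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString()
    })
  })
).json();

// 2. A respondent fills in the value; permission checks happen server-side.
await fetch(`${base}/${id}/set-value`, {
  method: "POST",
  headers,
  body: JSON.stringify({ secretValue: "s3cr3t" })
});

// 3. The requester reveals the value.
const { secretRequest } = await (
  await fetch(`${base}/${id}/reveal-value`, { method: "POST", headers })
).json();
console.log(secretRequest.secretValue);
```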
@@ -11,6 +11,7 @@ import {
} from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";

export const registerSecretSharingRouter = async (server: FastifyZodProvider) => {
server.route({
@@ -38,6 +39,7 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
type: SecretSharingType.Share,
...req.query
});

@@ -211,7 +213,8 @@ export const registerSecretSharingRouter = async (server: FastifyZodProvider) =>
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
sharedSecretId
sharedSecretId,
type: SecretSharingType.Share
});

await server.services.auditLog.createAuditLog({
@@ -94,7 +94,7 @@ export const registerServiceTokenRouter = async (server: FastifyZodProvider) =>
iv: z.string().trim(),
tag: z.string().trim(),
expiresIn: z.number().nullable(),
permissions: z.enum(["read", "write"]).array()
permissions: z.enum(["read", "write", "readValue"]).array()
}),
response: {
200: z.object({
@@ -1,13 +1,7 @@
import picomatch from "picomatch";
import { z } from "zod";

import {
SecretApprovalRequestsSchema,
SecretsSchema,
SecretTagsSchema,
SecretType,
ServiceTokenScopes
} from "@app/db/schemas";
import { SecretApprovalRequestsSchema, SecretsSchema, SecretType, ServiceTokenScopes } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@@ -23,7 +17,7 @@ import { SecretOperations, SecretProtectionType } from "@app/services/secret/sec
import { SecretUpdateMode } from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

import { secretRawSchema } from "../sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "../sanitizedSchemas";

const SecretReferenceNode = z.object({
key: z.string(),
@@ -31,6 +25,14 @@ const SecretReferenceNode = z.object({
environment: z.string(),
secretPath: z.string()
});

const convertStringBoolean = (defaultValue: boolean = false) => {
return z
.enum(["true", "false"])
.default(defaultValue ? "true" : "false")
.transform((value) => value === "true");
};

type TSecretReferenceNode = z.infer<typeof SecretReferenceNode> & { children: TSecretReferenceNode[] };

const SecretReferenceNodeTree: z.ZodType<TSecretReferenceNode> = SecretReferenceNode.extend({
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).merge(
|
||||
z.object({
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
})
|
||||
)
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).extend({
|
||||
tags: SanitizedTagSchema.array()
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
@@ -139,13 +133,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.object({
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).extend({
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
tags: SanitizedTagSchema.array()
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -247,21 +235,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
workspaceSlug: z.string().trim().optional().describe(RAW_SECRETS.LIST.workspaceSlug),
|
||||
environment: z.string().trim().optional().describe(RAW_SECRETS.LIST.environment),
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash).describe(RAW_SECRETS.LIST.secretPath),
|
||||
expandSecretReferences: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
.describe(RAW_SECRETS.LIST.expand),
|
||||
recursive: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
.describe(RAW_SECRETS.LIST.recursive),
|
||||
include_imports: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
.describe(RAW_SECRETS.LIST.includeImports),
|
||||
viewSecretValue: convertStringBoolean(true).describe(RAW_SECRETS.LIST.viewSecretValue),
|
||||
expandSecretReferences: convertStringBoolean().describe(RAW_SECRETS.LIST.expand),
|
||||
recursive: convertStringBoolean().describe(RAW_SECRETS.LIST.recursive),
|
||||
include_imports: convertStringBoolean().describe(RAW_SECRETS.LIST.includeImports),
|
||||
tagSlugs: z
|
||||
.string()
|
||||
.describe(RAW_SECRETS.LIST.tagSlugs)
|
||||
@@ -274,15 +251,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
secrets: secretRawSchema
|
||||
.extend({
|
||||
secretPath: z.string().optional(),
|
||||
secretValueHidden: z.boolean(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional()
|
||||
tags: SanitizedTagSchema.array().optional()
|
||||
})
|
||||
.array(),
|
||||
imports: z
|
||||
@@ -293,6 +264,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
secrets: secretRawSchema
|
||||
.omit({ createdAt: true, updatedAt: true })
|
||||
.extend({
|
||||
secretValueHidden: z.boolean(),
|
||||
secretMetadata: ResourceMetadataSchema.optional()
|
||||
})
|
||||
.array()
|
||||
@@ -342,6 +314,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
expandSecretReferences: req.query.expandSecretReferences,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
projectId: workspaceId,
|
||||
viewSecretValue: req.query.viewSecretValue,
|
||||
path: secretPath,
|
||||
metadataFilter: req.query.metadataFilter,
|
||||
includeImports: req.query.include_imports,
|
||||
@@ -376,6 +349,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return { secrets, imports };
|
||||
}
|
||||
});
|
||||
@@ -403,28 +377,15 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash).describe(RAW_SECRETS.GET.secretPath),
|
||||
version: z.coerce.number().optional().describe(RAW_SECRETS.GET.version),
|
||||
type: z.nativeEnum(SecretType).default(SecretType.Shared).describe(RAW_SECRETS.GET.type),
|
||||
expandSecretReferences: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
.describe(RAW_SECRETS.GET.expand),
|
||||
include_imports: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
.describe(RAW_SECRETS.GET.includeImports)
|
||||
viewSecretValue: convertStringBoolean(true).describe(RAW_SECRETS.GET.viewSecretValue),
|
||||
expandSecretReferences: convertStringBoolean().describe(RAW_SECRETS.GET.expand),
|
||||
include_imports: convertStringBoolean().describe(RAW_SECRETS.GET.includeImports)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
secret: secretRawSchema.extend({
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
.optional(),
|
||||
secretValueHidden: z.boolean(),
|
||||
tags: SanitizedTagSchema.array().optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional()
|
||||
})
|
||||
})
|
||||
@@ -456,6 +417,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
expandSecretReferences: req.query.expandSecretReferences,
|
||||
environment,
|
||||
projectId: workspaceId,
|
||||
viewSecretValue: req.query.viewSecretValue,
|
||||
projectSlug: workspaceSlug,
|
||||
path: secretPath,
|
||||
secretName: req.params.secretName,
|
||||
@@ -537,7 +499,12 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
.optional()
|
||||
.nullable()
|
||||
.describe(RAW_SECRETS.CREATE.secretReminderRepeatDays),
|
||||
secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.CREATE.secretReminderNote)
|
||||
secretReminderNote: z
|
||||
.string()
|
||||
.max(1024, "Secret reminder note cannot exceed 1024 characters")
|
||||
.optional()
|
||||
.nullable()
|
||||
.describe(RAW_SECRETS.CREATE.secretReminderNote)
|
||||
}),
|
||||
response: {
|
||||
200: z.union([
|
||||
@@ -640,7 +607,12 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
tagIds: z.string().array().optional().describe(RAW_SECRETS.UPDATE.tagIds),
|
||||
metadata: z.record(z.string()).optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.UPDATE.secretReminderNote),
|
||||
secretReminderNote: z
|
||||
.string()
|
||||
.max(1024, "Secret reminder note cannot exceed 1024 characters")
|
||||
.optional()
|
||||
.nullable()
|
||||
.describe(RAW_SECRETS.UPDATE.secretReminderNote),
|
||||
secretReminderRepeatDays: z
|
||||
.number()
|
||||
.optional()
|
||||
@@ -652,7 +624,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.union([
|
||||
z.object({
|
||||
secret: secretRawSchema
|
||||
secret: secretRawSchema.extend({
|
||||
secretValueHidden: z.boolean()
|
||||
})
|
||||
}),
|
||||
z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled")
|
||||
])
|
||||
@@ -748,7 +722,9 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.union([
|
||||
z.object({
|
||||
secret: secretRawSchema
|
||||
secret: secretRawSchema.extend({
|
||||
secretValueHidden: z.boolean()
|
||||
})
|
||||
}),
|
||||
z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled")
|
||||
])
|
||||
@@ -770,6 +746,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
if (secretOperation.type === SecretProtectionType.Approval) {
|
||||
return { approval: secretOperation.approval };
|
||||
}
|
||||
|
||||
const { secret } = secretOperation;
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
@@ -832,13 +809,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
workspace: z.string(),
|
||||
environment: z.string(),
|
||||
secretPath: z.string().optional(),
|
||||
tags: SecretTagsSchema.pick({
|
||||
id: true,
|
||||
slug: true,
|
||||
color: true
|
||||
})
|
||||
.extend({ name: z.string() })
|
||||
.array()
|
||||
tags: SanitizedTagSchema.array()
|
||||
})
|
||||
.array(),
|
||||
imports: z
|
||||
@@ -934,10 +905,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
type: z.nativeEnum(SecretType).default(SecretType.Shared),
|
||||
version: z.coerce.number().optional(),
|
||||
include_imports: z
|
||||
.enum(["true", "false"])
|
||||
.default("false")
|
||||
.transform((value) => value === "true")
|
||||
include_imports: convertStringBoolean()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -1208,6 +1176,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
z.object({
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).merge(
|
||||
z.object({
|
||||
secretValueHidden: z.boolean(),
|
||||
_id: z.string(),
|
||||
workspace: z.string(),
|
||||
environment: z.string()
|
||||
@@ -1377,13 +1346,12 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.union([
|
||||
z.object({
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).merge(
|
||||
z.object({
|
||||
_id: z.string(),
|
||||
workspace: z.string(),
|
||||
environment: z.string()
|
||||
})
|
||||
)
|
||||
secret: SecretsSchema.omit({ secretBlindIndex: true }).extend({
|
||||
_id: z.string(),
|
||||
secretValueHidden: z.boolean(),
|
||||
workspace: z.string(),
|
||||
environment: z.string()
|
||||
})
|
||||
}),
|
||||
z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled")
|
||||
])
|
||||
@@ -1695,7 +1663,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.union([
|
||||
z.object({
|
||||
secrets: SecretsSchema.omit({ secretBlindIndex: true }).array()
|
||||
secrets: SecretsSchema.omit({ secretBlindIndex: true }).extend({ secretValueHidden: z.boolean() }).array()
|
||||
}),
|
||||
z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled")
|
||||
])
|
||||
@@ -1810,7 +1778,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.union([
|
||||
z.object({
|
||||
secrets: SecretsSchema.omit({ secretBlindIndex: true }).array()
|
||||
secrets: SecretsSchema.omit({ secretBlindIndex: true })
|
||||
.extend({
|
||||
secretValueHidden: z.boolean()
|
||||
})
|
||||
.array()
|
||||
}),
|
||||
z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled")
|
||||
])
|
||||
@@ -2053,7 +2025,12 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding),
|
||||
newSecretName: SecretNameSchema.optional().describe(RAW_SECRETS.UPDATE.newSecretName),
|
||||
tagIds: z.string().array().optional().describe(RAW_SECRETS.UPDATE.tagIds),
|
||||
secretReminderNote: z.string().optional().nullable().describe(RAW_SECRETS.UPDATE.secretReminderNote),
|
||||
secretReminderNote: z
|
||||
.string()
|
||||
.max(1024, "Secret reminder note cannot exceed 1024 characters")
|
||||
.optional()
|
||||
.nullable()
|
||||
.describe(RAW_SECRETS.UPDATE.secretReminderNote),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
secretReminderRepeatDays: z
|
||||
.number()
|
||||
@@ -2067,7 +2044,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.union([
|
||||
z.object({
|
||||
secrets: secretRawSchema.array()
|
||||
secrets: secretRawSchema.extend({ secretValueHidden: z.boolean() }).array()
|
||||
}),
|
||||
z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled")
|
||||
])
|
||||
@@ -2189,7 +2166,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
|
||||
response: {
|
||||
200: z.union([
|
||||
z.object({
|
||||
secrets: secretRawSchema.array()
|
||||
secrets: secretRawSchema
|
||||
.extend({
|
||||
secretValueHidden: z.boolean()
|
||||
})
|
||||
.array()
|
||||
}),
|
||||
z.object({ approval: SecretApprovalRequestsSchema }).describe("When secret protection policy is enabled")
|
||||
])
|
||||
|
@@ -31,9 +31,9 @@ export type TImportDataIntoInfisicalDTO = {
projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findLastEnvPosition" | "create" | "findOne">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;

secretDAL: Pick<TSecretV2BridgeDALFactory, "insertMany" | "upsertSecretReferences" | "findBySecretKeys">;
secretDAL: Pick<TSecretV2BridgeDALFactory, "insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "find">;
secretVersionDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "create">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create" | "find">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "create">;

resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany">;
@@ -27,9 +27,9 @@ export type TExternalMigrationQueueFactoryDep = {
projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findLastEnvPosition" | "create" | "findOne">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;

secretDAL: Pick<TSecretV2BridgeDALFactory, "insertMany" | "upsertSecretReferences" | "findBySecretKeys">;
secretDAL: Pick<TSecretV2BridgeDALFactory, "insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "find">;
secretVersionDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "create">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "create" | "find">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany" | "create">;

folderDAL: Pick<TSecretFolderDALFactory, "create" | "findBySecretPath" | "findOne" | "findById">;
@@ -134,7 +134,15 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
* Return list of names of apps for Vercel integration
* This is re-used for getting custom environments for Vercel
*/
export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string | null; accessToken: string }) => {
export const getAppsVercel = async ({
accessToken,
teamId,
includeCustomEnvironments
}: {
teamId?: string | null;
accessToken: string;
includeCustomEnvironments?: boolean;
}) => {
const apps: Array<{ name: string; appId: string; customEnvironments: Array<{ slug: string; id: string }> }> = [];

const limit = "20";
@@ -145,12 +153,6 @@ export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string |
projects: {
name: string;
id: string;
customEnvironments?: {
id: string;
type: string;
description: string;
slug: string;
}[];
}[];
pagination: {
count: number;
@@ -159,6 +161,20 @@ export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string |
};
}

const getProjectCustomEnvironments = async (projectId: string) => {
const { data } = await request.get<{ environments: { id: string; slug: string }[] }>(
`${IntegrationUrls.VERCEL_API_URL}/v9/projects/${projectId}/custom-environments`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);

return data.environments;
};

while (hasMorePages) {
const params: { [key: string]: string } = {
limit
@@ -180,17 +196,38 @@ export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string |
}
});

data.projects.forEach((a) => {
apps.push({
name: a.name,
appId: a.id,
customEnvironments:
a.customEnvironments?.map((env) => ({
slug: env.slug,
id: env.id
})) ?? []
if (includeCustomEnvironments) {
const projectsWithCustomEnvironments = await Promise.all(
data.projects.map(async (a) => {
const customEnvironments = await getProjectCustomEnvironments(a.id);

return {
...a,
customEnvironments
};
})
);

projectsWithCustomEnvironments.forEach((a) => {
apps.push({
name: a.name,
appId: a.id,
customEnvironments:
a.customEnvironments?.map((env) => ({
slug: env.slug,
id: env.id
})) ?? []
});
});
});
} else {
data.projects.forEach((a) => {
apps.push({
name: a.name,
appId: a.id,
customEnvironments: []
});
});
}

next = data.pagination.next;
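Custom environments are now fetched per project from Vercel's `/v9/projects/:id/custom-environments` endpoint, and only when the caller opts in. A hedged sketch of calling the updated helper; the import path and token handling are assumptions:

```ts
// Assumed import path for the helper shown in the diff above.
import { getAppsVercel } from "@app/services/integration-auth/integration-app-list";

const apps = await getAppsVercel({
  accessToken: process.env.VERCEL_TOKEN as string,
  teamId: null,
  // Opt in to the extra per-project custom-environment lookups added in this change.
  includeCustomEnvironments: true
});

for (const app of apps) {
  // Each app now carries { slug, id } pairs for its custom environments (empty when opted out).
  console.log(app.name, app.customEnvironments.map((env) => env.slug));
}
```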
@@ -1851,6 +1851,7 @@ export const integrationAuthServiceFactory = ({
const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);

const vercelApps = await getAppsVercel({
includeCustomEnvironments: true,
accessToken,
teamId
});
@@ -68,7 +68,8 @@ const getIntegrationSecretsV2 = async (
secretDAL: secretV2BridgeDAL,
secretImportDAL,
secretImports,
hasSecretAccess: () => true
hasSecretAccess: () => true,
viewSecretValue: true
});

for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
@@ -2,7 +2,11 @@ import { ForbiddenError, subject } from "@casl/ability";

import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";

@@ -92,7 +96,7 @@ export const integrationServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Integrations);

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSecretActions.ReadValue,
subject(ProjectPermissionSub.Secrets, {
environment: sourceEnvironment,
secretPath
@@ -175,7 +179,7 @@ export const integrationServiceFactory = ({

if (environment || secretPath) {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSecretActions.ReadValue,
subject(ProjectPermissionSub.Secrets, {
environment: newEnvironment,
secretPath: newSecretPath
@@ -11,7 +11,11 @@ import {
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { InfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-types";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
@@ -747,7 +751,10 @@ export const projectServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSub.Secrets
);

const project = await projectDAL.findProjectById(projectId);

@@ -20,7 +20,7 @@ type TDailyResourceCleanUpQueueServiceFactoryDep = {
secretDAL: Pick<TSecretDALFactory, "pruneSecretReminders">;
secretFolderVersionDAL: Pick<TSecretFolderVersionDALFactory, "pruneExcessVersions">;
snapshotDAL: Pick<TSnapshotDALFactory, "pruneExcessSnapshots">;
secretSharingDAL: Pick<TSecretSharingDALFactory, "pruneExpiredSharedSecrets">;
secretSharingDAL: Pick<TSecretSharingDALFactory, "pruneExpiredSharedSecrets" | "pruneExpiredSecretRequests">;
queueService: TQueueServiceFactory;
};

@@ -45,6 +45,7 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
await identityAccessTokenDAL.removeExpiredTokens();
await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets();
await secretSharingDAL.pruneExpiredSharedSecrets();
await secretSharingDAL.pruneExpiredSecretRequests();
await snapshotDAL.pruneExcessSnapshots();
await secretVersionDAL.pruneExcessVersions();
await secretVersionV2DAL.pruneExcessVersions();
@@ -50,7 +50,8 @@ export const secretFolderServiceFactory = ({
actorOrgId,
name,
environment,
path: secretPath
path: secretPath,
description
}: TCreateFolderDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
@@ -121,7 +122,10 @@ export const secretFolderServiceFactory = ({
}
}

const doc = await folderDAL.create({ name, envId: env.id, version: 1, parentId: parentFolderId }, tx);
const doc = await folderDAL.create(
{ name, envId: env.id, version: 1, parentId: parentFolderId, description },
tx
);
await folderVersionDAL.create(
{
name: doc.name,
@@ -170,7 +174,7 @@ export const secretFolderServiceFactory = ({
const result = await folderDAL.transaction(async (tx) =>
Promise.all(
folders.map(async (newFolder) => {
const { environment, path: secretPath, id, name } = newFolder;
const { environment, path: secretPath, id, name, description } = newFolder;

const parentFolder = await folderDAL.findBySecretPath(project.id, environment, secretPath);
if (!parentFolder) {
@@ -217,7 +221,7 @@ export const secretFolderServiceFactory = ({

const [doc] = await folderDAL.update(
{ envId: env.id, id: folder.id, parentId: parentFolder.id },
{ name },
{ name, description },
tx
);
await folderVersionDAL.create(
@@ -259,7 +263,8 @@ export const secretFolderServiceFactory = ({
name,
environment,
path: secretPath,
id
id,
description
}: TUpdateFolderDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
@@ -312,7 +317,7 @@ export const secretFolderServiceFactory = ({
const newFolder = await folderDAL.transaction(async (tx) => {
const [doc] = await folderDAL.update(
{ envId: env.id, id: folder.id, parentId: parentFolder.id, isReserved: false },
{ name },
{ name, description },
tx
);
await folderVersionDAL.create(
@@ -9,6 +9,7 @@ export type TCreateFolderDTO = {
environment: string;
path: string;
name: string;
description?: string | null;
} & TProjectPermission;

export type TUpdateFolderDTO = {
@@ -16,6 +17,7 @@ export type TUpdateFolderDTO = {
path: string;
id: string;
name: string;
description?: string | null;
} & TProjectPermission;

export type TUpdateManyFoldersDTO = {
@@ -25,6 +27,7 @@ export type TUpdateManyFoldersDTO = {
path: string;
id: string;
name: string;
description?: string | null;
}[];
} & Omit<TProjectPermission, "projectId">;

@@ -3,6 +3,7 @@ import { groupBy, unique } from "@app/lib/fn";

import { ResourceMetadataDTO } from "../resource-metadata/resource-metadata-schema";
import { TSecretDALFactory } from "../secret/secret-dal";
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "../secret/secret-fns";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
import { TSecretImportDALFactory } from "./secret-import-dal";
@@ -32,6 +33,12 @@ type TSecretImportSecretsV2 = {
folderId: string | undefined;
importFolderId: string;
secrets: (TSecretsV2 & {
secretTags: {
slug: string;
name: string;
color?: string | null;
id: string;
}[];
workspace: string;
environment: string;
_id: string;
@@ -39,6 +46,7 @@ type TSecretImportSecretsV2 = {
// akhilmhdh: yes i know you can put ?.
// But for somereason ts consider ? and undefined explicit as different just ts things
secretValue: string;
secretValueHidden: boolean;
secretComment: string;
secretMetadata?: ResourceMetadataDTO;
})[];
@@ -150,12 +158,14 @@ export const fnSecretsV2FromImports = async ({
secretImportDAL,
decryptor,
expandSecretReferences,
hasSecretAccess
hasSecretAccess,
viewSecretValue
}: {
secretImports: (Omit<TSecretImports, "importEnv"> & {
importEnv: { id: string; slug: string; name: string };
})[];
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">;
viewSecretValue: boolean;
secretDAL: Pick<TSecretV2BridgeDALFactory, "find">;
secretImportDAL: Pick<TSecretImportDALFactory, "findByFolderIds">;
decryptor: (value?: Buffer | null) => string;
@@ -168,9 +178,14 @@ export const fnSecretsV2FromImports = async ({
hasSecretAccess: (environment: string, secretPath: string, secretName: string, secretTagSlugs: string[]) => boolean;
}) => {
const cyclicDetector = new Set();
const stack: { secretImports: typeof rootSecretImports; depth: number; parentImportedSecrets: TSecretsV2[] }[] = [
{ secretImports: rootSecretImports, depth: 0, parentImportedSecrets: [] }
];
const stack: {
secretImports: typeof rootSecretImports;
depth: number;
parentImportedSecrets: (TSecretsV2 & {
secretValueHidden: boolean;
secretTags: { slug: string; name: string; id: string; color?: string | null }[];
})[];
}[] = [{ secretImports: rootSecretImports, depth: 0, parentImportedSecrets: [] }];

const processedImports: TSecretImportSecretsV2[] = [];

@@ -229,7 +244,9 @@ export const fnSecretsV2FromImports = async ({
.map((item) => ({
...item,
secretKey: item.key,
secretValue: decryptor(item.encryptedValue),
secretValue: viewSecretValue ? decryptor(item.encryptedValue) : INFISICAL_SECRET_VALUE_HIDDEN_MASK,
secretValueHidden: !viewSecretValue,
secretTags: item.tags,
secretComment: decryptor(item.encryptedComment),
environment: importEnv.slug,
workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend.
@@ -267,6 +284,8 @@ export const fnSecretsV2FromImports = async ({
processedImport.secrets = unique(processedImport.secrets, (i) => i.key);
return Promise.allSettled(
processedImport.secrets.map(async (decryptedSecret, index) => {
if (decryptedSecret.secretValueHidden) return;

const expandedSecretValue = await expandSecretReferences({
value: decryptedSecret.secretValue,
secretPath: processedImport.secretPath,
@@ -5,7 +5,11 @@ import { ForbiddenError, subject } from "@casl/ability";

import { ActionProjectType, TableName } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getReplicationFolderName } from "@app/ee/services/secret-replication/secret-replication-service";
import { BadRequestError, NotFoundError } from "@app/lib/errors";

@@ -90,7 +94,7 @@ export const secretImportServiceFactory = ({

// check if user has permission to import from target path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSecretActions.DescribeSecret,
subject(ProjectPermissionSub.Secrets, {
environment: data.environment,
secretPath: data.path
@@ -402,7 +406,7 @@ export const secretImportServiceFactory = ({

// check if user has permission to import from target path
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSecretActions.DescribeSecret,
subject(ProjectPermissionSub.Secrets, {
environment: secretImportDoc.importEnv.slug,
secretPath: secretImportDoc.importPath
@@ -596,7 +600,7 @@ export const secretImportServiceFactory = ({
const secretImports = await secretImportDAL.find({ folderId: folder.id, isReplication: false });
const allowedImports = secretImports.filter((el) =>
permission.can(
ProjectPermissionActions.Read,
ProjectPermissionSecretActions.ReadValue,
subject(ProjectPermissionSub.Secrets, {
environment: el.importEnv.slug,
secretPath: el.importPath
@@ -642,12 +646,13 @@ export const secretImportServiceFactory = ({
const importedSecrets = await fnSecretsV2FromImports({
secretImports,
folderDAL,
viewSecretValue: true,
secretDAL: secretV2BridgeDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
hasSecretAccess: (expandEnvironment, expandSecretPath, expandSecretKey, expandSecretTags) =>
permission.can(
ProjectPermissionActions.Read,
ProjectPermissionSecretActions.ReadValue,
subject(ProjectPermissionSub.Secrets, {
environment: expandEnvironment,
secretPath: expandSecretPath,
@@ -667,7 +672,7 @@ export const secretImportServiceFactory = ({

const allowedImports = secretImports.filter((el) =>
permission.can(
ProjectPermissionActions.Read,
ProjectPermissionSecretActions.ReadValue,
subject(ProjectPermissionSub.Secrets, {
environment: el.importEnv.slug,
secretPath: el.importPath
@@ -683,7 +688,10 @@ export const secretImportServiceFactory = ({
return importedSecrets.map((el) => ({
...el,
secrets: el.secrets.map((encryptedSecret) =>
decryptSecretRaw({ ...encryptedSecret, workspace: projectId, environment, secretPath }, botKey)
decryptSecretRaw(
{ ...encryptedSecret, workspace: projectId, environment, secretPath, secretValueHidden: false },
botKey
)
)
}));
};
@@ -2,17 +2,61 @@ import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TSecretSharing } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { DatabaseError, NotFoundError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";

import { SecretSharingType } from "./secret-sharing-types";

export type TSecretSharingDALFactory = ReturnType<typeof secretSharingDALFactory>;

export const secretSharingDALFactory = (db: TDbClient) => {
const sharedSecretOrm = ormify(db, TableName.SecretSharing);

const countAllUserOrgSharedSecrets = async ({ orgId, userId }: { orgId: string; userId: string }) => {
const getSecretRequestById = async (id: string) => {
const repDb = db.replicaNode();

const secretRequest = await repDb(TableName.SecretSharing)
.leftJoin(TableName.Organization, `${TableName.Organization}.id`, `${TableName.SecretSharing}.orgId`)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SecretSharing}.userId`)
.where(`${TableName.SecretSharing}.id`, id)
.where(`${TableName.SecretSharing}.type`, SecretSharingType.Request)
.select(
repDb.ref("name").withSchema(TableName.Organization).as("orgName"),
repDb.ref("firstName").withSchema(TableName.Users).as("requesterFirstName"),
repDb.ref("lastName").withSchema(TableName.Users).as("requesterLastName"),
repDb.ref("username").withSchema(TableName.Users).as("requesterUsername")
)
.select(selectAllTableCols(TableName.SecretSharing))
.first();

if (!secretRequest) {
throw new NotFoundError({
message: `Secret request with ID '${id}' not found`
});
}

return {
...secretRequest,
requester: {
organizationName: secretRequest.orgName,
firstName: secretRequest.requesterFirstName,
lastName: secretRequest.requesterLastName,
username: secretRequest.requesterUsername
}
};
};

const countAllUserOrgSharedSecrets = async ({
orgId,
userId,
type
}: {
orgId: string;
userId: string;
type: SecretSharingType;
}) => {
try {
interface CountResult {
count: string;
@@ -22,6 +66,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
.replicaNode()(TableName.SecretSharing)
.where(`${TableName.SecretSharing}.orgId`, orgId)
.where(`${TableName.SecretSharing}.userId`, userId)
.where(`${TableName.SecretSharing}.type`, type)
.count("*")
.first();

@@ -38,6 +83,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
const docs = await (tx || db)(TableName.SecretSharing)
.where("expiresAt", "<", today)
.andWhere("encryptedValue", "<>", "")
.andWhere("type", SecretSharingType.Share)
.update({
encryptedValue: "",
tag: "",
@@ -50,6 +96,26 @@ export const secretSharingDALFactory = (db: TDbClient) => {
}
};

const pruneExpiredSecretRequests = async (tx?: Knex) => {
logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired secret requests started`);
try {
const today = new Date();

const docs = await (tx || db)(TableName.SecretSharing)
.whereNotNull("expiresAt")
.andWhere("expiresAt", "<", today)
.andWhere("encryptedSecret", null)
.andWhere("type", SecretSharingType.Request)
.delete();

logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired secret requests completed`);

return docs;
} catch (error) {
throw new DatabaseError({ error, name: "pruneExpiredSecretRequests" });
}
};

const findActiveSharedSecrets = async (filters: Partial<TSecretSharing>, tx?: Knex) => {
try {
const now = new Date();
@@ -57,6 +123,7 @@ export const secretSharingDALFactory = (db: TDbClient) => {
.where(filters)
.andWhere("expiresAt", ">", now)
.andWhere("encryptedValue", "<>", "")
.andWhere("type", SecretSharingType.Share)
.select(selectAllTableCols(TableName.SecretSharing))
.orderBy("expiresAt", "asc");
} catch (error) {
@@ -86,7 +153,9 @@ export const secretSharingDALFactory = (db: TDbClient) => {
...sharedSecretOrm,
countAllUserOrgSharedSecrets,
pruneExpiredSharedSecrets,
pruneExpiredSecretRequests,
softDeleteById,
findActiveSharedSecrets
findActiveSharedSecrets,
getSecretRequestById
};
};
@@ -4,26 +4,36 @@ import bcrypt from "bcrypt";
|
||||
|
||||
import { TSecretSharing } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { SecretSharingAccessType } from "@app/lib/types";
|
||||
import { isUuidV4 } from "@app/lib/validator";
|
||||
|
||||
import { TKmsServiceFactory } from "../kms/kms-service";
|
||||
import { TOrgDALFactory } from "../org/org-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
|
||||
import { TUserDALFactory } from "../user/user-dal";
|
||||
import { TSecretSharingDALFactory } from "./secret-sharing-dal";
|
||||
import {
|
||||
SecretSharingType,
|
||||
TCreatePublicSharedSecretDTO,
|
||||
TCreateSecretRequestDTO,
|
||||
TCreateSharedSecretDTO,
|
||||
TDeleteSharedSecretDTO,
|
||||
TGetActiveSharedSecretByIdDTO,
|
||||
TGetSharedSecretsDTO
|
||||
TGetSecretRequestByIdDTO,
|
||||
TGetSharedSecretsDTO,
|
||||
TRevealSecretRequestValueDTO,
|
||||
TSetSecretRequestValueDTO
|
||||
} from "./secret-sharing-types";
|
||||
|
||||
type TSecretSharingServiceFactoryDep = {
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
secretSharingDAL: TSecretSharingDALFactory;
|
||||
orgDAL: TOrgDALFactory;
|
||||
userDAL: TUserDALFactory;
|
||||
kmsService: TKmsServiceFactory;
|
||||
smtpService: TSmtpService;
|
||||
};
|
||||
|
||||
export type TSecretSharingServiceFactory = ReturnType<typeof secretSharingServiceFactory>;
|
||||
@@ -32,7 +42,9 @@ export const secretSharingServiceFactory = ({
|
||||
permissionService,
|
||||
secretSharingDAL,
|
||||
orgDAL,
|
||||
kmsService
|
||||
kmsService,
|
||||
smtpService,
|
||||
userDAL
|
||||
}: TSecretSharingServiceFactoryDep) => {
|
||||
const $validateSharedSecretExpiry = (expiresAt: string) => {
|
||||
if (new Date(expiresAt) < new Date()) {
|
||||
@@ -75,7 +87,6 @@ export const secretSharingServiceFactory = ({
|
||||
}
|
||||
|
||||
const encryptWithRoot = kmsService.encryptWithRootKey();
|
||||
|
||||
const encryptedSecret = encryptWithRoot(Buffer.from(secretValue));
|
||||
|
||||
const id = crypto.randomBytes(32).toString("hex");
|
||||
@@ -88,6 +99,7 @@ export const secretSharingServiceFactory = ({
|
||||
encryptedValue: null,
|
||||
encryptedSecret,
|
||||
name,
|
||||
type: SecretSharingType.Share,
|
||||
password: hashedPassword,
|
||||
expiresAt: new Date(expiresAt),
|
||||
expiresAfterViews,
|
||||
@@ -101,6 +113,191 @@ export const secretSharingServiceFactory = ({
|
||||
return { id: idToReturn };
|
||||
};
|
||||
|
||||
const createSecretRequest = async ({
|
||||
actor,
|
||||
accessType,
|
||||
expiresAt,
|
||||
name,
|
||||
actorId,
|
||||
orgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TCreateSecretRequestDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
if (!permission) throw new ForbiddenRequestError({ name: "User is not a part of the specified organization" });
|
||||
|
||||
$validateSharedSecretExpiry(expiresAt);
|
||||
|
||||
const newSecretRequest = await secretSharingDAL.create({
|
||||
type: SecretSharingType.Request,
|
||||
userId: actorId,
|
||||
orgId,
|
||||
name,
|
||||
encryptedSecret: null,
|
||||
accessType,
|
||||
expiresAt: new Date(expiresAt)
|
||||
});
|
||||
|
||||
return { id: newSecretRequest.id };
|
||||
};
|
||||
|
||||
const revealSecretRequestValue = async ({
|
||||
id,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
orgId,
|
||||
actorAuthMethod
|
||||
}: TRevealSecretRequestValueDTO) => {
|
||||
const secretRequest = await secretSharingDAL.getSecretRequestById(id);
|
||||
|
||||
if (!secretRequest) {
|
||||
throw new NotFoundError({ message: `Secret request with ID '${id}' not found` });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
|
||||
if (!permission) throw new ForbiddenRequestError({ name: "User is not a part of the specified organization" });
|
||||
|
||||
if (secretRequest.userId !== actorId || secretRequest.orgId !== orgId) {
|
||||
throw new ForbiddenRequestError({ name: "User does not have permission to access this secret request" });
|
||||
}
|
||||
|
||||
if (!secretRequest.encryptedSecret) {
|
||||
throw new BadRequestError({ message: "Secret request has no value set" });
|
||||
}
|
||||
|
||||
const decryptWithRoot = kmsService.decryptWithRootKey();
|
||||
const decryptedSecret = decryptWithRoot(secretRequest.encryptedSecret);
|
||||
|
||||
return { ...secretRequest, secretValue: decryptedSecret.toString() };
|
||||
};
|
||||
|
||||
const getSecretRequestById = async ({
|
||||
id,
|
||||
    actor,
    actorId,
    actorAuthMethod,
    actorOrgId
  }: TGetSecretRequestByIdDTO) => {
    const secretRequest = await secretSharingDAL.getSecretRequestById(id);

    if (!secretRequest) {
      throw new NotFoundError({ message: `Secret request with ID '${id}' not found` });
    }

    if (secretRequest.accessType === SecretSharingAccessType.Organization) {
      if (!secretRequest.orgId) {
        throw new BadRequestError({ message: "No organization ID present on secret request" });
      }

      if (!actorOrgId) {
        throw new UnauthorizedError();
      }

      const { permission } = await permissionService.getOrgPermission(
        actor,
        actorId,
        secretRequest.orgId,
        actorAuthMethod,
        actorOrgId
      );
      if (!permission) throw new ForbiddenRequestError({ name: "User is not a part of the specified organization" });
    }

    if (secretRequest.expiresAt && secretRequest.expiresAt < new Date()) {
      throw new ForbiddenRequestError({
        message: "Access denied: Secret request has expired"
      });
    }

    return {
      ...secretRequest,
      isSecretValueSet: Boolean(secretRequest.encryptedSecret)
    };
  };

  const setSecretRequestValue = async ({
    id,
    actor,
    actorId,
    actorAuthMethod,
    actorOrgId,
    secretValue
  }: TSetSecretRequestValueDTO) => {
    const appCfg = getConfig();

    const secretRequest = await secretSharingDAL.getSecretRequestById(id);

    if (!secretRequest) {
      throw new NotFoundError({ message: `Secret request with ID '${id}' not found` });
    }

    let respondentUsername: string | undefined;

    if (secretRequest.accessType === SecretSharingAccessType.Organization) {
      if (!secretRequest.orgId) {
        throw new BadRequestError({ message: "No organization ID present on secret request" });
      }

      if (!actorOrgId) {
        throw new UnauthorizedError();
      }

      const { permission } = await permissionService.getOrgPermission(
        actor,
        actorId,
        secretRequest.orgId,
        actorAuthMethod,
        actorOrgId
      );
      if (!permission) throw new ForbiddenRequestError({ name: "User is not a part of the specified organization" });

      const user = await userDAL.findById(actorId);

      if (!user) {
        throw new NotFoundError({ message: `User with ID '${actorId}' not found` });
      }

      respondentUsername = user.username;
    }

    if (secretRequest.encryptedSecret) {
      throw new BadRequestError({ message: "Secret request already has a value set" });
    }

    if (secretValue.length > 10_000) {
      throw new BadRequestError({ message: "Shared secret value too long" });
    }

    if (secretRequest.expiresAt && secretRequest.expiresAt < new Date()) {
      throw new ForbiddenRequestError({
        message: "Access denied: Secret request has expired"
      });
    }

    const encryptWithRoot = kmsService.encryptWithRootKey();
    const encryptedSecret = encryptWithRoot(Buffer.from(secretValue));

    const request = await secretSharingDAL.transaction(async (tx) => {
      const updatedRequest = await secretSharingDAL.updateById(id, { encryptedSecret }, tx);

      await smtpService.sendMail({
        recipients: [secretRequest.requesterUsername],
        subjectLine: "Secret Request Completed",
        substitutions: {
          name: secretRequest.name,
          respondentUsername,
          secretRequestUrl: `${appCfg.SITE_URL}/organization/secret-sharing?selectedTab=request-secret`
        },
        template: SmtpTemplates.SecretRequestCompleted
      });

      return updatedRequest;
    });

    return request;
  };
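For orientation, a minimal usage sketch of the two request methods above, assuming a wired-up secretSharingService returned by this factory and placeholder actor values; apart from the two method names, every identifier below is an assumption rather than part of this diff.

// Sketch only: actor context values are placeholders.
const actorCtx = {
  actor: ActorType.USER,
  actorId: "user-uuid",
  actorAuthMethod: null,
  actorOrgId: "org-uuid"
};

// 1. The requester (or a respondent) loads the request and checks whether a value was supplied.
const request = await secretSharingService.getSecretRequestById({ id: "request-uuid", ...actorCtx });

// 2. A respondent fills in the value; the service encrypts it with the KMS root key and
//    emails the requester using the SecretRequestCompleted template added later in this diff.
if (!request.isSecretValueSet) {
  await secretSharingService.setSecretRequestValue({
    id: "request-uuid",
    secretValue: "value-to-share",
    ...actorCtx
  });
}
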
const createPublicSharedSecret = async ({
|
||||
password,
|
||||
secretValue,
|
||||
@@ -121,6 +318,7 @@ export const secretSharingServiceFactory = ({
|
||||
encryptedValue: null,
|
||||
iv: null,
|
||||
tag: null,
|
||||
type: SecretSharingType.Share,
|
||||
encryptedSecret,
|
||||
password: hashedPassword,
|
||||
expiresAt: new Date(expiresAt),
|
||||
@@ -137,7 +335,8 @@ export const secretSharingServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
offset,
|
||||
limit
|
||||
limit,
|
||||
type
|
||||
}: TGetSharedSecretsDTO) => {
|
||||
if (!actorOrgId) throw new ForbiddenRequestError();
|
||||
|
||||
@@ -153,14 +352,16 @@ export const secretSharingServiceFactory = ({
|
||||
const secrets = await secretSharingDAL.find(
|
||||
{
|
||||
userId: actorId,
|
||||
orgId: actorOrgId
|
||||
orgId: actorOrgId,
|
||||
type
|
||||
},
|
||||
{ offset, limit, sort: [["createdAt", "desc"]] }
|
||||
);
|
||||
|
||||
const count = await secretSharingDAL.countAllUserOrgSharedSecrets({
|
||||
orgId: actorOrgId,
|
||||
userId: actorId
|
||||
userId: actorId,
|
||||
type
|
||||
});
|
||||
|
||||
return {
|
||||
@@ -187,9 +388,11 @@ export const secretSharingServiceFactory = ({
|
||||
const sharedSecret = isUuidV4(sharedSecretId)
|
||||
? await secretSharingDAL.findOne({
|
||||
id: sharedSecretId,
|
||||
type: SecretSharingType.Share,
|
||||
hashedHex
|
||||
})
|
||||
: await secretSharingDAL.findOne({
|
||||
type: SecretSharingType.Share,
|
||||
identifier: Buffer.from(sharedSecretId, "base64url").toString("hex")
|
||||
});
|
||||
|
||||
@@ -254,7 +457,7 @@ export const secretSharingServiceFactory = ({
|
||||
secret: {
|
||||
...sharedSecret,
|
||||
...(decryptedSecretValue && {
|
||||
secretValue: Buffer.from(decryptedSecretValue).toString()
|
||||
secretValue: decryptedSecretValue.toString()
|
||||
}),
|
||||
orgName:
|
||||
sharedSecret.accessType === SecretSharingAccessType.Organization && orgId === sharedSecret.orgId
|
||||
@@ -270,11 +473,17 @@ export const secretSharingServiceFactory = ({
|
||||
if (!permission) throw new ForbiddenRequestError({ name: "User does not belong to the specified organization" });
|
||||
|
||||
const sharedSecret = isUuidV4(sharedSecretId)
|
||||
? await secretSharingDAL.findById(sharedSecretId)
|
||||
: await secretSharingDAL.findOne({ identifier: sharedSecretId });
|
||||
? await secretSharingDAL.findOne({ id: sharedSecretId, type: deleteSharedSecretInput.type })
|
||||
: await secretSharingDAL.findOne({ identifier: sharedSecretId, type: deleteSharedSecretInput.type });
|
||||
|
||||
if (sharedSecret.orgId && sharedSecret.orgId !== orgId)
|
||||
if (sharedSecret.userId !== actorId) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "User does not have permission to delete shared secret"
|
||||
});
|
||||
}
|
||||
if (sharedSecret.orgId && sharedSecret.orgId !== orgId) {
|
||||
throw new ForbiddenRequestError({ message: "User does not have permission to delete shared secret" });
|
||||
}
|
||||
|
||||
const deletedSharedSecret = await secretSharingDAL.deleteById(sharedSecretId);
|
||||
|
||||
@@ -286,6 +495,11 @@ export const secretSharingServiceFactory = ({
|
||||
createPublicSharedSecret,
|
||||
getSharedSecrets,
|
||||
deleteSharedSecretById,
|
||||
getSharedSecretById
|
||||
getSharedSecretById,
|
||||
|
||||
createSecretRequest,
|
||||
getSecretRequestById,
|
||||
setSecretRequestValue,
|
||||
revealSecretRequestValue
|
||||
};
|
||||
};
|
||||
|
@@ -1,8 +1,14 @@
import { SecretSharingAccessType, TGenericPermission } from "@app/lib/types";
import { SecretSharingAccessType, TGenericPermission, TOrgPermission } from "@app/lib/types";

import { ActorAuthMethod, ActorType } from "../auth/auth-type";

export enum SecretSharingType {
  Share = "share",
  Request = "request"
}

export type TGetSharedSecretsDTO = {
  type: SecretSharingType;
  offset: number;
  limit: number;
} & TGenericPermission;
@@ -39,6 +45,26 @@ export type TValidateActiveSharedSecretDTO = TGetActiveSharedSecretByIdDTO & {

export type TCreateSharedSecretDTO = TSharedSecretPermission & TCreatePublicSharedSecretDTO;

export type TCreateSecretRequestDTO = {
  name?: string;
  accessType: SecretSharingAccessType;
  expiresAt: string;
} & TOrgPermission;

export type TRevealSecretRequestValueDTO = {
  id: string;
} & TOrgPermission;

export type TGetSecretRequestByIdDTO = {
  id: string;
} & Omit<TOrgPermission, "orgId">;

export type TSetSecretRequestValueDTO = {
  id: string;
  secretValue: string;
} & Omit<TOrgPermission, "orgId">;

export type TDeleteSharedSecretDTO = {
  sharedSecretId: string;
  type: SecretSharingType;
} & TSharedSecretPermission;
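The new type discriminator is what keeps classic shares and secret requests apart when listing; a rough sketch of paging through each kind via the getSharedSecrets method shown earlier in this diff (the service instance and actor fields are assumed context, not part of the change):

// Sketch only.
const listSharedSecrets = (type: SecretSharingType) =>
  secretSharingService.getSharedSecrets({
    type, // SecretSharingType.Share or SecretSharingType.Request
    offset: 0,
    limit: 25,
    actor: ActorType.USER,
    actorId: "user-uuid",
    actorAuthMethod: null,
    actorOrgId: "org-uuid"
  });

const shares = await listSharedSecrets(SecretSharingType.Share);
const requests = await listSharedSecrets(SecretSharingType.Request);
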
@@ -249,7 +249,8 @@ export const secretSyncQueueFactory = ({
|
||||
expandSecretReferences,
|
||||
secretImportDAL,
|
||||
secretImports,
|
||||
hasSecretAccess: () => true
|
||||
hasSecretAccess: () => true,
|
||||
viewSecretValue: true
|
||||
});
|
||||
|
||||
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
|
||||
|
@@ -3,7 +3,7 @@ import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSecretSyncActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
@@ -179,7 +179,7 @@ export const secretSyncServiceFactory = ({
|
||||
);
|
||||
|
||||
ForbiddenError.from(projectPermission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath
|
||||
@@ -270,7 +270,7 @@ export const secretSyncServiceFactory = ({
|
||||
throw new BadRequestError({ message: "Must specify both source environment and secret path" });
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: updatedEnvironment,
|
||||
secretPath: updatedSecretPath
|
||||
|
@@ -47,6 +47,7 @@ export const secretTagDALFactory = (db: TDbClient) => {
|
||||
throw new DatabaseError({ error, name: "Find all by ids" });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
...secretTagOrm,
|
||||
saveTagsToSecret: secretJnTagOrm.insertMany,
|
||||
|
@@ -7,6 +7,7 @@ import { logger } from "@app/lib/logger";
|
||||
|
||||
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
|
||||
import { ResourceMetadataDTO } from "../resource-metadata/resource-metadata-schema";
|
||||
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "../secret/secret-fns";
|
||||
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
|
||||
import { TSecretV2BridgeDALFactory } from "./secret-v2-bridge-dal";
|
||||
import { TFnSecretBulkDelete, TFnSecretBulkInsert, TFnSecretBulkUpdate } from "./secret-v2-bridge-types";
|
||||
@@ -102,6 +103,7 @@ export const fnSecretBulkInsert = async ({
|
||||
[`${TableName.SecretV2}Id` as const]: newSecretGroupedByKeyName[key][0].id
|
||||
}))
|
||||
);
|
||||
|
||||
const secretVersions = await secretVersionDAL.insertMany(
|
||||
sanitizedInputSecrets.map((el) => ({
|
||||
...el,
|
||||
@@ -137,6 +139,7 @@ export const fnSecretBulkInsert = async ({
|
||||
if (newSecretTags.length) {
|
||||
const secTags = await secretTagDAL.saveTagsToSecretV2(newSecretTags, tx);
|
||||
const secVersionsGroupBySecId = groupBy(secretVersions, (i) => i.secretId);
|
||||
|
||||
const newSecretVersionTags = secTags.flatMap(({ secrets_v2Id, secret_tagsId }) => ({
|
||||
[`${TableName.SecretVersionV2}Id` as const]: secVersionsGroupBySecId[secrets_v2Id][0].id,
|
||||
[`${TableName.SecretTag}Id` as const]: secret_tagsId
|
||||
@@ -145,7 +148,16 @@ export const fnSecretBulkInsert = async ({
|
||||
await secretVersionTagDAL.insertMany(newSecretVersionTags, tx);
|
||||
}
|
||||
|
||||
return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
|
||||
const secretsWithTags = await secretDAL.find(
|
||||
{
|
||||
$in: {
|
||||
[`${TableName.SecretV2}.id` as "id"]: newSecrets.map((s) => s.id)
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
|
||||
return secretsWithTags.map((secret) => ({ ...secret, _id: secret.id }));
|
||||
};
|
||||
|
||||
export const fnSecretBulkUpdate = async ({
|
||||
@@ -283,7 +295,15 @@ export const fnSecretBulkUpdate = async ({
|
||||
tx
|
||||
);
|
||||
|
||||
return newSecrets.map((secret) => ({ ...secret, _id: secret.id }));
|
||||
const secretsWithTags = await secretDAL.find(
|
||||
{
|
||||
$in: {
|
||||
[`${TableName.SecretV2}.id` as "id"]: newSecrets.map((s) => s.id)
|
||||
}
|
||||
},
|
||||
{ tx }
|
||||
);
|
||||
return secretsWithTags.map((secret) => ({ ...secret, _id: secret.id }));
|
||||
};
|
||||
|
||||
export const fnSecretBulkDelete = async ({
|
||||
@@ -516,7 +536,7 @@ export const expandSecretReferencesFactory = ({
|
||||
const referredValue = await fetchSecret(environment, secretPath, secretKey);
|
||||
if (!canExpandValue(environment, secretPath, secretKey, referredValue.tags))
|
||||
throw new ForbiddenRequestError({
|
||||
message: `You are attempting to reference secret named ${secretKey} from environment ${environment} in path ${secretPath} which you do not have access to.`
|
||||
message: `You are attempting to reference secret named ${secretKey} from environment ${environment} in path ${secretPath} which you do not have access to read value on.`
|
||||
});
|
||||
|
||||
const cacheKey = getCacheUniqueKey(environment, secretPath);
|
||||
@@ -535,7 +555,7 @@ export const expandSecretReferencesFactory = ({
|
||||
const referedValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
|
||||
if (!canExpandValue(secretReferenceEnvironment, secretReferencePath, secretReferenceKey, referedValue.tags))
|
||||
throw new ForbiddenRequestError({
|
||||
message: `You are attempting to reference secret named ${secretReferenceKey} from environment ${secretReferenceEnvironment} in path ${secretReferencePath} which you do not have access to.`
|
||||
message: `You are attempting to reference secret named ${secretReferenceKey} from environment ${secretReferenceEnvironment} in path ${secretReferencePath} which you do not have access to read value on.`
|
||||
});
|
||||
|
||||
const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);
|
||||
@@ -623,13 +643,13 @@ export const reshapeBridgeSecret = (
|
||||
name: string;
|
||||
}[];
|
||||
secretMetadata?: ResourceMetadataDTO;
|
||||
}
|
||||
},
|
||||
secretValueHidden: boolean
|
||||
) => ({
|
||||
secretKey: secret.key,
|
||||
secretPath,
|
||||
workspace: workspaceId,
|
||||
environment,
|
||||
secretValue: secret.value || "",
|
||||
secretComment: secret.comment || "",
|
||||
version: secret.version,
|
||||
type: secret.type,
|
||||
@@ -643,5 +663,15 @@ export const reshapeBridgeSecret = (
|
||||
metadata: secret.metadata,
|
||||
secretMetadata: secret.secretMetadata,
|
||||
createdAt: secret.createdAt,
|
||||
updatedAt: secret.updatedAt
|
||||
updatedAt: secret.updatedAt,
|
||||
|
||||
...(secretValueHidden
|
||||
? {
|
||||
secretValue: INFISICAL_SECRET_VALUE_HIDDEN_MASK,
|
||||
secretValueHidden: true
|
||||
}
|
||||
: {
|
||||
secretValue: secret.value || "",
|
||||
secretValueHidden: false
|
||||
})
|
||||
});
|
||||
|
File diff suppressed because it is too large
@@ -1,6 +1,7 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { SecretType, TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas";
|
||||
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
|
||||
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { SecretsOrderBy } from "@app/services/secret/secret-types";
|
||||
@@ -36,6 +37,8 @@ export type TGetSecretsDTO = {
|
||||
includeImports?: boolean;
|
||||
recursive?: boolean;
|
||||
tagSlugs?: string[];
|
||||
viewSecretValue: boolean;
|
||||
throwOnMissingReadValuePermission?: boolean;
|
||||
metadataFilter?: {
|
||||
key?: string;
|
||||
value?: string;
|
||||
@@ -48,6 +51,11 @@ export type TGetSecretsDTO = {
|
||||
keys?: string[];
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TGetSecretsMissingReadValuePermissionDTO = Omit<
|
||||
TGetSecretsDTO,
|
||||
"viewSecretValue" | "recursive" | "expandSecretReferences"
|
||||
>;
|
||||
|
||||
export type TGetASecretDTO = {
|
||||
secretName: string;
|
||||
path: string;
|
||||
@@ -57,6 +65,7 @@ export type TGetASecretDTO = {
|
||||
includeImports?: boolean;
|
||||
version?: number;
|
||||
projectId: string;
|
||||
viewSecretValue: boolean;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TCreateSecretDTO = TProjectPermission & {
|
||||
@@ -164,9 +173,9 @@ export type TFnSecretBulkInsert = {
|
||||
}
|
||||
>;
|
||||
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany">;
|
||||
secretDAL: Pick<TSecretV2BridgeDALFactory, "insertMany" | "upsertSecretReferences">;
|
||||
secretDAL: Pick<TSecretV2BridgeDALFactory, "insertMany" | "upsertSecretReferences" | "find">;
|
||||
secretVersionDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "find">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
|
||||
};
|
||||
|
||||
@@ -188,9 +197,9 @@ export type TFnSecretBulkUpdate = {
|
||||
data: TRequireReferenceIfValue & { tags?: string[]; secretMetadata?: ResourceMetadataDTO };
|
||||
}[];
|
||||
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
|
||||
secretDAL: Pick<TSecretV2BridgeDALFactory, "bulkUpdate" | "upsertSecretReferences">;
|
||||
secretDAL: Pick<TSecretV2BridgeDALFactory, "bulkUpdate" | "upsertSecretReferences" | "find">;
|
||||
secretVersionDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2">;
|
||||
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2" | "find">;
|
||||
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
|
||||
tx?: Knex;
|
||||
};
|
||||
@@ -332,4 +341,5 @@ export type TGetSecretsRawByFolderMappingsDTO = {
|
||||
folderMappings: { folderId: string; path: string; environment: string }[];
|
||||
userId: string;
|
||||
filters: TFindSecretsByFolderIdsFilter;
|
||||
filterByAction?: ProjectPermissionSecretActions;
|
||||
};
|
||||
|
@@ -1,9 +1,9 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TSecretVersionsV2, TSecretVersionsV2Update } from "@app/db/schemas";
|
||||
import { SecretVersionsV2Schema, TableName, TSecretVersionsV2, TSecretVersionsV2Update } from "@app/db/schemas";
|
||||
import { BadRequestError, DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships, TFindOpt } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
@@ -12,6 +12,58 @@ export type TSecretVersionV2DALFactory = ReturnType<typeof secretVersionV2Bridge
|
||||
export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
|
||||
const secretVersionV2Orm = ormify(db, TableName.SecretVersionV2);
|
||||
|
||||
const findBySecretId = async (secretId: string, { offset, limit, sort, tx }: TFindOpt<TSecretVersionsV2> = {}) => {
|
||||
try {
|
||||
const query = (tx || db.replicaNode())(TableName.SecretVersionV2)
|
||||
.where(`${TableName.SecretVersionV2}.secretId`, secretId)
|
||||
.leftJoin(TableName.SecretV2, `${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
|
||||
.leftJoin(
|
||||
TableName.SecretV2JnTag,
|
||||
`${TableName.SecretV2}.id`,
|
||||
`${TableName.SecretV2JnTag}.${TableName.SecretV2}Id`
|
||||
)
|
||||
.leftJoin(
|
||||
TableName.SecretTag,
|
||||
`${TableName.SecretV2JnTag}.${TableName.SecretTag}Id`,
|
||||
`${TableName.SecretTag}.id`
|
||||
)
|
||||
.select(selectAllTableCols(TableName.SecretVersionV2))
|
||||
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
|
||||
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
|
||||
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
|
||||
|
||||
if (limit) void query.limit(limit);
|
||||
if (offset) void query.offset(offset);
|
||||
if (sort) {
|
||||
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
|
||||
}
|
||||
|
||||
const docs = await query;
|
||||
|
||||
const data = sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "id",
|
||||
parentMapper: (el) => ({ _id: el.id, ...SecretVersionsV2Schema.parse(el) }),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "tagId",
|
||||
label: "tags" as const,
|
||||
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
|
||||
id,
|
||||
color,
|
||||
slug,
|
||||
name: slug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return data;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: `${TableName.SecretVersionV2}: FindBySecretId` });
|
||||
}
|
||||
};
|
||||
|
||||
// This will fetch all latest secret versions from a folder
|
||||
const findLatestVersionByFolderId = async (folderId: string, tx?: Knex) => {
|
||||
try {
|
||||
@@ -124,6 +176,7 @@ export const secretVersionV2BridgeDALFactory = (db: TDbClient) => {
|
||||
pruneExcessVersions,
|
||||
findLatestVersionMany,
|
||||
bulkUpdate,
|
||||
findLatestVersionByFolderId
|
||||
findLatestVersionByFolderId,
|
||||
findBySecretId
|
||||
};
|
||||
};
|
||||
|
@@ -169,6 +169,48 @@ export const secretDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
const findManySecretsWithTags = async (
|
||||
filter: {
|
||||
secretIds: string[];
|
||||
type: SecretType;
|
||||
},
|
||||
tx?: Knex
|
||||
) => {
|
||||
try {
|
||||
const secrets = await (tx || db.replicaNode())(TableName.Secret)
|
||||
.whereIn(`${TableName.Secret}.id` as "id", filter.secretIds)
|
||||
.where("type", filter.type)
|
||||
.leftJoin(TableName.JnSecretTag, `${TableName.Secret}.id`, `${TableName.JnSecretTag}.${TableName.Secret}Id`)
|
||||
.leftJoin(TableName.SecretTag, `${TableName.JnSecretTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`)
|
||||
.select(selectAllTableCols(TableName.Secret))
|
||||
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
|
||||
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
|
||||
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
|
||||
|
||||
const data = sqlNestRelationships({
|
||||
data: secrets,
|
||||
key: "id",
|
||||
parentMapper: (el) => ({ _id: el.id, ...SecretsSchema.parse(el) }),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "tagId",
|
||||
label: "tags" as const,
|
||||
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
|
||||
id,
|
||||
color,
|
||||
slug,
|
||||
name: slug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return data;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "get many secrets with tags" });
|
||||
}
|
||||
};
|
||||
|
||||
const findByFolderIds = async (folderIds: string[], userId?: string, tx?: Knex) => {
|
||||
try {
|
||||
// check if not uui then userId id is null (corner case because service token's ID is not UUI in effort to keep backwards compatibility from mongo)
|
||||
@@ -443,6 +485,7 @@ export const secretDALFactory = (db: TDbClient) => {
|
||||
upsertSecretReferences,
|
||||
findReferencedSecretReferences,
|
||||
findAllProjectSecretValues,
|
||||
pruneSecretReminders
|
||||
pruneSecretReminders,
|
||||
findManySecretsWithTags
|
||||
};
|
||||
};
|
||||
|
@@ -13,7 +13,7 @@ import {
|
||||
TSecrets
|
||||
} from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import {
|
||||
buildSecretBlindIndexFromName,
|
||||
@@ -51,6 +51,8 @@ import {
|
||||
TUpdateManySecretsRawFnFactory
|
||||
} from "./secret-types";
|
||||
|
||||
export const INFISICAL_SECRET_VALUE_HIDDEN_MASK = "<hidden-by-infisical>";
|
||||
|
||||
export const generateSecretBlindIndexBySalt = async (secretName: string, secretBlindIndexDoc: TSecretBlindIndexes) => {
|
||||
const appCfg = getConfig();
|
||||
const secretBlindIndex = await buildSecretBlindIndexFromName({
|
||||
@@ -190,7 +192,7 @@ export const recursivelyGetSecretPaths = ({
|
||||
const allowedPaths = paths.filter(
|
||||
(folder) =>
|
||||
permission.can(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath: folder.path
|
||||
@@ -344,6 +346,7 @@ export const interpolateSecrets = ({ projectId, secretEncKey, secretDAL, folderD
|
||||
|
||||
export const decryptSecretRaw = (
|
||||
secret: TSecrets & {
|
||||
secretValueHidden: boolean;
|
||||
workspace: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
@@ -362,12 +365,14 @@ export const decryptSecretRaw = (
|
||||
key
|
||||
});
|
||||
|
||||
const secretValue = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key
|
||||
});
|
||||
const secretValue = !secret.secretValueHidden
|
||||
? decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key
|
||||
})
|
||||
: INFISICAL_SECRET_VALUE_HIDDEN_MASK;
|
||||
|
||||
let secretComment = "";
|
||||
|
||||
@@ -385,6 +390,7 @@ export const decryptSecretRaw = (
|
||||
secretPath: secret.secretPath,
|
||||
workspace: secret.workspace,
|
||||
environment: secret.environment,
|
||||
secretValueHidden: secret.secretValueHidden,
|
||||
secretValue,
|
||||
secretComment,
|
||||
version: secret.version,
|
||||
@@ -1197,3 +1203,23 @@ export const fnDeleteProjectSecretReminders = async (
    }
  }
};

export const conditionallyHideSecretValue = (
  shouldHideValue: boolean,
  {
    secretValueCiphertext,
    secretValueIV,
    secretValueTag
  }: {
    secretValueCiphertext: string;
    secretValueIV: string;
    secretValueTag: string;
  }
) => {
  return {
    secretValueCiphertext: shouldHideValue ? INFISICAL_SECRET_VALUE_HIDDEN_MASK : secretValueCiphertext,
    secretValueIV: shouldHideValue ? INFISICAL_SECRET_VALUE_HIDDEN_MASK : secretValueIV,
    secretValueTag: shouldHideValue ? INFISICAL_SECRET_VALUE_HIDDEN_MASK : secretValueTag,
    secretValueHidden: shouldHideValue
  };
};

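The helper above is used by the v1 secret service wherever the caller may lack the new ReadValue permission: the ciphertext, IV and tag are swapped for the mask so nothing decryptable leaves the API. A condensed sketch of the call pattern that appears in updateSecret and deleteSecret later in this diff; permission, secret, environment and path stand in for values already in scope there.

// Sketch of the existing usage pattern, not new behavior.
const secretValueHidden = !permission.can(
  ProjectPermissionSecretActions.ReadValue,
  subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);

const response = {
  ...secret,
  ...conditionallyHideSecretValue(secretValueHidden, secret)
  // when hidden: secretValueCiphertext/IV/Tag === "<hidden-by-infisical>", secretValueHidden === true
};
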
@@ -402,7 +402,8 @@ export const secretQueueFactory = ({
|
||||
expandSecretReferences,
|
||||
secretImportDAL,
|
||||
secretImports,
|
||||
hasSecretAccess: () => true
|
||||
hasSecretAccess: () => true,
|
||||
viewSecretValue: true
|
||||
});
|
||||
|
||||
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
|
||||
|
@@ -6,6 +6,7 @@ import {
|
||||
ActionProjectType,
|
||||
ProjectMembershipRole,
|
||||
ProjectUpgradeStatus,
|
||||
ProjectVersion,
|
||||
SecretEncryptionAlgo,
|
||||
SecretKeyEncoding,
|
||||
SecretsSchema,
|
||||
@@ -13,7 +14,11 @@ import {
|
||||
} from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
|
||||
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
|
||||
@@ -48,6 +53,7 @@ import { TSecretV2BridgeServiceFactory } from "../secret-v2-bridge/secret-v2-bri
|
||||
import { TGetSecretReferencesTreeDTO } from "../secret-v2-bridge/secret-v2-bridge-types";
|
||||
import { TSecretDALFactory } from "./secret-dal";
|
||||
import {
|
||||
conditionallyHideSecretValue,
|
||||
decryptSecretRaw,
|
||||
fnSecretBlindIndexCheck,
|
||||
fnSecretBulkDelete,
|
||||
@@ -95,7 +101,7 @@ type TSecretServiceFactoryDep = {
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
|
||||
folderDAL: Pick<
|
||||
TSecretFolderDALFactory,
|
||||
"findBySecretPath" | "updateById" | "findById" | "findByManySecretPath" | "find"
|
||||
"findBySecretPath" | "updateById" | "findById" | "findByManySecretPath" | "find" | "findSecretPathByFolderIds"
|
||||
>;
|
||||
secretV2BridgeService: TSecretV2BridgeServiceFactory;
|
||||
secretBlindIndexDAL: TSecretBlindIndexDALFactory;
|
||||
@@ -204,7 +210,7 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -322,7 +328,7 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -444,7 +450,22 @@ export const secretServiceFactory = ({
|
||||
environmentSlug: folder.environment.slug
|
||||
});
|
||||
}
|
||||
return { ...updatedSecret[0], workspace: projectId, environment, secretPath: path };
|
||||
|
||||
const secretValueHidden = !permission.can(
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment,
|
||||
secretPath: path
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
...updatedSecret[0],
|
||||
...conditionallyHideSecretValue(secretValueHidden, updatedSecret[0]),
|
||||
workspace: projectId,
|
||||
environment,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const deleteSecret = async ({
|
||||
@@ -467,7 +488,7 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Delete,
|
||||
ProjectPermissionSecretActions.Delete,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -540,7 +561,19 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
return { ...deletedSecret[0], _id: deletedSecret[0].id, workspace: projectId, environment, secretPath: path };
|
||||
const secretValueHidden = !permission.can(
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
return {
|
||||
...deletedSecret[0],
|
||||
...conditionallyHideSecretValue(secretValueHidden, deletedSecret[0]),
|
||||
_id: deletedSecret[0].id,
|
||||
workspace: projectId,
|
||||
environment,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const getSecrets = async ({
|
||||
@@ -589,7 +622,7 @@ export const secretServiceFactory = ({
|
||||
paths = deepPaths.map(({ folderId, path: p }) => ({ folderId, path: p }));
|
||||
} else {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -614,7 +647,7 @@ export const secretServiceFactory = ({
|
||||
actor === ActorType.SERVICE
|
||||
? true
|
||||
: permission.can(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: importEnv.slug,
|
||||
secretPath: importPath
|
||||
@@ -671,7 +704,7 @@ export const secretServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
|
||||
@@ -721,7 +754,7 @@ export const secretServiceFactory = ({
|
||||
actor === ActorType.SERVICE
|
||||
? true
|
||||
: permission.can(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: importEnv.slug,
|
||||
secretPath: importPath
|
||||
@@ -739,6 +772,7 @@ export const secretServiceFactory = ({
|
||||
if (secretBlindIndex === importedSecrets[i].secrets[j].secretBlindIndex) {
|
||||
return {
|
||||
...importedSecrets[i].secrets[j],
|
||||
secretValueHidden: false,
|
||||
workspace: projectId,
|
||||
environment: importedSecrets[i].environment,
|
||||
secretPath: importedSecrets[i].secretPath
|
||||
@@ -749,7 +783,13 @@ export const secretServiceFactory = ({
|
||||
}
|
||||
if (!secret) throw new NotFoundError({ message: `Secret with name '${secretName}' not found` });
|
||||
|
||||
return { ...secret, workspace: projectId, environment, secretPath: path };
|
||||
return {
|
||||
...secret,
|
||||
secretValueHidden: false, // Always false because we check permission at the beginning of the function
|
||||
workspace: projectId,
|
||||
environment,
|
||||
secretPath: path
|
||||
};
|
||||
};
|
||||
|
||||
const createManySecret = async ({
|
||||
@@ -771,7 +811,7 @@ export const secretServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -859,7 +899,7 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -901,8 +941,8 @@ export const secretServiceFactory = ({
|
||||
if (tagIds.length !== tags.length) throw new NotFoundError({ message: "One or more tags not found" });
|
||||
|
||||
const references = await getSecretReference(projectId);
|
||||
const secrets = await secretDAL.transaction(async (tx) =>
|
||||
fnSecretBulkUpdate({
|
||||
const secrets = await secretDAL.transaction(async (tx) => {
|
||||
const updatedSecrets = await fnSecretBulkUpdate({
|
||||
folderId,
|
||||
projectId,
|
||||
tx,
|
||||
@@ -932,8 +972,18 @@ export const secretServiceFactory = ({
|
||||
secretVersionDAL,
|
||||
secretTagDAL,
|
||||
secretVersionTagDAL
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
const secretValueHidden = !permission.can(
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
return updatedSecrets.map((secret) => ({
|
||||
...secret,
|
||||
...conditionallyHideSecretValue(secretValueHidden, secret)
|
||||
}));
|
||||
});
|
||||
|
||||
await snapshotService.performSnapshot(folderId);
|
||||
await secretQueueService.syncSecrets({
|
||||
@@ -967,7 +1017,7 @@ export const secretServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Delete,
|
||||
ProjectPermissionSecretActions.Delete,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
@@ -1019,7 +1069,15 @@ export const secretServiceFactory = ({
|
||||
}
|
||||
}
|
||||
|
||||
return secrets;
|
||||
const secretValueHidden = !permission.can(
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
|
||||
);
|
||||
|
||||
return secrets.map((secret) => ({
|
||||
...secret,
|
||||
...conditionallyHideSecretValue(secretValueHidden, secret)
|
||||
}));
|
||||
});
|
||||
|
||||
await snapshotService.performSnapshot(folderId);
|
||||
@@ -1180,6 +1238,7 @@ export const secretServiceFactory = ({
|
||||
secretName,
|
||||
path: secretPath,
|
||||
environment,
|
||||
viewSecretValue: false,
|
||||
type: "shared"
|
||||
});
|
||||
|
||||
@@ -1194,10 +1253,11 @@ export const secretServiceFactory = ({
|
||||
| (typeof groupPermissions)[number]
|
||||
) => {
|
||||
const allowedActions = [
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionActions.Delete,
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionActions.Edit
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
ProjectPermissionSecretActions.Delete,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
ProjectPermissionSecretActions.Edit
|
||||
].filter((action) =>
|
||||
entityPermission.permission.can(
|
||||
action,
|
||||
@@ -1234,11 +1294,13 @@ export const secretServiceFactory = ({
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
viewSecretValue,
|
||||
environment,
|
||||
includeImports,
|
||||
expandSecretReferences,
|
||||
recursive,
|
||||
tagSlugs = [],
|
||||
throwOnMissingReadValuePermission = true,
|
||||
...paramsV2
|
||||
}: TGetSecretsRawDTO) => {
|
||||
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
|
||||
@@ -1249,6 +1311,8 @@ export const secretServiceFactory = ({
|
||||
actorId,
|
||||
actor,
|
||||
actorOrgId,
|
||||
viewSecretValue,
|
||||
throwOnMissingReadValuePermission,
|
||||
environment,
|
||||
path,
|
||||
recursive,
|
||||
@@ -1257,6 +1321,7 @@ export const secretServiceFactory = ({
|
||||
tagSlugs,
|
||||
...paramsV2
|
||||
});
|
||||
|
||||
return { secrets, imports };
|
||||
}
|
||||
|
||||
@@ -1285,14 +1350,20 @@ export const secretServiceFactory = ({
|
||||
recursive
|
||||
});
|
||||
|
||||
const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey));
|
||||
const decryptedSecrets = secrets.map((el) => decryptSecretRaw({ ...el, secretValueHidden: false }, botKey));
|
||||
const filteredSecrets = tagSlugs.length
|
||||
? decryptedSecrets.filter((secret) => Boolean(secret.tags?.find((el) => tagSlugs.includes(el.slug))))
|
||||
: decryptedSecrets;
|
||||
const processedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => {
|
||||
const decryptedImportSecrets = importedSecrets.map((sec) =>
|
||||
decryptSecretRaw(
|
||||
{ ...sec, environment: el.environment, workspace: projectId, secretPath: el.secretPath },
|
||||
{
|
||||
...sec,
|
||||
environment: el.environment,
|
||||
workspace: projectId,
|
||||
secretPath: el.secretPath,
|
||||
secretValueHidden: false
|
||||
},
|
||||
botKey
|
||||
)
|
||||
);
|
||||
@@ -1303,6 +1374,7 @@ export const secretServiceFactory = ({
|
||||
const importedEntries = decryptedImportSecrets.reduce(
|
||||
(
|
||||
accum: {
|
||||
secretValueHidden: boolean;
|
||||
secretKey: string;
|
||||
secretPath: string;
|
||||
workspace: string;
|
||||
@@ -1346,6 +1418,7 @@ export const secretServiceFactory = ({
|
||||
Object.keys(secretsGroupByPath).map((groupedPath) =>
|
||||
Promise.allSettled(
|
||||
secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {
|
||||
if (decryptedSecret.secretValueHidden) return;
|
||||
const expandedSecretValue = await expandSecret({
|
||||
value: decryptedSecret.secretValue,
|
||||
secretPath: groupedPath,
|
||||
@@ -1362,6 +1435,7 @@ export const secretServiceFactory = ({
|
||||
processedImports.map((processedImport) =>
|
||||
Promise.allSettled(
|
||||
processedImport.secrets.map(async (decryptedSecret, index) => {
|
||||
if (decryptedSecret.secretValueHidden) return;
|
||||
const expandedSecretValue = await expandSecret({
|
||||
value: decryptedSecret.secretValue,
|
||||
secretPath: path,
|
||||
@@ -1387,6 +1461,7 @@ export const secretServiceFactory = ({
|
||||
path,
|
||||
actor,
|
||||
environment,
|
||||
viewSecretValue,
|
||||
projectId: workspaceId,
|
||||
expandSecretReferences,
|
||||
projectSlug,
|
||||
@@ -1406,6 +1481,7 @@ export const secretServiceFactory = ({
|
||||
includeImports,
|
||||
actorAuthMethod,
|
||||
path,
|
||||
viewSecretValue,
|
||||
actorOrgId,
|
||||
actor,
|
||||
actorId,
|
||||
@@ -1436,6 +1512,7 @@ export const secretServiceFactory = ({
|
||||
message: `Project bot for project with ID '${projectId}' not found. Please upgrade your project.`,
|
||||
name: "bot_not_found_error"
|
||||
});
|
||||
|
||||
const decryptedSecret = decryptSecretRaw(encryptedSecret, botKey);
|
||||
|
||||
if (expandSecretReferences) {
|
||||
@@ -1454,7 +1531,10 @@ export const secretServiceFactory = ({
|
||||
decryptedSecret.secretValue = expandedSecretValue || "";
|
||||
}
|
||||
|
||||
return { secretMetadata: undefined, ...decryptedSecret };
|
||||
return {
|
||||
secretMetadata: undefined,
|
||||
...decryptedSecret
|
||||
};
|
||||
};
|
||||
|
||||
const createSecretRaw = async ({
|
||||
@@ -1605,7 +1685,16 @@ export const secretServiceFactory = ({
|
||||
tags: tagIds
|
||||
});
|
||||
|
||||
return { type: SecretProtectionType.Direct as const, secret: decryptSecretRaw(secret, botKey) };
|
||||
return {
|
||||
type: SecretProtectionType.Direct as const,
|
||||
secret: decryptSecretRaw(
|
||||
{
|
||||
...secret,
|
||||
secretValueHidden: false
|
||||
},
|
||||
botKey
|
||||
)
|
||||
};
|
||||
};
|
||||
|
||||
const updateSecretRaw = async ({
|
||||
@@ -2001,7 +2090,7 @@ export const secretServiceFactory = ({
|
||||
return {
|
||||
type: SecretProtectionType.Direct as const,
|
||||
secrets: secrets.map((secret) =>
|
||||
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath }, botKey)
|
||||
decryptSecretRaw({ ...secret, workspace: projectId, environment, secretPath, secretValueHidden: false }, botKey)
|
||||
)
|
||||
};
|
||||
};
|
||||
@@ -2290,6 +2379,12 @@ export const secretServiceFactory = ({
|
||||
const folder = await folderDAL.findById(secret.folderId);
|
||||
if (!folder) throw new NotFoundError({ message: `Folder with ID '${secret.folderId}' not found` });
|
||||
|
||||
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(folder.projectId, [folder.id]);
|
||||
|
||||
if (!folderWithPath) {
|
||||
throw new NotFoundError({ message: `Folder with ID '${folder.id}' not found` });
|
||||
}
|
||||
|
||||
const { botKey } = await projectBotService.getBotKey(folder.projectId);
|
||||
if (!botKey)
|
||||
throw new NotFoundError({ message: `Project bot for project with ID '${folder.projectId}' not found` });
|
||||
@@ -2303,18 +2398,42 @@ export const secretServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
|
||||
const secretVersions = await secretVersionDAL.find({ secretId }, { offset, limit, sort: [["createdAt", "desc"]] });
|
||||
return secretVersions.map((el) =>
|
||||
decryptSecretRaw(
|
||||
const secretVersions = await secretVersionDAL.findBySecretId(secretId, {
|
||||
offset,
|
||||
limit,
|
||||
sort: [["createdAt", "desc"]]
|
||||
});
|
||||
return secretVersions.map((el) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: botKey
|
||||
});
|
||||
|
||||
const secretValueHidden = permission.cannot(
|
||||
ProjectPermissionSecretActions.ReadValue,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: folder.environment.envSlug,
|
||||
secretPath: folderWithPath.path,
|
||||
secretName: secretKey,
|
||||
...(el.tags?.length && {
|
||||
secretTags: el.tags.map((tag) => tag.slug)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
return decryptSecretRaw(
|
||||
{
|
||||
secretValueHidden,
|
||||
...el,
|
||||
workspace: folder.projectId,
|
||||
environment: folder.environment.envSlug,
|
||||
secretPath: "/"
|
||||
secretPath: folderWithPath.path
|
||||
},
|
||||
botKey
|
||||
)
|
||||
);
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
const attachTags = async ({
|
||||
@@ -2340,7 +2459,7 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
@@ -2446,7 +2565,7 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
ProjectPermissionSecretActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
|
||||
@@ -2612,7 +2731,7 @@ export const secretServiceFactory = ({
|
||||
message: `Project with slug '${projectSlug}' not found`
|
||||
});
|
||||
}
|
||||
if (project.version === 3) {
|
||||
if (project.version === ProjectVersion.V3) {
|
||||
return secretV2BridgeService.moveSecrets({
|
||||
sourceEnvironment,
|
||||
sourceSecretPath,
|
||||
@@ -2637,30 +2756,6 @@ export const secretServiceFactory = ({
|
||||
actionProjectType: ActionProjectType.SecretManager
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Delete,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: sourceEnvironment,
|
||||
secretPath: sourceSecretPath
|
||||
})
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: destinationEnvironment,
|
||||
secretPath: destinationSecretPath
|
||||
})
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: destinationEnvironment,
|
||||
secretPath: destinationSecretPath
|
||||
})
|
||||
);
|
||||
|
||||
const { botKey } = await projectBotService.getBotKey(project.id);
|
||||
if (!botKey) {
|
||||
throw new NotFoundError({
|
||||
@@ -2688,11 +2783,9 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const sourceSecrets = await secretDAL.find({
|
||||
const sourceSecrets = await secretDAL.findManySecretsWithTags({
|
||||
type: SecretType.Shared,
|
||||
$in: {
|
||||
id: secretIds
|
||||
}
|
||||
secretIds
|
||||
});
|
||||
|
||||
if (sourceSecrets.length !== secretIds.length) {
|
||||
@@ -2701,21 +2794,52 @@ export const secretServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const decryptedSourceSecrets = sourceSecrets.map((secret) => ({
|
||||
...secret,
|
||||
secretKey: decryptSymmetric128BitHexKeyUTF8({
|
||||
const sourceActions = [
|
||||
ProjectPermissionSecretActions.Delete,
|
||||
ProjectPermissionSecretActions.DescribeSecret,
|
||||
ProjectPermissionSecretActions.ReadValue
|
||||
] as const;
|
||||
const destinationActions = [ProjectPermissionSecretActions.Create, ProjectPermissionSecretActions.Edit] as const;
|
||||
|
||||
const decryptedSourceSecrets = sourceSecrets.map((secret) => {
|
||||
const secretKey = decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretKeyCiphertext,
|
||||
iv: secret.secretKeyIV,
|
||||
tag: secret.secretKeyTag,
|
||||
key: botKey
|
||||
}),
|
||||
secretValue: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: botKey
|
||||
})
|
||||
}));
|
||||
});
|
||||
|
||||
for (const destinationAction of destinationActions) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
destinationAction,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: destinationEnvironment,
|
||||
secretPath: destinationSecretPath
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
for (const sourceAction of sourceActions) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
sourceAction,
|
||||
subject(ProjectPermissionSub.Secrets, {
|
||||
environment: sourceEnvironment,
|
||||
secretPath: sourceSecretPath
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
...secret,
|
||||
secretKey,
|
||||
secretValue: decryptSymmetric128BitHexKeyUTF8({
|
||||
ciphertext: secret.secretValueCiphertext,
|
||||
iv: secret.secretValueIV,
|
||||
tag: secret.secretValueTag,
|
||||
key: botKey
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
let isSourceUpdated = false;
|
||||
let isDestinationUpdated = false;
|
||||
|
@@ -180,6 +180,8 @@ export type TGetSecretsRawDTO = {
|
||||
expandSecretReferences?: boolean;
|
||||
path: string;
|
||||
environment: string;
|
||||
viewSecretValue: boolean;
|
||||
throwOnMissingReadValuePermission?: boolean;
|
||||
includeImports?: boolean;
|
||||
recursive?: boolean;
|
||||
tagSlugs?: string[];
|
||||
@@ -205,6 +207,7 @@ export type TGetASecretRawDTO = {
|
||||
secretName: string;
|
||||
path: string;
|
||||
environment: string;
|
||||
viewSecretValue: boolean;
|
||||
expandSecretReferences?: boolean;
|
||||
type: "shared" | "personal";
|
||||
includeImports?: boolean;
|
||||
@@ -409,7 +412,7 @@ export type TCreateManySecretsRawFnFactory = {
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
secretV2BridgeDAL: Pick<
|
||||
TSecretV2BridgeDALFactory,
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany"
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
|
||||
>;
|
||||
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
|
||||
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
|
||||
@@ -446,7 +449,7 @@ export type TUpdateManySecretsRawFnFactory = {
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
secretV2BridgeDAL: Pick<
|
||||
TSecretV2BridgeDALFactory,
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany"
|
||||
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
|
||||
>;
|
||||
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
|
||||
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
|
||||
|
@@ -1,9 +1,9 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { TableName, TSecretVersions, TSecretVersionsUpdate } from "@app/db/schemas";
|
||||
import { SecretVersionsSchema, TableName, TSecretVersions, TSecretVersionsUpdate } from "@app/db/schemas";
|
||||
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
|
||||
import { ormify, selectAllTableCols } from "@app/lib/knex";
|
||||
import { ormify, selectAllTableCols, sqlNestRelationships, TFindOpt } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { QueueName } from "@app/queue";
|
||||
|
||||
@@ -12,6 +12,50 @@ export type TSecretVersionDALFactory = ReturnType<typeof secretVersionDALFactory
|
||||
export const secretVersionDALFactory = (db: TDbClient) => {
|
||||
const secretVersionOrm = ormify(db, TableName.SecretVersion);
|
||||
|
||||
const findBySecretId = async (secretId: string, { offset, limit, sort, tx }: TFindOpt<TSecretVersions> = {}) => {
|
||||
try {
|
||||
const query = (tx || db.replicaNode())(TableName.SecretVersion)
|
||||
.where(`${TableName.SecretVersion}.secretId`, secretId)
|
||||
.leftJoin(TableName.Secret, `${TableName.SecretVersion}.secretId`, `${TableName.Secret}.id`)
|
||||
.leftJoin(TableName.JnSecretTag, `${TableName.Secret}.id`, `${TableName.JnSecretTag}.${TableName.Secret}Id`)
|
||||
.leftJoin(TableName.SecretTag, `${TableName.JnSecretTag}.${TableName.SecretTag}Id`, `${TableName.SecretTag}.id`)
|
||||
.select(selectAllTableCols(TableName.SecretVersion))
|
||||
.select(db.ref("id").withSchema(TableName.SecretTag).as("tagId"))
|
||||
.select(db.ref("color").withSchema(TableName.SecretTag).as("tagColor"))
|
||||
.select(db.ref("slug").withSchema(TableName.SecretTag).as("tagSlug"));
|
||||
|
||||
if (limit) void query.limit(limit);
|
||||
if (offset) void query.offset(offset);
|
||||
if (sort) {
|
||||
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
|
||||
}
|
||||
|
||||
const docs = await query;
|
||||
|
||||
const data = sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "id",
|
||||
parentMapper: (el) => ({ _id: el.id, ...SecretVersionsSchema.parse(el) }),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "tagId",
|
||||
label: "tags" as const,
|
||||
mapper: ({ tagId: id, tagColor: color, tagSlug: slug }) => ({
|
||||
id,
|
||||
color,
|
||||
slug,
|
||||
name: slug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
return data;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: `${TableName.SecretVersion}: FindBySecretId` });
|
||||
}
|
||||
};
|
||||
|
||||
// This will fetch all latest secret versions from a folder
|
||||
const findLatestVersionByFolderId = async (folderId: string, tx?: Knex) => {
|
||||
try {
|
||||
@@ -149,6 +193,7 @@ export const secretVersionDALFactory = (db: TDbClient) => {
|
||||
findLatestVersionMany,
|
||||
bulkUpdate,
|
||||
findLatestVersionByFolderId,
|
||||
findBySecretId,
|
||||
bulkUpdateNoVersionIncrement
|
||||
};
|
||||
};
|
||||
|
@@ -5,7 +5,11 @@ import bcrypt from "bcrypt";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import {
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
|
||||
|
||||
@@ -67,7 +71,7 @@ export const serviceTokenServiceFactory = ({
|
||||
|
||||
scopes.forEach(({ environment, secretPath }) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionSecretActions.Create,
|
||||
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
|
||||
);
|
||||
});
|
||||
|
@@ -7,7 +7,7 @@ export type TCreateServiceTokenDTO = {
|
||||
iv: string;
|
||||
tag: string;
|
||||
expiresIn?: number | null;
|
||||
permissions: ("read" | "write")[];
|
||||
permissions: ("read" | "write" | "readValue")[];
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TGetServiceTokenInfoDTO = Omit<TProjectPermission, "projectId">;
|
||||
|
@@ -39,7 +39,8 @@ export enum SmtpTemplates {
  SecretSyncFailed = "secretSyncFailed.handlebars",
  ExternalImportSuccessful = "externalImportSuccessful.handlebars",
  ExternalImportFailed = "externalImportFailed.handlebars",
  ExternalImportStarted = "externalImportStarted.handlebars"
  ExternalImportStarted = "externalImportStarted.handlebars",
  SecretRequestCompleted = "secretRequestCompleted.handlebars"
}

export enum SmtpHost {

@@ -0,0 +1,33 @@
<html>

<head>
  <meta charset="utf-8" />
  <meta http-equiv="x-ua-compatible" content="ie=edge" />
  <title>Secret Request Completed</title>
</head>

<body>
  <h2>Infisical</h2>
  <h2>A secret has been shared with you</h2>

  {{#if name}}
  <p>Secret request name: {{name}}</p>
  {{/if}}
  {{#if respondentUsername}}
  <p>Shared by: {{respondentUsername}}</p>
  {{/if}}

  <br />
  <br/>

  <p>
    You can access the secret by clicking the link below.
  </p>
  <p>
    <a href="{{secretRequestUrl}}">Access Secret</a>
  </p>

  {{emailFooter}}
</body>

</html>

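The {{name}}, {{respondentUsername}} and {{secretRequestUrl}} placeholders in this template line up with the substitutions object passed from setSecretRequestValue earlier in the diff; an equivalent standalone call would look roughly like this, with all values being placeholders.

await smtpService.sendMail({
  template: SmtpTemplates.SecretRequestCompleted,
  recipients: ["requester@example.com"],
  subjectLine: "Secret Request Completed",
  substitutions: {
    name: "Staging DB password", // optional, guarded by {{#if name}}
    respondentUsername: "responder@example.com", // optional, guarded by {{#if respondentUsername}}
    secretRequestUrl: "https://app.infisical.com/organization/secret-sharing?selectedTab=request-secret"
  }
});
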
@@ -291,6 +291,15 @@ export const superAdminServiceFactory = ({
    return user;
  };

  const grantServerAdminAccessToUser = async (userId: string) => {
    if (!licenseService.onPremFeatures?.instanceUserManagement) {
      throw new BadRequestError({
        message: "Failed to grant server admin access to user due to plan restriction. Upgrade to Infisical's Pro plan."
      });
    }
    await userDAL.updateById(userId, { superAdmin: true });
  };

  const getAdminSlackConfig = async () => {
    const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);

@@ -381,6 +390,7 @@ export const superAdminServiceFactory = ({
    deleteUser,
    getAdminSlackConfig,
    updateRootEncryptionStrategy,
    getConfiguredEncryptionStrategies
    getConfiguredEncryptionStrategies,
    grantServerAdminAccessToUser
  };
};

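grantServerAdminAccessToUser is gated on the instanceUserManagement license flag; a hypothetical admin-only route handler (name and request shape assumed, not part of this diff) would simply forward the target user ID:

// Hypothetical wiring sketch.
const handleGrantServerAdminAccess = async (req: { params: { userId: string } }) => {
  await superAdminService.grantServerAdminAccessToUser(req.params.userId);
  return { message: "Server admin access granted" };
};
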
@@ -13,7 +13,9 @@ export enum PostHogEventTypes {
  IntegrationCreated = "Integration Created",
  MachineIdentityCreated = "Machine Identity Created",
  UserOrgInvitation = "User Org Invitation",
  TelemetryInstanceStats = "Self Hosted Instance Stats"
  TelemetryInstanceStats = "Self Hosted Instance Stats",
  SecretRequestCreated = "Secret Request Created",
  SecretRequestDeleted = "Secret Request Deleted"
}

export type TSecretModifiedEvent = {
@@ -120,6 +122,23 @@ export type TTelemetryInstanceStatsEvent = {
  };
};

export type TSecretRequestCreatedEvent = {
  event: PostHogEventTypes.SecretRequestCreated;
  properties: {
    secretRequestId: string;
    organizationId: string;
    secretRequestName?: string;
  };
};

export type TSecretRequestDeletedEvent = {
  event: PostHogEventTypes.SecretRequestDeleted;
  properties: {
    secretRequestId: string;
    organizationId: string;
  };
};

export type TPostHogEvent = { distinctId: string } & (
  | TSecretModifiedEvent
  | TAdminInitEvent
@@ -130,4 +149,6 @@ export type TPostHogEvent = { distinctId: string } & (
  | TIntegrationCreatedEvent
  | TProjectCreateEvent
  | TTelemetryInstanceStatsEvent
  | TSecretRequestCreatedEvent
  | TSecretRequestDeletedEvent
);

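With the two new event types, secret-request creation and deletion can be reported to PostHog; a hedged sketch of a capture call, assuming the backend's telemetry service exposes a sendPostHogEvents helper for these events as it does for the existing ones, with placeholder values throughout.

// Sketch only: `telemetryService` wiring and the distinct-id value are assumptions.
await telemetryService.sendPostHogEvents({
  event: PostHogEventTypes.SecretRequestCreated,
  distinctId: "requester@example.com",
  properties: {
    secretRequestId: "request-uuid",
    organizationId: "org-uuid",
    secretRequestName: "Staging DB password"
  }
});
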
8
cli/config/infisical-relay.yaml
Normal file
8
cli/config/infisical-relay.yaml
Normal file
@@ -0,0 +1,8 @@
public_ip: 127.0.0.1
auth_secret: changeThisOnProduction
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path
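This config file is consumed by the new `infisical gateway relay` command added later in this diff through its `--config` flag. As a minimal sketch, the same wiring can be expressed directly in Go with `NewGatewayRelay` and `Run` from the relay package introduced below; the config path used here is illustrative only:

```go
// Minimal sketch of how the relay config above is consumed; mirrors what
// `infisical gateway relay --config <path>` does via NewGatewayRelay/Run.
package main

import (
	"log"

	"github.com/Infisical/infisical-merge/packages/gateway"
)

func main() {
	// The path is illustrative; pass whichever file holds the YAML above.
	relay, err := gateway.NewGatewayRelay("/etc/infisical/infisical-relay.yaml")
	if err != nil {
		log.Fatalf("failed to initialize relay: %v", err)
	}
	// Run blocks until SIGINT/SIGTERM, serving TURN on the configured port.
	if err := relay.Run(); err != nil {
		log.Fatalf("relay exited with error: %v", err)
	}
}
```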
20
cli/go.mod
@@ -1,6 +1,8 @@
|
||||
module github.com/Infisical/infisical-merge
|
||||
|
||||
go 1.21
|
||||
go 1.23.0
|
||||
|
||||
toolchain go1.23.5
|
||||
|
||||
require (
|
||||
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
|
||||
@@ -21,12 +23,13 @@ require (
|
||||
github.com/pion/logging v0.2.3
|
||||
github.com/pion/turn/v4 v4.0.0
|
||||
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a
|
||||
github.com/quic-go/quic-go v0.50.0
|
||||
github.com/rs/cors v1.11.0
|
||||
github.com/rs/zerolog v1.26.1
|
||||
github.com/spf13/cobra v1.6.1
|
||||
github.com/spf13/viper v1.8.1
|
||||
github.com/stretchr/testify v1.9.0
|
||||
golang.org/x/crypto v0.33.0
|
||||
golang.org/x/crypto v0.35.0
|
||||
golang.org/x/term v0.29.0
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
)
|
||||
@@ -58,13 +61,15 @@ require (
|
||||
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/fsnotify/fsnotify v1.4.9 // indirect
|
||||
github.com/go-logr/logr v1.4.1 // indirect
|
||||
github.com/go-logr/logr v1.4.2 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-openapi/errors v0.20.2 // indirect
|
||||
github.com/go-openapi/strfmt v0.21.3 // indirect
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
|
||||
github.com/godbus/dbus/v5 v5.1.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect
|
||||
github.com/google/s2a-go v0.1.7 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.12.5 // indirect
|
||||
@@ -82,6 +87,7 @@ require (
|
||||
github.com/muesli/mango-pflag v0.1.0 // indirect
|
||||
github.com/muesli/termenv v0.15.2 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/onsi/ginkgo/v2 v2.22.2 // indirect
|
||||
github.com/pelletier/go-toml v1.9.3 // indirect
|
||||
github.com/pion/dtls/v3 v3.0.4 // indirect
|
||||
github.com/pion/randutil v0.1.0 // indirect
|
||||
@@ -103,17 +109,21 @@ require (
|
||||
go.opentelemetry.io/otel v1.24.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.24.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.24.0 // indirect
|
||||
golang.org/x/net v0.33.0 // indirect
|
||||
go.uber.org/mock v0.5.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 // indirect
|
||||
golang.org/x/mod v0.23.0 // indirect
|
||||
golang.org/x/net v0.35.0 // indirect
|
||||
golang.org/x/oauth2 v0.21.0 // indirect
|
||||
golang.org/x/sync v0.11.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/text v0.22.0 // indirect
|
||||
golang.org/x/time v0.6.0 // indirect
|
||||
golang.org/x/tools v0.30.0 // indirect
|
||||
google.golang.org/api v0.188.0 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b // indirect
|
||||
google.golang.org/grpc v1.64.1 // indirect
|
||||
google.golang.org/protobuf v1.34.2 // indirect
|
||||
google.golang.org/protobuf v1.36.1 // indirect
|
||||
gopkg.in/ini.v1 v1.62.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
34
cli/go.sum
@@ -144,8 +144,8 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
|
||||
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
|
||||
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-openapi/errors v0.20.2 h1:dxy7PGTqEh94zj2E3h1cUmQQWiM1+aeCROfAr02EmK8=
|
||||
@@ -154,6 +154,8 @@ github.com/go-openapi/strfmt v0.21.3 h1:xwhj5X6CjXEZZHMWy1zKJxvW9AfHC9pkyUjLvHtK
|
||||
github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg=
|
||||
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
|
||||
github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
@@ -222,6 +224,8 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe
|
||||
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 h1:+J3r2e8+RsmN3vKfo75g0YSY61ms37qzPglu4p0sGro=
|
||||
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
|
||||
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
|
||||
@@ -342,6 +346,10 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWb
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
|
||||
github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
|
||||
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
|
||||
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
|
||||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||
github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ=
|
||||
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||
@@ -369,6 +377,8 @@ github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndr
|
||||
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a h1:Ey0XWvrg6u6hyIn1Kd/jCCmL+bMv9El81tvuGBbxZGg=
|
||||
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a/go.mod h1:oa2sAs9tGai3VldabTV0eWejt/O4/OOD7azP8GaikqU=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/quic-go/quic-go v0.50.0 h1:3H/ld1pa3CYhkcc20TPIyG1bNsdhn9qZBGN3b9/UyUo=
|
||||
github.com/quic-go/quic-go v0.50.0/go.mod h1:Vim6OmUvlYdwBhXP9ZVrtGmCMWa3wEqhq3NgYrI8b4E=
|
||||
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
@@ -461,6 +471,8 @@ go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8p
|
||||
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
|
||||
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
|
||||
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
|
||||
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
||||
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
@@ -472,8 +484,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
|
||||
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
|
||||
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
|
||||
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
@@ -484,6 +496,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
|
||||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 h1:aWwlzYV971S4BXRS9AmqwDLAD85ouC6X+pocatKY58c=
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
@@ -509,6 +523,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM=
|
||||
golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -547,8 +563,8 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLd
|
||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@@ -697,6 +713,8 @@ golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4f
|
||||
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
||||
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
|
||||
golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
|
||||
golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -811,8 +829,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
|
||||
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
|
||||
google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk=
|
||||
google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
|
@@ -53,35 +53,85 @@ var gatewayCmd = &cobra.Command{
|
||||
<-sigCh
|
||||
close(sigStopCh)
|
||||
cancel()
|
||||
|
||||
// If we get a second signal, force exit
|
||||
<-sigCh
|
||||
log.Warn().Msgf("Force exit triggered")
|
||||
os.Exit(1)
|
||||
}()
|
||||
|
||||
// Main gateway retry loop with proper context handling
|
||||
retryTicker := time.NewTicker(5 * time.Second)
|
||||
defer retryTicker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-sigStopCh:
|
||||
if ctx.Err() != nil {
|
||||
log.Info().Msg("Shutting down gateway")
|
||||
return
|
||||
default:
|
||||
gatewayInstance, err := gateway.NewGateway(token.Token)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
}
|
||||
gatewayInstance, err := gateway.NewGateway(token.Token)
|
||||
if err != nil {
|
||||
util.HandleError(err)
|
||||
}
|
||||
|
||||
if err = gatewayInstance.ConnectWithRelay(); err != nil {
|
||||
log.Error().Msgf("Gateway connection error with relay: %s", err)
|
||||
log.Info().Msg("Restarting gateway...")
|
||||
time.Sleep(5 * time.Second)
|
||||
continue
|
||||
}
|
||||
err = gatewayInstance.Listen(ctx)
|
||||
if err == nil {
|
||||
// meaning everything went smooth and we are exiting
|
||||
if err = gatewayInstance.ConnectWithRelay(); err != nil {
|
||||
if ctx.Err() != nil {
|
||||
log.Info().Msg("Shutting down gateway")
|
||||
return
|
||||
}
|
||||
|
||||
log.Error().Msgf("Gateway listen error: %s", err)
|
||||
log.Info().Msg("Restarting gateway...")
|
||||
time.Sleep(5 * time.Second)
|
||||
log.Error().Msgf("Gateway connection error with relay: %s", err)
|
||||
log.Info().Msg("Retrying connection in 5 seconds...")
|
||||
select {
|
||||
case <-retryTicker.C:
|
||||
continue
|
||||
case <-ctx.Done():
|
||||
log.Info().Msg("Shutting down gateway")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
err = gatewayInstance.Listen(ctx)
|
||||
if ctx.Err() != nil {
|
||||
log.Info().Msg("Gateway shutdown complete")
|
||||
return
|
||||
}
|
||||
log.Error().Msgf("Gateway listen error: %s", err)
|
||||
log.Info().Msg("Retrying connection in 5 seconds...")
|
||||
select {
|
||||
case <-retryTicker.C:
|
||||
continue
|
||||
case <-ctx.Done():
|
||||
log.Info().Msg("Shutting down gateway")
|
||||
return
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
var gatewayRelayCmd = &cobra.Command{
|
||||
Example: `infisical gateway relay`,
|
||||
Short: "Used to run infisical gateway relay",
|
||||
Use: "relay",
|
||||
DisableFlagsInUseLine: true,
|
||||
Args: cobra.NoArgs,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
relayConfigFilePath, err := cmd.Flags().GetString("config")
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
if relayConfigFilePath == "" {
|
||||
util.HandleError(fmt.Errorf("Missing config file"))
|
||||
}
|
||||
|
||||
gatewayRelay, err := gateway.NewGatewayRelay(relayConfigFilePath)
|
||||
if err != nil {
|
||||
util.HandleError(err, "Failed to initialize gateway")
|
||||
}
|
||||
err = gatewayRelay.Run()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Failed to start gateway")
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -93,5 +143,9 @@ func init() {
|
||||
})
|
||||
gatewayCmd.Flags().String("token", "", "Connect with Infisical using machine identity access token")
|
||||
|
||||
gatewayRelayCmd.Flags().String("config", "", "Relay config yaml file path")
|
||||
|
||||
gatewayCmd.AddCommand(gatewayRelayCmd)
|
||||
|
||||
rootCmd.AddCommand(gatewayCmd)
|
||||
}
|
||||
|
@@ -3,20 +3,49 @@ package gateway
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"net"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/quic-go/quic-go"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func handleConnection(conn net.Conn) {
|
||||
defer conn.Close()
|
||||
log.Info().Msgf("New connection from: %s", conn.RemoteAddr().String())
|
||||
func handleConnection(ctx context.Context, quicConn quic.Connection) {
|
||||
log.Info().Msgf("New connection from: %s", quicConn.RemoteAddr().String())
|
||||
// Use WaitGroup to track all streams
|
||||
var wg sync.WaitGroup
|
||||
for {
|
||||
// Accept the first stream, which we'll use for commands
|
||||
stream, err := quicConn.AcceptStream(ctx)
|
||||
if err != nil {
|
||||
log.Printf("Failed to accept QUIC stream: %v", err)
|
||||
break
|
||||
}
|
||||
wg.Add(1)
|
||||
go func(stream quic.Stream) {
|
||||
defer wg.Done()
|
||||
defer stream.Close()
|
||||
|
||||
handleStream(stream, quicConn)
|
||||
}(stream)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
log.Printf("All streams closed for connection: %s", quicConn.RemoteAddr().String())
|
||||
}
|
||||
|
||||
func handleStream(stream quic.Stream, quicConn quic.Connection) {
|
||||
streamID := stream.StreamID()
|
||||
log.Printf("New stream %d from: %s", streamID, quicConn.RemoteAddr().String())
|
||||
|
||||
// Use buffered reader for better handling of fragmented data
|
||||
reader := bufio.NewReader(conn)
|
||||
reader := bufio.NewReader(stream)
|
||||
defer stream.Close()
|
||||
|
||||
for {
|
||||
msg, err := reader.ReadBytes('\n')
|
||||
if err != nil {
|
||||
@@ -39,6 +68,7 @@ func handleConnection(conn net.Conn) {
|
||||
return
|
||||
}
|
||||
defer destTarget.Close()
|
||||
log.Info().Msgf("Starting secure transmission between %s->%s", quicConn.LocalAddr().String(), destTarget.LocalAddr().String())
|
||||
|
||||
// Handle buffered data
|
||||
buffered := reader.Buffered()
|
||||
@@ -56,10 +86,11 @@ func handleConnection(conn net.Conn) {
|
||||
}
|
||||
}
|
||||
|
||||
CopyData(conn, destTarget)
|
||||
CopyDataFromQuicToTcp(stream, destTarget)
|
||||
log.Info().Msgf("Ending secure transmission between %s->%s", quicConn.LocalAddr().String(), destTarget.LocalAddr().String())
|
||||
return
|
||||
case "PING":
|
||||
if _, err := conn.Write([]byte("PONG")); err != nil {
|
||||
if _, err := stream.Write([]byte("PONG\n")); err != nil {
|
||||
log.Error().Msgf("Error writing PONG response: %v", err)
|
||||
}
|
||||
return
|
||||
@@ -74,34 +105,38 @@ type CloseWrite interface {
|
||||
CloseWrite() error
|
||||
}
|
||||
|
||||
func CopyData(src, dst net.Conn) {
|
||||
func CopyDataFromQuicToTcp(quicStream quic.Stream, tcpConn net.Conn) {
|
||||
// Create a WaitGroup to wait for both copy operations
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(2)
|
||||
|
||||
copyAndClose := func(dst, src net.Conn, done chan<- bool) {
|
||||
// Start copying from QUIC stream to TCP
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
_, err := io.Copy(dst, src)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
log.Error().Msgf("Copy error: %v", err)
|
||||
if _, err := io.Copy(tcpConn, quicStream); err != nil {
|
||||
log.Error().Msgf("Error copying quic->postgres: %v", err)
|
||||
}
|
||||
|
||||
// Signal we're done writing
|
||||
done <- true
|
||||
|
||||
// Half close the connection if possible
|
||||
if c, ok := dst.(CloseWrite); ok {
|
||||
c.CloseWrite()
|
||||
if e, ok := tcpConn.(CloseWrite); ok {
|
||||
log.Debug().Msg("Closing TCP write end")
|
||||
e.CloseWrite()
|
||||
} else {
|
||||
log.Debug().Msg("TCP connection does not support CloseWrite")
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
done1 := make(chan bool, 1)
|
||||
done2 := make(chan bool, 1)
|
||||
|
||||
go copyAndClose(dst, src, done1)
|
||||
go copyAndClose(src, dst, done2)
|
||||
// Start copying from TCP to QUIC stream
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if _, err := io.Copy(quicStream, tcpConn); err != nil {
|
||||
log.Debug().Msgf("Error copying postgres->quic: %v", err)
|
||||
}
|
||||
// Close the write side of the QUIC stream
|
||||
if err := quicStream.Close(); err != nil && !strings.Contains(err.Error(), "close called for canceled stream") {
|
||||
log.Error().Msgf("Error closing QUIC stream write: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// Wait for both copies to complete
|
||||
<-done1
|
||||
<-done2
|
||||
wg.Wait()
|
||||
}
|
||||
|
@@ -6,15 +6,19 @@ import (
|
||||
"crypto/x509"
|
||||
"fmt"
|
||||
"net"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/systemd"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/pion/logging"
|
||||
"github.com/pion/turn/v4"
|
||||
"github.com/rs/zerolog/log"
|
||||
|
||||
"github.com/quic-go/quic-go"
|
||||
)
|
||||
|
||||
type GatewayConfig struct {
|
||||
@@ -56,13 +60,12 @@ func (g *Gateway) ConnectWithRelay() error {
|
||||
if relayPort == "5349" {
|
||||
log.Info().Msgf("Provided relay port %s. Using TLS", relayPort)
|
||||
conn, err = tls.Dial("tcp", relayDetails.TurnServerAddress, &tls.Config{
|
||||
InsecureSkipVerify: false,
|
||||
ServerName: relayAddress,
|
||||
ServerName: relayAddress,
|
||||
})
|
||||
} else {
|
||||
log.Info().Msgf("Provided relay port %s. Using non TLS connection.", relayPort)
|
||||
peerAddr, err := net.ResolveTCPAddr("tcp", relayDetails.TurnServerAddress)
|
||||
if err != nil {
|
||||
peerAddr, errPeer := net.ResolveTCPAddr("tcp", relayDetails.TurnServerAddress)
|
||||
if errPeer != nil {
|
||||
return fmt.Errorf("Failed to parse turn server address: %w", err)
|
||||
}
|
||||
conn, err = net.DialTCP("tcp", nil, peerAddr)
|
||||
@@ -74,6 +77,10 @@ func (g *Gateway) ConnectWithRelay() error {
|
||||
|
||||
// Start a new TURN Client and wrap our net.Conn in a STUNConn
|
||||
// This allows us to simulate datagram based communication over a net.Conn
|
||||
logger := logging.NewDefaultLoggerFactory()
|
||||
if os.Getenv("LOG_LEVEL") == "debug" {
|
||||
logger.DefaultLogLevel = logging.LogLevelDebug
|
||||
}
|
||||
cfg := &turn.ClientConfig{
|
||||
STUNServerAddr: relayDetails.TurnServerAddress,
|
||||
TURNServerAddr: relayDetails.TurnServerAddress,
|
||||
@@ -81,7 +88,7 @@ func (g *Gateway) ConnectWithRelay() error {
|
||||
Username: relayDetails.TurnServerUsername,
|
||||
Password: relayDetails.TurnServerPassword,
|
||||
Realm: relayDetails.TurnServerRealm,
|
||||
LoggerFactory: logging.NewDefaultLoggerFactory(),
|
||||
LoggerFactory: logger,
|
||||
}
|
||||
|
||||
client, err := turn.NewClient(cfg)
|
||||
@@ -95,10 +102,6 @@ func (g *Gateway) ConnectWithRelay() error {
|
||||
TurnServerAddress: relayDetails.TurnServerAddress,
|
||||
InfisicalStaticIp: relayDetails.InfisicalStaticIp,
|
||||
}
|
||||
// if port not specific allow all port
|
||||
if relayDetails.InfisicalStaticIp != "" && !strings.Contains(relayDetails.InfisicalStaticIp, ":") {
|
||||
g.config.InfisicalStaticIp = g.config.InfisicalStaticIp + ":0"
|
||||
}
|
||||
|
||||
g.client = client
|
||||
return nil
|
||||
@@ -112,23 +115,24 @@ func (g *Gateway) Listen(ctx context.Context) error {
|
||||
}
|
||||
|
||||
log.Info().Msg("Connected with relay")
|
||||
|
||||
// Allocate a relay socket on the TURN server. On success, it
|
||||
// will return a net.PacketConn which represents the remote
|
||||
// socket.
|
||||
relayNonTlsConn, err := g.client.AllocateTCP()
|
||||
relayUdpConnection, err := g.client.Allocate()
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to allocate relay connection: %w", err)
|
||||
}
|
||||
|
||||
log.Info().Msg(relayNonTlsConn.Addr().String())
|
||||
log.Info().Msg(relayUdpConnection.LocalAddr().String())
|
||||
defer func() {
|
||||
if closeErr := relayNonTlsConn.Close(); closeErr != nil {
|
||||
if closeErr := relayUdpConnection.Close(); closeErr != nil {
|
||||
log.Error().Msgf("Failed to close connection: %s", closeErr)
|
||||
}
|
||||
}()
|
||||
|
||||
gatewayCert, err := api.CallExchangeRelayCertV1(g.httpClient, api.ExchangeRelayCertRequestV1{
|
||||
RelayAddress: relayNonTlsConn.Addr().String(),
|
||||
RelayAddress: relayUdpConnection.LocalAddr().String(),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -139,176 +143,221 @@ func (g *Gateway) Listen(ctx context.Context) error {
|
||||
g.config.Certificate = gatewayCert.Certificate
|
||||
g.config.CertificateChain = gatewayCert.CertificateChain
|
||||
|
||||
done := make(chan bool, 1)
|
||||
errCh := make(chan error, 1)
|
||||
shutdownCh := make(chan bool, 1)
|
||||
|
||||
if g.config.InfisicalStaticIp != "" {
|
||||
log.Info().Msgf("Found static ip from Infisical: %s. Creating permission IP lifecycle", g.config.InfisicalStaticIp)
|
||||
peerAddr, err := net.ResolveTCPAddr("tcp", g.config.InfisicalStaticIp)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to parse infisical static ip: %w", err)
|
||||
}
|
||||
g.registerPermissionLifecycle(func() error {
|
||||
err := relayNonTlsConn.CreatePermissions(peerAddr)
|
||||
return err
|
||||
}, done)
|
||||
if err = g.createPermissionForStaticIps(g.config.InfisicalStaticIp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g.registerHeartBeat(ctx, errCh)
|
||||
|
||||
cert, err := tls.X509KeyPair([]byte(gatewayCert.Certificate), []byte(gatewayCert.PrivateKey))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to parse cert: %s", err)
|
||||
return fmt.Errorf("failed to parse cert: %w", err)
|
||||
}
|
||||
|
||||
caCertPool := x509.NewCertPool()
|
||||
caCertPool.AppendCertsFromPEM([]byte(gatewayCert.CertificateChain))
|
||||
|
||||
relayConn := tls.NewListener(relayNonTlsConn, &tls.Config{
|
||||
// Setup QUIC server
|
||||
tlsConfig := &tls.Config{
|
||||
Certificates: []tls.Certificate{cert},
|
||||
MinVersion: tls.VersionTLS12,
|
||||
ClientCAs: caCertPool,
|
||||
ClientAuth: tls.RequireAndVerifyClientCert,
|
||||
})
|
||||
NextProtos: []string{"infisical-gateway"},
|
||||
}
|
||||
|
||||
// Setup QUIC listener on the relayConn
|
||||
quicConfig := &quic.Config{
|
||||
EnableDatagrams: true,
|
||||
MaxIdleTimeout: 10 * time.Second,
|
||||
KeepAlivePeriod: 2 * time.Second,
|
||||
}
|
||||
|
||||
g.registerRelayIsActive(ctx, relayUdpConnection.LocalAddr().String(), errCh)
|
||||
quicListener, err := quic.Listen(relayUdpConnection, tlsConfig, quicConfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to listen for QUIC: %w", err)
|
||||
}
|
||||
defer quicListener.Close()
|
||||
|
||||
log.Printf("Listener started on %s", quicListener.Addr())
|
||||
|
||||
errCh := make(chan error, 1)
|
||||
log.Info().Msg("Gateway started successfully")
|
||||
g.registerHeartBeat(errCh, done)
|
||||
g.registerRelayIsActive(relayNonTlsConn.Addr().String(), errCh, done)
|
||||
|
||||
// Create a WaitGroup to track active connections
|
||||
var wg sync.WaitGroup
|
||||
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case <-done:
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-shutdownCh:
|
||||
return
|
||||
default:
|
||||
// Accept new relay connection
|
||||
conn, err := relayConn.Accept()
|
||||
quicConn, err := quicListener.Accept(context.Background())
|
||||
if err != nil {
|
||||
if !strings.Contains(err.Error(), "data contains incomplete STUN or TURN frame") {
|
||||
log.Error().Msgf("Failed to accept connection: %v", err)
|
||||
}
|
||||
log.Printf("Failed to accept QUIC connection: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
tlsConn, ok := conn.(*tls.Conn)
|
||||
if !ok {
|
||||
log.Error().Msg("Failed to convert to TLS connection")
|
||||
conn.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
err = tlsConn.Handshake()
|
||||
if err != nil {
|
||||
log.Error().Msgf("TLS handshake failed: %v", err)
|
||||
conn.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
// Get connection state which contains certificate information
|
||||
state := tlsConn.ConnectionState()
|
||||
if len(state.PeerCertificates) > 0 {
|
||||
organizationUnit := state.PeerCertificates[0].Subject.OrganizationalUnit
|
||||
commonName := state.PeerCertificates[0].Subject.CommonName
|
||||
tlsState := quicConn.ConnectionState().TLS
|
||||
if len(tlsState.PeerCertificates) > 0 {
|
||||
organizationUnit := tlsState.PeerCertificates[0].Subject.OrganizationalUnit
|
||||
commonName := tlsState.PeerCertificates[0].Subject.CommonName
|
||||
if organizationUnit[0] != "gateway-client" || commonName != "cloud" {
|
||||
log.Error().Msgf("Client certificate verification failed. Received %s, %s", organizationUnit, commonName)
|
||||
errMsg := fmt.Sprintf("Client certificate verification failed. Received %s, %s", organizationUnit, commonName)
|
||||
log.Error().Msg(errMsg)
|
||||
quicConn.CloseWithError(1, errMsg)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Handle the connection in a goroutine
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
go func(c quic.Connection) {
|
||||
defer wg.Done()
|
||||
handleConnection(conn)
|
||||
}()
|
||||
defer c.CloseWithError(0, "connection closed")
|
||||
|
||||
// Monitor parent context to close this connection when needed
|
||||
go func() {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
c.CloseWithError(0, "connection closed") // Force close connection when context is canceled
|
||||
case <-shutdownCh:
|
||||
c.CloseWithError(0, "connection closed") // Force close connection when accepting loop is done
|
||||
}
|
||||
}()
|
||||
|
||||
handleConnection(ctx, c)
|
||||
}(quicConn)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
var isShutdown bool
|
||||
// make this compatible with systemd notify mode
|
||||
systemd.SdNotify(false, systemd.SdNotifyReady)
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
log.Info().Msg("Shutting down gateway...")
|
||||
isShutdown = true
|
||||
case err = <-errCh:
|
||||
log.Error().Err(err).Msg("Gateway error occurred")
|
||||
}
|
||||
|
||||
// Signal the accept loop to stop
|
||||
close(done)
|
||||
wg.Wait()
|
||||
close(shutdownCh)
|
||||
|
||||
if isShutdown {
|
||||
log.Info().Msg("Gateway shutdown complete")
|
||||
// Set a timeout for waiting on connections to close
|
||||
waitCh := make(chan struct{})
|
||||
go func() {
|
||||
wg.Wait()
|
||||
close(waitCh)
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-waitCh:
|
||||
// All connections closed normally
|
||||
case <-time.After(5 * time.Second):
|
||||
log.Warn().Msg("Timeout waiting for connections to close gracefully")
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (g *Gateway) registerHeartBeat(errCh chan error, done chan bool) {
|
||||
ticker := time.NewTicker(1 * time.Hour)
|
||||
func (g *Gateway) registerHeartBeat(ctx context.Context, errCh chan error) {
|
||||
ticker := time.NewTicker(30 * time.Minute)
|
||||
defer ticker.Stop()
|
||||
|
||||
go func() {
|
||||
time.Sleep(10 * time.Second)
|
||||
log.Info().Msg("Registering first heart beat")
|
||||
err := api.CallGatewayHeartBeatV1(g.httpClient)
|
||||
if err != nil {
|
||||
log.Error().Msgf("Failed to register heartbeat: %s", err)
|
||||
}
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-done:
|
||||
ticker.Stop()
|
||||
return
|
||||
case <-ticker.C:
|
||||
log.Info().Msg("Registering heart beat")
|
||||
err := api.CallGatewayHeartBeatV1(g.httpClient)
|
||||
if err := api.CallGatewayHeartBeatV1(g.httpClient); err != nil {
|
||||
errCh <- err
|
||||
} else {
|
||||
log.Info().Msg("Gateway is reachable by Infisical")
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (g *Gateway) registerPermissionLifecycle(permissionFn func() error, done chan bool) {
|
||||
ticker := time.NewTicker(3 * time.Minute)
|
||||
|
||||
go func() {
|
||||
// wait for 5 mins
|
||||
permissionFn()
|
||||
log.Printf("Created permission for incoming connections")
|
||||
for {
|
||||
select {
|
||||
case <-done:
|
||||
ticker.Stop()
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-ticker.C:
|
||||
permissionFn()
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (g *Gateway) registerRelayIsActive(serverAddr string, errCh chan error, done chan bool) {
|
||||
func (g *Gateway) createPermissionForStaticIps(staticIps string) error {
|
||||
if staticIps == "" {
|
||||
return fmt.Errorf("Missing Infisical static ips for permission")
|
||||
}
|
||||
|
||||
splittedIps := strings.Split(staticIps, ",")
|
||||
resolvedIps := make([]net.Addr, 0)
|
||||
for _, ip := range splittedIps {
|
||||
ip = strings.TrimSpace(ip)
|
||||
if ip == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// if port not specified, allow all ports
|
||||
if !strings.Contains(ip, ":") {
|
||||
ip = ip + ":0"
|
||||
}
|
||||
|
||||
peerAddr, err := net.ResolveUDPAddr("udp", ip)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to resolve static ip for permission: %w", err)
|
||||
}
|
||||
|
||||
resolvedIps = append(resolvedIps, peerAddr)
|
||||
}
|
||||
|
||||
if err := g.client.CreatePermission(resolvedIps...); err != nil {
|
||||
return fmt.Errorf("Failed to set ip permission: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gateway) registerRelayIsActive(ctx context.Context, relayAddress string, errCh chan error) error {
|
||||
ticker := time.NewTicker(10 * time.Second)
|
||||
maxFailures := 3
|
||||
failures := 0
|
||||
|
||||
go func() {
|
||||
time.Sleep(5 * time.Second)
|
||||
time.Sleep(2 * time.Second)
|
||||
for {
|
||||
select {
|
||||
case <-done:
|
||||
ticker.Stop()
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-ticker.C:
|
||||
conn, err := net.Dial("tcp", serverAddr)
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
// Configure TLS to skip verification
|
||||
tlsConfig := &tls.Config{
|
||||
InsecureSkipVerify: true,
|
||||
NextProtos: []string{"infisical-gateway"},
|
||||
}
|
||||
if conn != nil {
|
||||
conn.Close()
|
||||
quicConfig := &quic.Config{
|
||||
EnableDatagrams: true,
|
||||
}
|
||||
func() {
|
||||
checkCtx, cancel := context.WithTimeout(ctx, 3*time.Second)
|
||||
defer cancel()
|
||||
conn, err := quic.DialAddr(checkCtx, relayAddress, tlsConfig, quicConfig)
|
||||
if err != nil {
|
||||
failures++
|
||||
log.Warn().Err(err).Int("failures", failures).Msg("Relay connection check failed")
|
||||
if failures >= maxFailures {
|
||||
errCh <- fmt.Errorf("relay connection check failed: %w", err)
|
||||
}
|
||||
}
|
||||
if conn != nil {
|
||||
conn.CloseWithError(0, "closed")
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
187
cli/packages/gateway/relay.go
Normal file
@@ -0,0 +1,187 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"os"
|
||||
"os/signal"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"syscall"
|
||||
|
||||
udplistener "github.com/Infisical/infisical-merge/packages/gateway/udp_listener"
|
||||
"github.com/Infisical/infisical-merge/packages/systemd"
|
||||
"github.com/pion/logging"
|
||||
"github.com/pion/turn/v4"
|
||||
"github.com/rs/zerolog/log"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var (
|
||||
errMissingTlsCert = errors.New("Missing TLS files")
|
||||
)
|
||||
|
||||
type GatewayRelay struct {
|
||||
Config *GatewayRelayConfig
|
||||
}
|
||||
|
||||
type GatewayRelayConfig struct {
|
||||
PublicIP string `yaml:"public_ip"`
|
||||
Port int `yaml:"port"`
|
||||
Realm string `yaml:"realm"`
|
||||
AuthSecret string `yaml:"auth_secret"`
|
||||
RelayMinPort uint16 `yaml:"relay_min_port"`
|
||||
RelayMaxPort uint16 `yaml:"relay_max_port"`
|
||||
TlsCertPath string `yaml:"tls_cert_path"`
|
||||
TlsPrivateKeyPath string `yaml:"tls_private_key_path"`
|
||||
TlsCaPath string `yaml:"tls_ca_path"`
|
||||
|
||||
tls tls.Certificate
|
||||
tlsCa string
|
||||
isTlsEnabled bool
|
||||
}
|
||||
|
||||
func NewGatewayRelay(configFilePath string) (*GatewayRelay, error) {
|
||||
cfgFile, err := os.ReadFile(configFilePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var cfg GatewayRelayConfig
|
||||
if err := yaml.Unmarshal(cfgFile, &cfg); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if cfg.PublicIP == "" {
|
||||
return nil, fmt.Errorf("Missing public ip")
|
||||
}
|
||||
|
||||
if cfg.AuthSecret == "" {
|
||||
return nil, fmt.Errorf("Missing auth secret")
|
||||
}
|
||||
|
||||
if cfg.Realm == "" {
|
||||
cfg.Realm = "infisical.org"
|
||||
}
|
||||
|
||||
if cfg.RelayMinPort == 0 {
|
||||
cfg.RelayMinPort = 49152
|
||||
}
|
||||
|
||||
if cfg.RelayMaxPort == 0 {
|
||||
cfg.RelayMaxPort = 65535
|
||||
}
|
||||
|
||||
if cfg.Port == 0 {
|
||||
cfg.Port = 3478
|
||||
} else if cfg.Port == 5349 {
|
||||
if cfg.TlsCertPath == "" || cfg.TlsPrivateKeyPath == "" {
|
||||
return nil, errMissingTlsCert
|
||||
}
|
||||
|
||||
cert, err := tls.LoadX509KeyPair(cfg.TlsCertPath, cfg.TlsPrivateKeyPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to read load server tls key pair: %w", err)
|
||||
}
|
||||
|
||||
if cfg.TlsCaPath != "" {
|
||||
ca, err := os.ReadFile(cfg.TlsCaPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to read tls ca: %w", err)
|
||||
}
|
||||
cfg.tlsCa = string(ca)
|
||||
}
|
||||
|
||||
cfg.tls = cert
|
||||
cfg.isTlsEnabled = true
|
||||
}
|
||||
|
||||
return &GatewayRelay{
|
||||
Config: &cfg,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (g *GatewayRelay) Run() error {
|
||||
addr, err := net.ResolveTCPAddr("tcp", "0.0.0.0:"+strconv.Itoa(g.Config.Port))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to parse server address: %s", err)
|
||||
}
|
||||
|
||||
// NewLongTermAuthHandler takes a pion.LeveledLogger. This allows you to intercept messages
|
||||
// and process them yourself.
|
||||
logger := logging.NewDefaultLeveledLoggerForScope("lt-creds", logging.LogLevelTrace, os.Stdout)
|
||||
|
||||
// Create `numThreads` UDP listeners to pass into pion/turn
|
||||
// pion/turn itself doesn't allocate any UDP sockets, but lets the user pass them in
|
||||
// this allows us to add logging, storage or modify inbound/outbound traffic
|
||||
// UDP listeners share the same local address:port with setting SO_REUSEPORT and the kernel
|
||||
// will load-balance received packets per the IP 5-tuple
|
||||
listenerConfig := udplistener.SetupListenerConfig()
|
||||
|
||||
publicIP := g.Config.PublicIP
|
||||
relayAddressGenerator := &turn.RelayAddressGeneratorPortRange{
|
||||
RelayAddress: net.ParseIP(publicIP), // Claim that we are listening on IP passed by user
|
||||
Address: "0.0.0.0", // But actually be listening on every interface
|
||||
MinPort: g.Config.RelayMinPort,
|
||||
MaxPort: g.Config.RelayMaxPort,
|
||||
}
|
||||
|
||||
threadNum := runtime.NumCPU()
|
||||
listenerConfigs := make([]turn.ListenerConfig, threadNum)
|
||||
var connAddress string
|
||||
for i := 0; i < threadNum; i++ {
|
||||
conn, listErr := listenerConfig.Listen(context.Background(), addr.Network(), addr.String())
|
||||
if listErr != nil {
|
||||
return fmt.Errorf("Failed to allocate TCP listener at %s:%s %s", addr.Network(), addr.String(), listErr)
|
||||
}
|
||||
|
||||
listenerConfigs[i] = turn.ListenerConfig{
|
||||
RelayAddressGenerator: relayAddressGenerator,
|
||||
}
|
||||
|
||||
if g.Config.isTlsEnabled {
|
||||
caCertPool := x509.NewCertPool()
|
||||
caCertPool.AppendCertsFromPEM([]byte(g.Config.tlsCa))
|
||||
|
||||
listenerConfigs[i].Listener = tls.NewListener(conn, &tls.Config{
|
||||
Certificates: []tls.Certificate{g.Config.tls},
|
||||
ClientCAs: caCertPool,
|
||||
})
|
||||
} else {
|
||||
listenerConfigs[i].Listener = conn
|
||||
}
|
||||
connAddress = conn.Addr().String()
|
||||
}
|
||||
|
||||
loggerF := logging.NewDefaultLoggerFactory()
|
||||
loggerF.DefaultLogLevel = logging.LogLevelDebug
|
||||
|
||||
server, err := turn.NewServer(turn.ServerConfig{
|
||||
Realm: g.Config.Realm,
|
||||
AuthHandler: turn.LongTermTURNRESTAuthHandler(g.Config.AuthSecret, logger),
|
||||
// ListenerConfigs is a list of TCP/TLS listeners and the configuration around them
|
||||
ListenerConfigs: listenerConfigs,
|
||||
LoggerFactory: loggerF,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to start server: %w", err)
|
||||
}
|
||||
|
||||
log.Info().Msgf("Relay listening on %s\n", connAddress)
|
||||
|
||||
// make this compatible with systemd notify mode
|
||||
systemd.SdNotify(false, systemd.SdNotifyReady)
|
||||
// Block until user sends SIGINT or SIGTERM
|
||||
sigs := make(chan os.Signal, 1)
|
||||
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-sigs
|
||||
|
||||
if err = server.Close(); err != nil {
|
||||
return fmt.Errorf("Failed to close server: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
26
cli/packages/gateway/udp_listener/listener_unix.go
Normal file
@@ -0,0 +1,26 @@
//go:build !windows
// +build !windows

package udplistener

import (
	"net"
	"syscall"

	"golang.org/x/sys/unix"
	// other imports
)

func SetupListenerConfig() *net.ListenConfig {
	return &net.ListenConfig{
		Control: func(network, address string, conn syscall.RawConn) error {
			var operr error
			if err := conn.Control(func(fd uintptr) {
				operr = syscall.SetsockoptInt(int(fd), syscall.SOL_SOCKET, unix.SO_REUSEPORT, 1)
			}); err != nil {
				return err
			}
			return operr
		},
	}
}
18
cli/packages/gateway/udp_listener/listener_windows.go
Normal file
@@ -0,0 +1,18 @@
//go:build windows
// +build windows

package udplistener

import (
	"fmt"
	"net"
	"syscall"
)

func SetupListenerConfig() *net.ListenConfig {
	return &net.ListenConfig{
		Control: func(network, address string, conn syscall.RawConn) error {
			return fmt.Errorf("Infisical relay not supported for windows.")
		},
	}
}
84
cli/packages/systemd/daemon.go
Normal file
@@ -0,0 +1,84 @@
// Copyright 2014 Docker, Inc.
// Copyright 2015-2018 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Package daemon provides a Go implementation of the sd_notify protocol.
// It can be used to inform systemd of service start-up completion, watchdog
// events, and other status changes.
//
// https://www.freedesktop.org/software/systemd/man/sd_notify.html#Description
package systemd

import (
	"net"
	"os"
)

const (
	// SdNotifyReady tells the service manager that service startup is finished
	// or the service finished loading its configuration.
	SdNotifyReady = "READY=1"

	// SdNotifyStopping tells the service manager that the service is beginning
	// its shutdown.
	SdNotifyStopping = "STOPPING=1"

	// SdNotifyReloading tells the service manager that this service is
	// reloading its configuration. Note that you must call SdNotifyReady when
	// it completed reloading.
	SdNotifyReloading = "RELOADING=1"

	// SdNotifyWatchdog tells the service manager to update the watchdog
	// timestamp for the service.
	SdNotifyWatchdog = "WATCHDOG=1"
)

// SdNotify sends a message to the init daemon. It is common to ignore the error.
// If `unsetEnvironment` is true, the environment variable `NOTIFY_SOCKET`
// will be unconditionally unset.
//
// It returns one of the following:
// (false, nil) - notification not supported (i.e. NOTIFY_SOCKET is unset)
// (false, err) - notification supported, but failure happened (e.g. error connecting to NOTIFY_SOCKET or while sending data)
// (true, nil) - notification supported, data has been sent
func SdNotify(unsetEnvironment bool, state string) (bool, error) {
	socketAddr := &net.UnixAddr{
		Name: os.Getenv("NOTIFY_SOCKET"),
		Net:  "unixgram",
	}

	// NOTIFY_SOCKET not set
	if socketAddr.Name == "" {
		return false, nil
	}

	if unsetEnvironment {
		if err := os.Unsetenv("NOTIFY_SOCKET"); err != nil {
			return false, err
		}
	}

	conn, err := net.DialUnix(socketAddr.Net, nil, socketAddr)
	// Error connecting to NOTIFY_SOCKET
	if err != nil {
		return false, err
	}
	defer conn.Close()

	if _, err = conn.Write([]byte(state)); err != nil {
		return false, err
	}
	return true, nil
}
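The package above implements the sd_notify protocol used by the gateway and relay commands. A minimal usage sketch for a service running under a `Type=notify` systemd unit follows; the surrounding `main` function is illustrative, and only `SdNotify` and its constants come from the package itself:

```go
// Minimal sketch (illustrative main): notify systemd at startup and shutdown.
// Return values of SdNotify are ignored here, which is harmless when
// NOTIFY_SOCKET is not set (notification is simply skipped).
package main

import (
	"os"
	"os/signal"
	"syscall"

	"github.com/Infisical/infisical-merge/packages/systemd"
)

func main() {
	// Tell a Type=notify unit that startup is complete.
	systemd.SdNotify(false, systemd.SdNotifyReady)

	// Wait for a termination signal, as the gateway and relay commands do.
	sigs := make(chan os.Signal, 1)
	signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
	<-sigs

	// Report that shutdown has begun before exiting.
	systemd.SdNotify(false, systemd.SdNotifyStopping)
}
```

Ignoring the returned `(bool, error)` matches how the gateway code calls it, since notification is a no-op outside systemd.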
@@ -12,18 +12,15 @@ Plus, our team is remote-first and spread across the globe (from San Francisco t
|
||||
|
||||
## Onboarding buddy
|
||||
|
||||
Every new joiner has an onboarding buddy who should ideally be in the the same timezone. The onboarding buddy should be able to help with any questions that pop up during the first few weeks. Of course, everyone is available to help, but it's good to have a dedicated person that you can go to with any questions.
|
||||
Every new joiner at Infisical will have an onboarding buddy—a teammate in a similar time zone who’s there to help you settle in. They are your go-to person for any questions that come up. Of course, everyone on the team is happy to help, but it’s always nice to have a dedicated person who’s there for you. Don’t hesitate to reach out to your buddy if you’re unsure about something or need a hand! Your onboarding buddy will set up regular syncs for your first two months—ideally at least 2-3 times a week.
|
||||
|
||||
If you’re joining the engineering team, your onboarding buddy will:
|
||||
1. Walk you through Infisical’s development process and share any best practices to keep in mind when tackling tickets.
|
||||
2. Be your go-to person if you are blocked or need to think through your sprint task.
|
||||
3. Help you ship something small on day one!
|
||||
|
||||
## Onboarding Checklist
|
||||
|
||||
1. Join the weekly all-hands meeting. It typically happens on Monday's at 8:30am PT.
|
||||
2. Ship something together on day one – even if tiny! It feels great to hit the ground running, with a development environment all ready to go.
|
||||
3. Check out the [Areas of Responsibility (AoR) Table](https://docs.google.com/spreadsheets/d/1RnXlGFg83Sgu0dh7ycuydsSobmFfI3A0XkGw7vrVxEI/edit?usp=sharing). This is helpful to know who you can ask about particular areas of Infisical. Feel free to add yourself to the areas you'd be most interesting to dive into.
|
||||
4. Read the [Infisical Strategy Doc](https://docs.google.com/document/d/1uV9IaahYwbZ5OuzDTFdQMSa1P0mpMOnetGB-xqf4G40).
|
||||
5. Update your LinkedIn profile with one of [Infisical's official banners](https://drive.google.com/drive/u/0/folders/1oSNWjbpRl9oNYwxM_98IqzKs9fAskrb2) (if you want to). You can also coordinate your social posts in the #marketing Slack channel, so that we can boost it from Infisical's official social media accounts.
|
||||
6. Over the first few weeks, feel free to schedule 1:1s with folks on the team to get to know them a bit better.
|
||||
7. Change your Slack username in the users channel to `[NAME] (Infisical)`.
|
||||
8. Go through the [technical overview](https://infisical.com/docs/internals/overview) of Infisical.
|
||||
9. Request a company credit card (Maidul will be able to help with that).
|
||||
Your hiring manager will send you an onboarding checklist doc for your first day.
|
||||
|
||||
|
||||
|
@@ -24,6 +24,9 @@ Make sure you keep copies for all receipts. If you expense something on a compan
|
||||
|
||||
You should default to using your company card in all cases - it has no transaction fees. If using your personal card is unavoidable, please reach out to Maidul to get it reimbursed manually.
|
||||
|
||||
## Training
|
||||
|
||||
For engineers, you’re welcome to take an approved Udemy course. Please reach out to Maidul. For the GTM team, you may buy a book a month if it’s relevant to your work.
|
||||
|
||||
# Equipment
|
||||
|
||||
@@ -55,4 +58,4 @@ For any equipment related questions, please reach out to Maidul.
|
||||
|
||||
## Brex
|
||||
|
||||
We use Brex as our primary credit card provider. Don't have a company card yet? Reach out to Maidul.
|
||||
We use Brex as our primary credit card provider. Don't have a company card yet? Reach out to Maidul.
|
||||
|
@@ -11,13 +11,6 @@ To interact with the Infisical API, you will need to obtain an access token. Fol
|
||||
**FAQ**
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="What happened to the Service Token and API Key authentication modes?">
|
||||
The Service Token and API Key authentication modes are being deprecated out in favor of [Identities](/documentation/platform/identity).
|
||||
We expect to make a deprecation notice in the coming months alongside a larger deprecation initiative planned for Q1/Q2 2024.
|
||||
|
||||
With identities, we're improving significantly over the shortcomings of Service Tokens and API Keys. Amongst many differences, identities provide broader access over the Infisical API, utilizes the same role-based
|
||||
permission system used by users, and comes with ample more configurable security measures.
|
||||
</Accordion>
|
||||
<Accordion title="Why can I not create, read, update, or delete an identity?">
|
||||
There are a few reasons for why this might happen:
|
||||
|
||||
|
@@ -7,21 +7,22 @@ The Server Admin Console provides **server administrators** with the ability to
|
||||
customize settings and manage users for their entire Infisical instance.
|
||||
|
||||
<Note>
|
||||
The first user to setup an account on your Infisical instance is designated as the server administrator by default.
|
||||
The first user to setup an account on your Infisical instance is designated as
|
||||
the server administrator by default.
|
||||
</Note>
|
||||
|
||||
## Accessing the Server Admin Console
|
||||
|
||||
|
||||
On the sidebar, tap on your initials to access the settings dropdown and press the **Server Admin Console** option.
|
||||
|
||||

|
||||
|
||||
## General Tab
|
||||
|
||||
Configure general settings for your instance.
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
### Allow User Signups
|
||||
|
||||
@@ -39,6 +40,22 @@ If you're using SAML/LDAP/OIDC for only one organization on your instance, you c
|
||||
|
||||
By default, users signing up through SAML/LDAP/OIDC will still need to verify their email address to prevent email spoofing. This requirement can be skipped by enabling the switch to trust logins through the respective method.
|
||||
|
||||
### Notices
|
||||
|
||||
Auth consent content is displayed to users on the login page. They can be used to display important information to users, such as a maintenance message or a new feature announcement. Both HTML and Markdown formatting are supported, allowing for customized styling like below:
|
||||
|
||||
```
|
||||
**You are entering a confidential website**
|
||||
```
|
||||
|
||||
```html
|
||||
<div style="font-weight: bold;">You are entering a confidential website</div>
|
||||
```
|
||||
|
||||

|
||||
|
||||
Page frame content is displayed as a header and footer in ALL protected pages. Like the auth consent content, both HTML and Markdown formatting are supported here as well.
|
||||

|
||||
|
||||
## Authentication Tab
|
||||
|
||||
@@ -46,24 +63,23 @@ From this tab, you can configure which login methods are enabled for your instan
|
||||
|
||||

|
||||
|
||||
|
||||
## Rate Limit Tab
|
||||
|
||||
This tab allows you to set various rate limits for your Infisical instance. You do not need to redeploy when making changes to rate limits as these will be propagated automatically.
|
||||
|
||||

|
||||
|
||||
|
||||
<Note>
|
||||
Note that rate limit configuration is a paid feature. Please contact sales@infisical.com to purchase a license for its use.
|
||||
Note that rate limit configuration is a paid feature. Please contact
|
||||
sales@infisical.com to purchase a license for its use.
|
||||
</Note>
|
||||
|
||||
## User Management Tab
|
||||
|
||||
From this tab, you can view all the users who have signed up for your instance. You can search for users using the search bar and remove them from your instance by pressing the **X** button on their respective row.
|
||||
|
||||
From this tab, you can view all the users who have signed up for your instance. You can search for users using the search bar and remove them from your instance by clicking on the three dots icon on the right. Additionally, the Server Admin can grant server administrator access to other users through this menu.
|
||||

|
||||
|
||||
<Note>
|
||||
Note that rate limit configuration is a paid feature. Please contact sales@infisical.com to purchase a license for its use.
|
||||
Note that rate limit configuration is a paid feature. Please contact
|
||||
sales@infisical.com to purchase a license for its use.
|
||||
</Note>
|
||||
|
Some files were not shown because too many files have changed in this diff.