mirror of
https://github.com/Infisical/infisical.git
synced 2025-08-02 08:27:38 +00:00
Compare commits
100 Commits
feat/grant
...
additional
Author | SHA1 | Date | |
---|---|---|---|
|
4380ce5a45 | ||
|
b4d07180d3 | ||
|
ee20e57efa | ||
|
706feafbf2 | ||
|
fc4e3f1f72 | ||
|
dcd5f20325 | ||
|
58f3e116a3 | ||
|
7bc5aad8ec | ||
|
a16dc3aef6 | ||
|
da7746c639 | ||
|
aa76924ee6 | ||
|
d8f679e72d | ||
|
bf6cfbac7a | ||
|
8e82813894 | ||
|
df21a1fb81 | ||
|
bdbb6346cb | ||
|
ea9da6d2a8 | ||
|
3c2c70912f | ||
|
b607429b99 | ||
|
16c1516979 | ||
|
f5dbbaf1fd | ||
|
2a292455ef | ||
|
4d040706a9 | ||
|
5183f76397 | ||
|
4b3efb43b0 | ||
|
96046726b2 | ||
|
a86a951acc | ||
|
5e70860160 | ||
|
abbd427ee2 | ||
|
8fd5fdbc6a | ||
|
77e1ccc8d7 | ||
|
711cc438f6 | ||
|
8447190bf8 | ||
|
12b447425b | ||
|
9cb1a31287 | ||
|
b00413817d | ||
|
2a8bd74e88 | ||
|
f28f4f7561 | ||
|
f0b05c683b | ||
|
3e8f02a4f9 | ||
|
50ee60a3ea | ||
|
21bdecdf2a | ||
|
bf09461416 | ||
|
1ff615913c | ||
|
281cedf1a2 | ||
|
a8d847f139 | ||
|
2a0c0590f1 | ||
|
2e6d525d27 | ||
|
7fd4249d00 | ||
|
90cfc44592 | ||
|
8c403780c2 | ||
|
b69c091f2f | ||
|
4a66395ce6 | ||
|
8c18753e3f | ||
|
85c5d69c36 | ||
|
94fe577046 | ||
|
a0a579834c | ||
|
b5575f4c20 | ||
|
f98f212ecf | ||
|
b331a4a708 | ||
|
e351a16b5a | ||
|
2cfca823f2 | ||
|
a8398a7009 | ||
|
8c054cedfc | ||
|
24d4f8100c | ||
|
08f23e2d3c | ||
|
d1ad605ac4 | ||
|
9dd5857ff5 | ||
|
babbacdc96 | ||
|
76427f43f7 | ||
|
3badcea95b | ||
|
1a4c0fe8d9 | ||
|
04f6864abc | ||
|
fcbe0f59d2 | ||
|
e95b6fdeaa | ||
|
5391bcd3b2 | ||
|
48fd9e2a56 | ||
|
7b5926d865 | ||
|
034123bcdf | ||
|
c406f6d78d | ||
|
eb66295dd4 | ||
|
798215e84c | ||
|
53f6ab118b | ||
|
0f5a1b13a6 | ||
|
5c606fe45f | ||
|
e004be22e3 | ||
|
016cb4a7ba | ||
|
9bfc2a5dd2 | ||
|
72dbef97fb | ||
|
f376eaae13 | ||
|
026f883d21 | ||
|
e42f860261 | ||
|
1512d4f496 | ||
|
9f7b42ad91 | ||
|
3045477c32 | ||
|
4eba80905a | ||
|
b023bc7442 | ||
|
a0029ab469 | ||
|
53605c3880 | ||
|
4382825162 |
@@ -32,10 +32,23 @@ jobs:
|
||||
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
|
||||
- name: Start the server
|
||||
run: |
|
||||
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
|
||||
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
|
||||
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
|
||||
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
|
||||
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
|
||||
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
|
||||
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
|
||||
|
||||
echo "Examining built image:"
|
||||
docker image inspect infisical-api | grep -A 5 "Entrypoint"
|
||||
|
||||
docker run --name infisical-api -d -p 4000:4000 \
|
||||
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
|
||||
-e REDIS_URL=$REDIS_URL \
|
||||
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
|
||||
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
|
||||
--env-file .env \
|
||||
infisical-api
|
||||
|
||||
echo "Container status right after creation:"
|
||||
docker ps -a | grep infisical-api
|
||||
env:
|
||||
REDIS_URL: redis://172.17.0.1:6379
|
||||
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
|
||||
@@ -43,27 +56,39 @@ jobs:
|
||||
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.21.5'
|
||||
go-version: "1.21.5"
|
||||
- name: Wait for container to be stable and check logs
|
||||
run: |
|
||||
SECONDS=0
|
||||
HEALTHY=0
|
||||
while [ $SECONDS -lt 60 ]; do
|
||||
if docker ps | grep infisical-api | grep -q healthy; then
|
||||
echo "Container is healthy."
|
||||
HEALTHY=1
|
||||
# Check if container is running
|
||||
if docker ps | grep infisical-api; then
|
||||
# Try to access the API endpoint
|
||||
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
|
||||
echo "API endpoint is responding. Container seems healthy."
|
||||
HEALTHY=1
|
||||
break
|
||||
fi
|
||||
else
|
||||
echo "Container is not running!"
|
||||
docker ps -a | grep infisical-api
|
||||
break
|
||||
fi
|
||||
|
||||
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
|
||||
|
||||
docker logs infisical-api
|
||||
|
||||
sleep 2
|
||||
SECONDS=$((SECONDS+2))
|
||||
sleep 5
|
||||
SECONDS=$((SECONDS+5))
|
||||
done
|
||||
|
||||
|
||||
if [ $HEALTHY -ne 1 ]; then
|
||||
echo "Container did not become healthy in time"
|
||||
echo "Container status:"
|
||||
docker ps -a | grep infisical-api
|
||||
echo "Container logs (if any):"
|
||||
docker logs infisical-api || echo "No logs available"
|
||||
echo "Container inspection:"
|
||||
docker inspect infisical-api | grep -A 5 "State"
|
||||
exit 1
|
||||
fi
|
||||
- name: Install openapi-diff
|
||||
@@ -71,7 +96,8 @@ jobs:
|
||||
- name: Running OpenAPI Spec diff action
|
||||
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
|
||||
- name: cleanup
|
||||
if: always()
|
||||
run: |
|
||||
docker compose -f "docker-compose.dev.yml" down
|
||||
docker stop infisical-api
|
||||
docker remove infisical-api
|
||||
docker stop infisical-api || true
|
||||
docker rm infisical-api || true
|
@@ -26,7 +26,7 @@ jobs:
|
||||
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
|
||||
|
||||
npm-release:
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
working-directory: ./npm
|
||||
needs:
|
||||
@@ -83,7 +83,7 @@ jobs:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
goreleaser:
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-latest
|
||||
needs: [cli-integration-tests]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@@ -103,11 +103,12 @@ jobs:
|
||||
go-version: ">=1.19.3"
|
||||
cache: true
|
||||
cache-dependency-path: cli/go.sum
|
||||
- name: libssl1.1 => libssl1.0-dev for OSXCross
|
||||
- name: Setup for libssl1.0-dev
|
||||
run: |
|
||||
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
|
||||
sudo apt update && apt-cache policy libssl1.0-dev
|
||||
sudo apt-get install libssl1.0-dev
|
||||
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
|
||||
sudo apt update
|
||||
sudo apt-get install -y libssl1.0-dev
|
||||
- name: OSXCross for CGO Support
|
||||
run: |
|
||||
mkdir ../../osxcross
|
||||
|
@@ -3,13 +3,10 @@ ARG POSTHOG_API_KEY=posthog-api-key
|
||||
ARG INTERCOM_ID=intercom-id
|
||||
ARG CAPTCHA_SITE_KEY=captcha-site-key
|
||||
|
||||
FROM node:20-alpine AS base
|
||||
FROM node:20-slim AS base
|
||||
|
||||
FROM base AS frontend-dependencies
|
||||
|
||||
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
|
||||
RUN apk add --no-cache libc6-compat
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY frontend/package.json frontend/package-lock.json ./
|
||||
@@ -45,8 +42,8 @@ RUN npm run build
|
||||
FROM base AS frontend-runner
|
||||
WORKDIR /app
|
||||
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 non-root-user
|
||||
RUN groupadd --system --gid 1001 nodejs
|
||||
RUN useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
|
||||
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
|
||||
|
||||
@@ -56,21 +53,23 @@ USER non-root-user
|
||||
## BACKEND
|
||||
##
|
||||
FROM base AS backend-build
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install all required dependencies for build
|
||||
RUN apk --update add \
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds \
|
||||
freetds-bin \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
freetds-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN groupadd --system --gid 1001 nodejs
|
||||
RUN useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
|
||||
COPY backend/package*.json ./
|
||||
RUN npm ci --only-production
|
||||
@@ -86,18 +85,19 @@ FROM base AS backend-runner
|
||||
WORKDIR /app
|
||||
|
||||
# Install all required dependencies for runtime
|
||||
RUN apk --update add \
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds \
|
||||
freetds-bin \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
freetds-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Configure ODBC
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
COPY backend/package*.json ./
|
||||
RUN npm ci --only-production
|
||||
@@ -109,34 +109,36 @@ RUN mkdir frontend-build
|
||||
# Production stage
|
||||
FROM base AS production
|
||||
|
||||
RUN apk add --upgrade --no-cache ca-certificates
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.31.1 && apk add --no-cache git
|
||||
|
||||
WORKDIR /
|
||||
|
||||
# Install all required runtime dependencies
|
||||
RUN apk --update add \
|
||||
RUN apt-get update && apt-get install -y \
|
||||
ca-certificates \
|
||||
bash \
|
||||
curl \
|
||||
git \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
unixodbc \
|
||||
freetds \
|
||||
freetds-bin \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev \
|
||||
bash \
|
||||
curl \
|
||||
git \
|
||||
openssh
|
||||
wget \
|
||||
openssh-client \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install Infisical CLI
|
||||
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
|
||||
&& apt-get update && apt-get install -y infisical=0.31.1 \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /
|
||||
|
||||
# Configure ODBC in production
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
# Setup user permissions
|
||||
RUN addgroup --system --gid 1001 nodejs \
|
||||
&& adduser --system --uid 1001 non-root-user
|
||||
RUN groupadd --system --gid 1001 nodejs \
|
||||
&& useradd --system --uid 1001 --gid nodejs non-root-user
|
||||
|
||||
# Give non-root-user permission to update SSL certs
|
||||
RUN chown -R non-root-user /etc/ssl/certs
|
||||
@@ -154,9 +156,7 @@ ENV INTERCOM_ID=$INTERCOM_ID
|
||||
ARG CAPTCHA_SITE_KEY
|
||||
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
|
||||
|
||||
|
||||
COPY --from=backend-runner /app /backend
|
||||
|
||||
COPY --from=frontend-runner /app ./backend/frontend-build
|
||||
|
||||
ARG INFISICAL_PLATFORM_VERSION
|
||||
|
@@ -1,23 +1,22 @@
|
||||
# Build stage
|
||||
FROM node:20-alpine AS build
|
||||
FROM node:20-slim AS build
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Required for pkcs11js
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh-client
|
||||
|
||||
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apk add --no-cache \
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
unixodbc \
|
||||
freetds \
|
||||
freetds-bin \
|
||||
freetds-dev \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
|
||||
libc-dev
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --only-production
|
||||
@@ -26,36 +25,36 @@ COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Production stage
|
||||
FROM node:20-alpine
|
||||
FROM node:20-slim
|
||||
WORKDIR /app
|
||||
|
||||
ENV npm_config_cache /home/node/.npm
|
||||
|
||||
COPY package*.json ./
|
||||
|
||||
RUN apk --update add \
|
||||
python3 \
|
||||
make \
|
||||
g++
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++
|
||||
|
||||
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apk add --no-cache \
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
unixodbc \
|
||||
freetds \
|
||||
freetds-bin \
|
||||
freetds-dev \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
libc-dev
|
||||
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
RUN npm ci --only-production && npm cache clean --force
|
||||
|
||||
COPY --from=build /app .
|
||||
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
# Install Infisical CLI
|
||||
RUN apt-get install -y curl bash && \
|
||||
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
|
||||
apt-get update && apt-get install -y infisical=0.8.1 git
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
|
||||
CMD node healthcheck.js
|
||||
|
@@ -1,4 +1,4 @@
|
||||
FROM node:20-alpine
|
||||
FROM node:20-slim
|
||||
|
||||
# ? Setup a test SoftHSM module. In production a real HSM is used.
|
||||
|
||||
@@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
|
||||
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
|
||||
SOFTHSM2_SOURCES=/tmp/softhsm2
|
||||
|
||||
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
|
||||
RUN apk --update add \
|
||||
alpine-sdk \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
openssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh
|
||||
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
autoconf \
|
||||
automake \
|
||||
git \
|
||||
libtool \
|
||||
libssl-dev \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
openssh-client \
|
||||
curl \
|
||||
pkg-config
|
||||
|
||||
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apk add --no-cache \
|
||||
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
|
||||
RUN apt-get install -y \
|
||||
unixodbc \
|
||||
freetds \
|
||||
unixodbc-dev \
|
||||
libc-dev \
|
||||
freetds-dev
|
||||
freetds-dev \
|
||||
freetds-bin \
|
||||
tdsodbc
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
|
||||
|
||||
# build and install SoftHSM2
|
||||
|
||||
# Build and install SoftHSM2
|
||||
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
|
||||
WORKDIR ${SOFTHSM2_SOURCES}
|
||||
|
||||
@@ -45,16 +45,18 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
|
||||
WORKDIR /root
|
||||
RUN rm -fr ${SOFTHSM2_SOURCES}
|
||||
|
||||
# install pkcs11-tool
|
||||
RUN apk --update add opensc
|
||||
# Install pkcs11-tool
|
||||
RUN apt-get install -y opensc
|
||||
|
||||
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
RUN mkdir -p /etc/softhsm2/tokens && \
|
||||
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
|
||||
|
||||
# ? App setup
|
||||
|
||||
RUN apk add --no-cache bash curl && curl -1sLf \
|
||||
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
|
||||
&& apk add infisical=0.8.1 && apk add --no-cache git
|
||||
# Install Infisical CLI
|
||||
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
|
||||
apt-get update && \
|
||||
apt-get install -y infisical=0.8.1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
1325
backend/package-lock.json
generated
1325
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -145,6 +145,7 @@
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@google-cloud/kms": "^4.5.0",
|
||||
"@infisical/quic": "^1.0.8",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
@@ -152,10 +153,10 @@
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@octopusdeploy/api-client": "^3.4.1",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
|
||||
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.55.0",
|
||||
"@opentelemetry/instrumentation": "^0.55.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.57.2",
|
||||
"@opentelemetry/resources": "^1.28.0",
|
||||
"@opentelemetry/sdk-metrics": "^1.28.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.27.0",
|
||||
|
@@ -0,0 +1,23 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (!hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.string("description");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
|
||||
|
||||
if (hasProjectDescription) {
|
||||
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
|
||||
t.dropColumn("description");
|
||||
});
|
||||
}
|
||||
}
|
@@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.string("comment");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
|
||||
t.dropColumn("comment");
|
||||
});
|
||||
}
|
||||
}
|
@@ -0,0 +1,34 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasEventTypeCol = await knex.schema.hasColumn(TableName.AuditLog, "eventType");
|
||||
const hasEventMetadataCol = await knex.schema.hasColumn(TableName.AuditLog, "eventMetadata");
|
||||
const hasUserAgentTypeCol = await knex.schema.hasColumn(TableName.AuditLog, "userAgentType");
|
||||
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (hasEventTypeCol) t.index("eventType");
|
||||
if (hasUserAgentTypeCol) t.index("userAgentType");
|
||||
});
|
||||
|
||||
if (hasEventMetadataCol) {
|
||||
await knex.raw(`CREATE INDEX IF NOT EXISTS idx_audit_logs_secret_path
|
||||
ON audit_logs ("projectId", ("eventMetadata"->>'secretPath'), "createdAt" DESC);`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasEventTypeCol = await knex.schema.hasColumn(TableName.AuditLog, "eventType");
|
||||
const hasEventMetadataCol = await knex.schema.hasColumn(TableName.AuditLog, "eventMetadata");
|
||||
const hasUserAgentTypeCol = await knex.schema.hasColumn(TableName.AuditLog, "userAgentType");
|
||||
|
||||
await knex.schema.alterTable(TableName.AuditLog, (t) => {
|
||||
if (hasEventTypeCol) t.dropIndex("eventType");
|
||||
if (hasUserAgentTypeCol) t.dropIndex("userAgentType");
|
||||
});
|
||||
|
||||
if (hasEventMetadataCol) {
|
||||
await knex.raw(`DROP INDEX IF EXISTS idx_audit_logs_secret_path`);
|
||||
}
|
||||
}
|
@@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
|
||||
requestId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
reviewerUserId: z.string().uuid()
|
||||
reviewerUserId: z.string().uuid(),
|
||||
comment: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;
|
||||
|
@@ -15,7 +15,8 @@ export const SecretFoldersSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
envId: z.string().uuid(),
|
||||
parentId: z.string().uuid().nullable().optional(),
|
||||
isReserved: z.boolean().default(false).nullable().optional()
|
||||
isReserved: z.boolean().default(false).nullable().optional(),
|
||||
description: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
|
||||
|
@@ -159,7 +159,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
id: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
|
||||
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
|
||||
comment: z.string().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -175,8 +176,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
approvalId: req.params.id,
|
||||
status: req.body.status
|
||||
status: req.body.status,
|
||||
comment: req.body.comment
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: req.permission.orgId,
|
||||
projectId: review.projectId,
|
||||
event: {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
|
||||
metadata: {
|
||||
secretApprovalRequestId: review.requestId,
|
||||
reviewedBy: review.reviewerUserId,
|
||||
status: review.status as ApprovalStatus,
|
||||
comment: review.comment || ""
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return { review };
|
||||
}
|
||||
});
|
||||
@@ -267,7 +285,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
environment: z.string(),
|
||||
statusChangedByUser: approvalRequestUser.optional(),
|
||||
committerUser: approvalRequestUser,
|
||||
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
|
||||
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
|
||||
secretPath: z.string(),
|
||||
commits: secretRawSchema
|
||||
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
|
||||
|
@@ -91,7 +91,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
|
||||
if (projectId && secretPath) {
|
||||
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
|
||||
void sqlQuery.whereRaw(`"eventMetadata" ->> 'secretPath' = ?::text`, [secretPath]);
|
||||
}
|
||||
|
||||
// Filter by actor type
|
||||
@@ -112,8 +112,8 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
void sqlQuery.whereRaw(`"${TableName.AuditLog}"."createdAt" <= ?::timestamptz`, [endDate]);
|
||||
}
|
||||
|
||||
// we timeout long running queries to prevent DB resource issues (2 minutes)
|
||||
const docs = await sqlQuery.timeout(1000 * 120);
|
||||
// we time out long-running queries to prevent DB resource issues (a little less than 30 seconds [server timeout])
|
||||
const docs = await sqlQuery.timeout(1000 * 29.5);
|
||||
|
||||
return docs;
|
||||
} catch (error) {
|
||||
|
@@ -22,6 +22,7 @@ import {
|
||||
} from "@app/services/secret-sync/secret-sync-types";
|
||||
|
||||
import { KmipPermission } from "../kmip/kmip-enum";
|
||||
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
filter: {
|
||||
@@ -165,6 +166,7 @@ export enum EventType {
|
||||
SECRET_APPROVAL_REQUEST = "secret-approval-request",
|
||||
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
|
||||
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
|
||||
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
|
||||
SIGN_SSH_KEY = "sign-ssh-key",
|
||||
ISSUE_SSH_CREDS = "issue-ssh-creds",
|
||||
CREATE_SSH_CA = "create-ssh-certificate-authority",
|
||||
@@ -1142,6 +1144,7 @@ interface CreateFolderEvent {
|
||||
folderId: string;
|
||||
folderName: string;
|
||||
folderPath: string;
|
||||
description?: string;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1313,6 +1316,16 @@ interface SecretApprovalRequest {
|
||||
};
|
||||
}
|
||||
|
||||
interface SecretApprovalRequestReview {
|
||||
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
|
||||
metadata: {
|
||||
secretApprovalRequestId: string;
|
||||
reviewedBy: string;
|
||||
status: ApprovalStatus;
|
||||
comment: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface SignSshKey {
|
||||
type: EventType.SIGN_SSH_KEY;
|
||||
metadata: {
|
||||
@@ -2481,4 +2494,5 @@ export type Event =
|
||||
| KmipOperationRevokeEvent
|
||||
| KmipOperationLocateEvent
|
||||
| KmipOperationRegisterEvent
|
||||
| CreateSecretRequestEvent;
|
||||
| CreateSecretRequestEvent
|
||||
| SecretApprovalRequestReview;
|
||||
|
@@ -86,7 +86,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
|
@@ -474,7 +474,7 @@ export const gatewayServiceFactory = ({
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
tlsOptions: {
|
||||
key: privateKey,
|
||||
key: privateKey.toString(),
|
||||
ca: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
|
||||
cert: clientCert.toString("pem")
|
||||
},
|
||||
|
@@ -100,6 +100,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
|
||||
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
|
||||
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
|
||||
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
|
||||
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
|
||||
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
|
||||
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
|
||||
@@ -162,8 +163,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
reviewerEmail: email,
|
||||
reviewerLastName: lastName,
|
||||
reviewerUsername: username,
|
||||
reviewerFirstName: firstName
|
||||
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
|
||||
reviewerFirstName: firstName,
|
||||
reviewerComment: comment
|
||||
}) =>
|
||||
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
|
||||
},
|
||||
{
|
||||
key: "approverUserId",
|
||||
|
@@ -320,6 +320,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
approvalId,
|
||||
actor,
|
||||
status,
|
||||
comment,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
@@ -372,15 +373,18 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
return secretApprovalRequestReviewerDAL.create(
|
||||
{
|
||||
status,
|
||||
comment,
|
||||
requestId: secretApprovalRequest.id,
|
||||
reviewerUserId: actorId
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
|
||||
|
||||
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
|
||||
});
|
||||
return reviewStatus;
|
||||
|
||||
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
|
||||
};
|
||||
|
||||
const updateApprovalStatus = async ({
|
||||
|
@@ -80,6 +80,7 @@ export type TStatusChangeDTO = {
|
||||
export type TReviewRequestDTO = {
|
||||
approvalId: string;
|
||||
status: ApprovalStatus;
|
||||
comment?: string;
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
export type TApprovalRequestCountDTO = TProjectPermission;
|
||||
|
@@ -638,7 +638,8 @@ export const FOLDERS = {
|
||||
environment: "The slug of the environment to create the folder in.",
|
||||
name: "The name of the folder to create.",
|
||||
path: "The path of the folder to create.",
|
||||
directory: "The directory of the folder to create. (Deprecated in favor of path)"
|
||||
directory: "The directory of the folder to create. (Deprecated in favor of path)",
|
||||
description: "An optional description label for the folder."
|
||||
},
|
||||
UPDATE: {
|
||||
folderId: "The ID of the folder to update.",
|
||||
@@ -647,7 +648,8 @@ export const FOLDERS = {
|
||||
path: "The path of the folder to update.",
|
||||
directory: "The new directory of the folder to update. (Deprecated in favor of path)",
|
||||
projectSlug: "The slug of the project where the folder is located.",
|
||||
workspaceId: "The ID of the project where the folder is located."
|
||||
workspaceId: "The ID of the project where the folder is located.",
|
||||
description: "An optional description label for the folder."
|
||||
},
|
||||
DELETE: {
|
||||
folderIdOrName: "The ID or name of the folder to delete.",
|
||||
|
@@ -1,6 +1,8 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import crypto from "node:crypto";
|
||||
import net from "node:net";
|
||||
import tls from "node:tls";
|
||||
|
||||
import quicDefault, * as quicModule from "@infisical/quic";
|
||||
|
||||
import { BadRequestError } from "../errors";
|
||||
import { logger } from "../logger";
|
||||
@@ -8,34 +10,73 @@ import { logger } from "../logger";
|
||||
const DEFAULT_MAX_RETRIES = 3;
|
||||
const DEFAULT_RETRY_DELAY = 1000; // 1 second
|
||||
|
||||
const createTLSConnection = (relayHost: string, relayPort: number, tlsOptions: tls.TlsOptions = {}) => {
|
||||
return new Promise<tls.TLSSocket>((resolve, reject) => {
|
||||
// @ts-expect-error this is resolved in next connect
|
||||
const socket = new tls.TLSSocket(null, {
|
||||
rejectUnauthorized: true,
|
||||
...tlsOptions
|
||||
});
|
||||
const quic = quicDefault || quicModule;
|
||||
|
||||
const cleanup = () => {
|
||||
socket.removeAllListeners();
|
||||
socket.end();
|
||||
};
|
||||
|
||||
socket.once("error", (err) => {
|
||||
cleanup();
|
||||
reject(err);
|
||||
});
|
||||
|
||||
socket.connect(relayPort, relayHost, () => {
|
||||
resolve(socket);
|
||||
});
|
||||
const parseSubjectDetails = (data: string) => {
|
||||
const values: Record<string, string> = {};
|
||||
data.split("\n").forEach((el) => {
|
||||
const [key, value] = el.split("=");
|
||||
values[key.trim()] = value.trim();
|
||||
});
|
||||
return values;
|
||||
};
|
||||
|
||||
type TTlsOption = { ca: string; cert: string; key: string };
|
||||
|
||||
const createQuicConnection = async (
|
||||
relayHost: string,
|
||||
relayPort: number,
|
||||
tlsOptions: TTlsOption,
|
||||
identityId: string,
|
||||
orgId: string
|
||||
) => {
|
||||
const client = await quic.QUICClient.createQUICClient({
|
||||
host: relayHost,
|
||||
port: relayPort,
|
||||
config: {
|
||||
ca: tlsOptions.ca,
|
||||
cert: tlsOptions.cert,
|
||||
key: tlsOptions.key,
|
||||
applicationProtos: ["infisical-gateway"],
|
||||
verifyPeer: true,
|
||||
verifyCallback: async (certs) => {
|
||||
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
|
||||
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
|
||||
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
|
||||
const isValidServerCertificate = serverCertificate.checkIssued(caCertificate);
|
||||
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
|
||||
|
||||
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
|
||||
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
|
||||
return quic.native.CryptoError.CertificateUnknown;
|
||||
}
|
||||
|
||||
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
|
||||
return quic.native.CryptoError.CertificateExpired;
|
||||
}
|
||||
|
||||
const formatedRelayHost =
|
||||
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
|
||||
if (!serverCertificate.checkIP(formatedRelayHost)) return quic.native.CryptoError.BadCertificate;
|
||||
},
|
||||
maxIdleTimeout: 90000,
|
||||
keepAliveIntervalTime: 30000
|
||||
},
|
||||
crypto: {
|
||||
ops: {
|
||||
randomBytes: async (data) => {
|
||||
crypto.getRandomValues(new Uint8Array(data));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
return client;
|
||||
};
|
||||
|
||||
type TPingGatewayAndVerifyDTO = {
|
||||
relayHost: string;
|
||||
relayPort: number;
|
||||
tlsOptions: tls.TlsOptions;
|
||||
tlsOptions: TTlsOption;
|
||||
maxRetries?: number;
|
||||
identityId: string;
|
||||
orgId: string;
|
||||
@@ -44,56 +85,44 @@ type TPingGatewayAndVerifyDTO = {
|
||||
export const pingGatewayAndVerify = async ({
|
||||
relayHost,
|
||||
relayPort,
|
||||
tlsOptions = {},
|
||||
tlsOptions,
|
||||
maxRetries = DEFAULT_MAX_RETRIES,
|
||||
identityId,
|
||||
orgId
|
||||
}: TPingGatewayAndVerifyDTO) => {
|
||||
let lastError: Error | null = null;
|
||||
|
||||
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
|
||||
throw new BadRequestError({
|
||||
error: err as Error
|
||||
});
|
||||
});
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
|
||||
try {
|
||||
const socket = await createTLSConnection(relayHost, relayPort, tlsOptions);
|
||||
socket.setTimeout(2000);
|
||||
const stream = quicClient.connection.newStream("bidi");
|
||||
const pingWriter = stream.writable.getWriter();
|
||||
await pingWriter.write(Buffer.from("PING\n"));
|
||||
pingWriter.releaseLock();
|
||||
|
||||
const pingResult = await new Promise((resolve, reject) => {
|
||||
socket.once("timeout", () => {
|
||||
socket.destroy();
|
||||
reject(new Error("Timeout"));
|
||||
// Read PONG response
|
||||
const reader = stream.readable.getReader();
|
||||
const { value, done } = await reader.read();
|
||||
|
||||
if (done) {
|
||||
throw new BadRequestError({
|
||||
message: "Gateway closed before receiving PONG"
|
||||
});
|
||||
socket.once("close", () => {
|
||||
socket.destroy();
|
||||
}
|
||||
|
||||
const response = Buffer.from(value).toString();
|
||||
|
||||
if (response !== "PONG\n" && response !== "PONG") {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to Ping. Unexpected response: ${response}`
|
||||
});
|
||||
}
|
||||
|
||||
socket.once("end", () => {
|
||||
socket.destroy();
|
||||
});
|
||||
socket.once("error", (err) => {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
socket.write(Buffer.from("PING\n"), () => {
|
||||
socket.once("data", (data) => {
|
||||
const response = (data as string).toString();
|
||||
const certificate = socket.getPeerCertificate();
|
||||
|
||||
if (certificate.subject.CN !== identityId || certificate.subject.O !== orgId) {
|
||||
throw new BadRequestError({
|
||||
message: `Invalid gateway. Certificate not found for ${identityId} in organization ${orgId}`
|
||||
});
|
||||
}
|
||||
|
||||
if (response === "PONG") {
|
||||
resolve(true);
|
||||
} else {
|
||||
reject(new Error(`Unexpected response: ${response}`));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
socket.end();
|
||||
return pingResult;
|
||||
reader.releaseLock();
|
||||
return;
|
||||
} catch (err) {
|
||||
lastError = err as Error;
|
||||
|
||||
@@ -102,6 +131,8 @@ export const pingGatewayAndVerify = async ({
|
||||
setTimeout(resolve, DEFAULT_RETRY_DELAY);
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
await quicClient.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,76 +145,125 @@ export const pingGatewayAndVerify = async ({
|
||||
interface TProxyServer {
|
||||
server: net.Server;
|
||||
port: number;
|
||||
cleanup: () => void;
|
||||
cleanup: () => Promise<void>;
|
||||
}
|
||||
|
||||
const setupProxyServer = ({
|
||||
const setupProxyServer = async ({
|
||||
targetPort,
|
||||
targetHost,
|
||||
tlsOptions = {},
|
||||
tlsOptions,
|
||||
relayHost,
|
||||
relayPort
|
||||
relayPort,
|
||||
identityId,
|
||||
orgId
|
||||
}: {
|
||||
targetHost: string;
|
||||
targetPort: number;
|
||||
relayPort: number;
|
||||
relayHost: string;
|
||||
tlsOptions: tls.TlsOptions;
|
||||
tlsOptions: TTlsOption;
|
||||
identityId: string;
|
||||
orgId: string;
|
||||
}): Promise<TProxyServer> => {
|
||||
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
|
||||
throw new BadRequestError({
|
||||
error: err as Error
|
||||
});
|
||||
});
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||
server.on("connection", async (clientSocket) => {
|
||||
server.on("connection", async (clientConn) => {
|
||||
try {
|
||||
const targetSocket = await createTLSConnection(relayHost, relayPort, tlsOptions);
|
||||
clientConn.setKeepAlive(true, 30000); // 30 seconds
|
||||
clientConn.setNoDelay(true);
|
||||
|
||||
targetSocket.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`), () => {
|
||||
clientSocket.on("data", (data) => {
|
||||
const flushed = targetSocket.write(data);
|
||||
if (!flushed) {
|
||||
clientSocket.pause();
|
||||
targetSocket.once("drain", () => {
|
||||
clientSocket.resume();
|
||||
});
|
||||
}
|
||||
});
|
||||
const stream = quicClient.connection.newStream("bidi");
|
||||
// Send FORWARD-TCP command
|
||||
const forwardWriter = stream.writable.getWriter();
|
||||
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
|
||||
forwardWriter.releaseLock();
|
||||
/* eslint-disable @typescript-eslint/no-misused-promises */
|
||||
// Set up bidirectional copy
|
||||
const setupCopy = async () => {
|
||||
// Client to QUIC
|
||||
// eslint-disable-next-line
|
||||
(async () => {
|
||||
try {
|
||||
const writer = stream.writable.getWriter();
|
||||
|
||||
targetSocket.on("data", (data) => {
|
||||
const flushed = clientSocket.write(data as string);
|
||||
if (!flushed) {
|
||||
targetSocket.pause();
|
||||
clientSocket.once("drain", () => {
|
||||
targetSocket.resume();
|
||||
// Create a handler for client data
|
||||
clientConn.on("data", async (chunk) => {
|
||||
await writer.write(chunk);
|
||||
});
|
||||
|
||||
// Handle client connection close
|
||||
clientConn.on("end", async () => {
|
||||
await writer.close();
|
||||
});
|
||||
|
||||
clientConn.on("error", async (err) => {
|
||||
await writer.abort(err);
|
||||
});
|
||||
} catch (err) {
|
||||
clientConn.destroy();
|
||||
}
|
||||
});
|
||||
})();
|
||||
|
||||
// QUIC to Client
|
||||
void (async () => {
|
||||
try {
|
||||
const reader = stream.readable.getReader();
|
||||
|
||||
let reading = true;
|
||||
while (reading) {
|
||||
const { value, done } = await reader.read();
|
||||
|
||||
if (done) {
|
||||
reading = false;
|
||||
clientConn.end(); // Close client connection when QUIC stream ends
|
||||
break;
|
||||
}
|
||||
|
||||
// Write data to TCP client
|
||||
const canContinue = clientConn.write(Buffer.from(value));
|
||||
|
||||
// Handle backpressure
|
||||
if (!canContinue) {
|
||||
await new Promise((res) => {
|
||||
clientConn.once("drain", res);
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
clientConn.destroy();
|
||||
}
|
||||
})();
|
||||
};
|
||||
await setupCopy();
|
||||
//
|
||||
// Handle connection closure
|
||||
clientConn.on("close", async () => {
|
||||
await stream.destroy();
|
||||
});
|
||||
|
||||
const cleanup = () => {
|
||||
clientSocket?.unpipe();
|
||||
clientSocket?.end();
|
||||
targetSocket?.unpipe();
|
||||
targetSocket?.end();
|
||||
const cleanup = async () => {
|
||||
clientConn?.destroy();
|
||||
await stream.destroy();
|
||||
};
|
||||
|
||||
clientSocket.on("error", (err) => {
|
||||
clientConn.on("error", (err) => {
|
||||
logger.error(err, "Client socket error");
|
||||
cleanup();
|
||||
void cleanup();
|
||||
reject(err);
|
||||
});
|
||||
|
||||
targetSocket.on("error", (err) => {
|
||||
logger.error(err, "Target socket error");
|
||||
cleanup();
|
||||
reject(err);
|
||||
});
|
||||
|
||||
clientSocket.on("end", cleanup);
|
||||
targetSocket.on("end", cleanup);
|
||||
clientConn.on("end", cleanup);
|
||||
} catch (err) {
|
||||
logger.error(err, "Failed to establish target connection:");
|
||||
clientSocket.end();
|
||||
clientConn.end();
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
@@ -192,6 +272,12 @@ const setupProxyServer = ({
|
||||
reject(err);
|
||||
});
|
||||
|
||||
server.on("close", async () => {
|
||||
await quicClient?.destroy();
|
||||
});
|
||||
|
||||
/* eslint-enable */
|
||||
|
||||
server.listen(0, () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
@@ -204,8 +290,9 @@ const setupProxyServer = ({
|
||||
resolve({
|
||||
server,
|
||||
port: address.port,
|
||||
cleanup: () => {
|
||||
cleanup: async () => {
|
||||
server.close();
|
||||
await quicClient?.destroy();
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -217,8 +304,7 @@ interface ProxyOptions {
|
||||
targetPort: number;
|
||||
relayHost: string;
|
||||
relayPort: number;
|
||||
tlsOptions?: tls.TlsOptions;
|
||||
maxRetries?: number;
|
||||
tlsOptions: TTlsOption;
|
||||
identityId: string;
|
||||
orgId: string;
|
||||
}
|
||||
@@ -227,30 +313,19 @@ export const withGatewayProxy = async (
|
||||
callback: (port: number) => Promise<void>,
|
||||
options: ProxyOptions
|
||||
): Promise<void> => {
|
||||
const {
|
||||
relayHost,
|
||||
relayPort,
|
||||
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
|
||||
|
||||
// Setup the proxy server
|
||||
const { port, cleanup } = await setupProxyServer({
|
||||
targetHost,
|
||||
targetPort,
|
||||
tlsOptions = {},
|
||||
maxRetries = DEFAULT_MAX_RETRIES,
|
||||
identityId,
|
||||
orgId
|
||||
} = options;
|
||||
|
||||
// First, try to ping the gateway
|
||||
await pingGatewayAndVerify({
|
||||
relayHost,
|
||||
relayPort,
|
||||
relayHost,
|
||||
tlsOptions,
|
||||
maxRetries,
|
||||
identityId,
|
||||
orgId
|
||||
});
|
||||
|
||||
// Setup the proxy server
|
||||
const { port, cleanup } = await setupProxyServer({ targetHost, targetPort, relayPort, relayHost, tlsOptions });
|
||||
|
||||
try {
|
||||
// Execute the callback with the allocated port
|
||||
await callback(port);
|
||||
@@ -259,6 +334,6 @@ export const withGatewayProxy = async (
|
||||
throw new BadRequestError({ message: (err as Error)?.message });
|
||||
} finally {
|
||||
// Ensure cleanup happens regardless of success or failure
|
||||
cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
};
|
||||
|
@@ -1,8 +1,8 @@
|
||||
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
|
||||
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
|
||||
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
|
||||
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
|
||||
import { registerInstrumentations } from "@opentelemetry/instrumentation";
|
||||
import { HttpInstrumentation } from "@opentelemetry/instrumentation-http";
|
||||
import { Resource } from "@opentelemetry/resources";
|
||||
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
|
||||
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
|
||||
@@ -70,7 +70,7 @@ const initTelemetryInstrumentation = ({
|
||||
opentelemetry.metrics.setGlobalMeterProvider(meterProvider);
|
||||
|
||||
registerInstrumentations({
|
||||
instrumentations: [getNodeAutoInstrumentations()]
|
||||
instrumentations: [new HttpInstrumentation()]
|
||||
});
|
||||
};
|
||||
|
||||
|
@@ -47,7 +47,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
.default("/")
|
||||
.transform(prefixWithSlash)
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(FOLDERS.CREATE.directory)
|
||||
.describe(FOLDERS.CREATE.directory),
|
||||
description: z.string().optional().nullable().describe(FOLDERS.CREATE.description)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -65,7 +66,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body,
|
||||
projectId: req.body.workspaceId,
|
||||
path
|
||||
path,
|
||||
description: req.body.description
|
||||
});
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
@@ -76,7 +78,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
environment: req.body.environment,
|
||||
folderId: folder.id,
|
||||
folderName: folder.name,
|
||||
folderPath: path
|
||||
folderPath: path,
|
||||
...(req.body.description ? { description: req.body.description } : {})
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -125,7 +128,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
.default("/")
|
||||
.transform(prefixWithSlash)
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(FOLDERS.UPDATE.directory)
|
||||
.describe(FOLDERS.UPDATE.directory),
|
||||
description: z.string().optional().nullable().describe(FOLDERS.UPDATE.description)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@@ -196,7 +200,8 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
|
||||
.default("/")
|
||||
.transform(prefixWithSlash)
|
||||
.transform(removeTrailingSlash)
|
||||
.describe(FOLDERS.UPDATE.path)
|
||||
.describe(FOLDERS.UPDATE.path),
|
||||
description: z.string().optional().nullable().describe(FOLDERS.UPDATE.description)
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
|
@@ -134,7 +134,15 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
|
||||
* Return list of names of apps for Vercel integration
|
||||
* This is re-used for getting custom environments for Vercel
|
||||
*/
|
||||
export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string | null; accessToken: string }) => {
|
||||
export const getAppsVercel = async ({
|
||||
accessToken,
|
||||
teamId,
|
||||
includeCustomEnvironments
|
||||
}: {
|
||||
teamId?: string | null;
|
||||
accessToken: string;
|
||||
includeCustomEnvironments?: boolean;
|
||||
}) => {
|
||||
const apps: Array<{ name: string; appId: string; customEnvironments: Array<{ slug: string; id: string }> }> = [];
|
||||
|
||||
const limit = "20";
|
||||
@@ -145,12 +153,6 @@ export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string |
|
||||
projects: {
|
||||
name: string;
|
||||
id: string;
|
||||
customEnvironments?: {
|
||||
id: string;
|
||||
type: string;
|
||||
description: string;
|
||||
slug: string;
|
||||
}[];
|
||||
}[];
|
||||
pagination: {
|
||||
count: number;
|
||||
@@ -159,6 +161,20 @@ export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string |
|
||||
};
|
||||
}
|
||||
|
||||
const getProjectCustomEnvironments = async (projectId: string) => {
|
||||
const { data } = await request.get<{ environments: { id: string; slug: string }[] }>(
|
||||
`${IntegrationUrls.VERCEL_API_URL}/v9/projects/${projectId}/custom-environments`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return data.environments;
|
||||
};
|
||||
|
||||
while (hasMorePages) {
|
||||
const params: { [key: string]: string } = {
|
||||
limit
|
||||
@@ -180,17 +196,38 @@ export const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string |
|
||||
}
|
||||
});
|
||||
|
||||
data.projects.forEach((a) => {
|
||||
apps.push({
|
||||
name: a.name,
|
||||
appId: a.id,
|
||||
customEnvironments:
|
||||
a.customEnvironments?.map((env) => ({
|
||||
slug: env.slug,
|
||||
id: env.id
|
||||
})) ?? []
|
||||
if (includeCustomEnvironments) {
|
||||
const projectsWithCustomEnvironments = await Promise.all(
|
||||
data.projects.map(async (a) => {
|
||||
const customEnvironments = await getProjectCustomEnvironments(a.id);
|
||||
|
||||
return {
|
||||
...a,
|
||||
customEnvironments
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
projectsWithCustomEnvironments.forEach((a) => {
|
||||
apps.push({
|
||||
name: a.name,
|
||||
appId: a.id,
|
||||
customEnvironments:
|
||||
a.customEnvironments?.map((env) => ({
|
||||
slug: env.slug,
|
||||
id: env.id
|
||||
})) ?? []
|
||||
});
|
||||
});
|
||||
});
|
||||
} else {
|
||||
data.projects.forEach((a) => {
|
||||
apps.push({
|
||||
name: a.name,
|
||||
appId: a.id,
|
||||
customEnvironments: []
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
next = data.pagination.next;
|
||||
|
||||
|
@@ -1851,6 +1851,7 @@ export const integrationAuthServiceFactory = ({
|
||||
const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);
|
||||
|
||||
const vercelApps = await getAppsVercel({
|
||||
includeCustomEnvironments: true,
|
||||
accessToken,
|
||||
teamId
|
||||
});
|
||||
|
@@ -50,7 +50,8 @@ export const secretFolderServiceFactory = ({
|
||||
actorOrgId,
|
||||
name,
|
||||
environment,
|
||||
path: secretPath
|
||||
path: secretPath,
|
||||
description
|
||||
}: TCreateFolderDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
@@ -121,7 +122,10 @@ export const secretFolderServiceFactory = ({
|
||||
}
|
||||
}
|
||||
|
||||
const doc = await folderDAL.create({ name, envId: env.id, version: 1, parentId: parentFolderId }, tx);
|
||||
const doc = await folderDAL.create(
|
||||
{ name, envId: env.id, version: 1, parentId: parentFolderId, description },
|
||||
tx
|
||||
);
|
||||
await folderVersionDAL.create(
|
||||
{
|
||||
name: doc.name,
|
||||
@@ -170,7 +174,7 @@ export const secretFolderServiceFactory = ({
|
||||
const result = await folderDAL.transaction(async (tx) =>
|
||||
Promise.all(
|
||||
folders.map(async (newFolder) => {
|
||||
const { environment, path: secretPath, id, name } = newFolder;
|
||||
const { environment, path: secretPath, id, name, description } = newFolder;
|
||||
|
||||
const parentFolder = await folderDAL.findBySecretPath(project.id, environment, secretPath);
|
||||
if (!parentFolder) {
|
||||
@@ -217,7 +221,7 @@ export const secretFolderServiceFactory = ({
|
||||
|
||||
const [doc] = await folderDAL.update(
|
||||
{ envId: env.id, id: folder.id, parentId: parentFolder.id },
|
||||
{ name },
|
||||
{ name, description },
|
||||
tx
|
||||
);
|
||||
await folderVersionDAL.create(
|
||||
@@ -259,7 +263,8 @@ export const secretFolderServiceFactory = ({
|
||||
name,
|
||||
environment,
|
||||
path: secretPath,
|
||||
id
|
||||
id,
|
||||
description
|
||||
}: TUpdateFolderDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
@@ -312,7 +317,7 @@ export const secretFolderServiceFactory = ({
|
||||
const newFolder = await folderDAL.transaction(async (tx) => {
|
||||
const [doc] = await folderDAL.update(
|
||||
{ envId: env.id, id: folder.id, parentId: parentFolder.id, isReserved: false },
|
||||
{ name },
|
||||
{ name, description },
|
||||
tx
|
||||
);
|
||||
await folderVersionDAL.create(
|
||||
|
@@ -9,6 +9,7 @@ export type TCreateFolderDTO = {
|
||||
environment: string;
|
||||
path: string;
|
||||
name: string;
|
||||
description?: string | null;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TUpdateFolderDTO = {
|
||||
@@ -16,6 +17,7 @@ export type TUpdateFolderDTO = {
|
||||
path: string;
|
||||
id: string;
|
||||
name: string;
|
||||
description?: string | null;
|
||||
} & TProjectPermission;
|
||||
|
||||
export type TUpdateManyFoldersDTO = {
|
||||
@@ -25,6 +27,7 @@ export type TUpdateManyFoldersDTO = {
|
||||
path: string;
|
||||
id: string;
|
||||
name: string;
|
||||
description?: string | null;
|
||||
}[];
|
||||
} & Omit<TProjectPermission, "projectId">;
|
||||
|
||||
|
8
cli/config/example-infisical-relay.yaml
Normal file
8
cli/config/example-infisical-relay.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
public_ip: 127.0.0.1
|
||||
auth_secret: example-auth-secret
|
||||
realm: infisical.org
|
||||
# set port 5349 for tls
|
||||
# port: 5349
|
||||
# tls_private_key_path: /full-path
|
||||
# tls_ca_path: /full-path
|
||||
# tls_cert_path: /full-path
|
8
cli/config/infisical-relay.yaml
Normal file
8
cli/config/infisical-relay.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
public_ip: 127.0.0.1
|
||||
auth_secret: changeThisOnProduction
|
||||
realm: infisical.org
|
||||
# set port 5349 for tls
|
||||
# port: 5349
|
||||
# tls_private_key_path: /full-path
|
||||
# tls_ca_path: /full-path
|
||||
# tls_cert_path: /full-path
|
26  cli/go.mod
@@ -1,6 +1,8 @@
module github.com/Infisical/infisical-merge

go 1.21
go 1.23.0

toolchain go1.23.5

require (
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
@@ -21,12 +23,14 @@ require (
github.com/pion/logging v0.2.3
github.com/pion/turn/v4 v4.0.0
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a
github.com/quic-go/quic-go v0.50.0
github.com/rs/cors v1.11.0
github.com/rs/zerolog v1.26.1
github.com/spf13/cobra v1.6.1
github.com/spf13/viper v1.8.1
github.com/stretchr/testify v1.9.0
golang.org/x/crypto v0.33.0
github.com/stretchr/testify v1.10.0
golang.org/x/crypto v0.35.0
golang.org/x/sys v0.30.0
golang.org/x/term v0.29.0
gopkg.in/yaml.v2 v2.4.0
)
@@ -58,13 +62,15 @@ require (
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.4.9 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/errors v0.20.2 // indirect
github.com/go-openapi/strfmt v0.21.3 // indirect
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
github.com/godbus/dbus/v5 v5.1.0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
github.com/googleapis/gax-go/v2 v2.12.5 // indirect
@@ -82,6 +88,7 @@ require (
github.com/muesli/mango-pflag v0.1.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/onsi/ginkgo/v2 v2.22.2 // indirect
github.com/pelletier/go-toml v1.9.3 // indirect
github.com/pion/dtls/v3 v3.0.4 // indirect
github.com/pion/randutil v0.1.0 // indirect
@@ -103,17 +110,20 @@ require (
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
golang.org/x/net v0.33.0 // indirect
go.uber.org/mock v0.5.0 // indirect
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 // indirect
golang.org/x/mod v0.23.0 // indirect
golang.org/x/net v0.35.0 // indirect
golang.org/x/oauth2 v0.21.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/text v0.22.0 // indirect
golang.org/x/time v0.6.0 // indirect
golang.org/x/tools v0.30.0 // indirect
google.golang.org/api v0.188.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b // indirect
google.golang.org/grpc v1.64.1 // indirect
google.golang.org/protobuf v1.34.2 // indirect
google.golang.org/protobuf v1.36.1 // indirect
gopkg.in/ini.v1 v1.62.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
@@ -129,3 +139,5 @@ require (
)

replace github.com/zalando/go-keyring => github.com/Infisical/go-keyring v1.0.2

replace github.com/pion/turn/v4 => github.com/Infisical/turn/v4 v4.0.1
42  cli/go.sum
@@ -49,6 +49,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/Infisical/go-keyring v1.0.2 h1:dWOkI/pB/7RocfSJgGXbXxLDcVYsdslgjEPmVhb+nl8=
|
||||
github.com/Infisical/go-keyring v1.0.2/go.mod h1:LWOnn/sw9FxDW/0VY+jHFAfOFEe03xmwBVSfJnBowto=
|
||||
github.com/Infisical/turn/v4 v4.0.1 h1:omdelNsnFfzS5cu86W5OBR68by68a8sva4ogR0lQQnw=
|
||||
github.com/Infisical/turn/v4 v4.0.1/go.mod h1:pMMKP/ieNAG/fN5cZiN4SDuyKsXtNTr0ccN7IToA1zs=
|
||||
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
|
||||
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
@@ -144,8 +146,8 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
|
||||
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
|
||||
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-openapi/errors v0.20.2 h1:dxy7PGTqEh94zj2E3h1cUmQQWiM1+aeCROfAr02EmK8=
|
||||
@@ -154,6 +156,8 @@ github.com/go-openapi/strfmt v0.21.3 h1:xwhj5X6CjXEZZHMWy1zKJxvW9AfHC9pkyUjLvHtK
|
||||
github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg=
|
||||
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
|
||||
github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
@@ -222,6 +226,8 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe
|
||||
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 h1:+J3r2e8+RsmN3vKfo75g0YSY61ms37qzPglu4p0sGro=
|
||||
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
|
||||
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
|
||||
@@ -342,6 +348,10 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWb
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
|
||||
github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
|
||||
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
|
||||
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
|
||||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||
github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ=
|
||||
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||
@@ -357,8 +367,6 @@ github.com/pion/stun/v3 v3.0.0 h1:4h1gwhWLWuZWOJIJR9s2ferRO+W3zA/b6ijOI6mKzUw=
|
||||
github.com/pion/stun/v3 v3.0.0/go.mod h1:HvCN8txt8mwi4FBvS3EmDghW6aQJ24T+y+1TKjB5jyU=
|
||||
github.com/pion/transport/v3 v3.0.7 h1:iRbMH05BzSNwhILHoBoAPxoB9xQgOaJk+591KC9P1o0=
|
||||
github.com/pion/transport/v3 v3.0.7/go.mod h1:YleKiTZ4vqNxVwh77Z0zytYi7rXHl7j6uPLGhhz9rwo=
|
||||
github.com/pion/turn/v4 v4.0.0 h1:qxplo3Rxa9Yg1xXDxxH8xaqcyGUtbHYw4QSCvmFWvhM=
|
||||
github.com/pion/turn/v4 v4.0.0/go.mod h1:MuPDkm15nYSklKpN8vWJ9W2M0PlyQZqYt1McGuxG7mA=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
@@ -369,6 +377,8 @@ github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndr
|
||||
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a h1:Ey0XWvrg6u6hyIn1Kd/jCCmL+bMv9El81tvuGBbxZGg=
|
||||
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a/go.mod h1:oa2sAs9tGai3VldabTV0eWejt/O4/OOD7azP8GaikqU=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/quic-go/quic-go v0.50.0 h1:3H/ld1pa3CYhkcc20TPIyG1bNsdhn9qZBGN3b9/UyUo=
|
||||
github.com/quic-go/quic-go v0.50.0/go.mod h1:Vim6OmUvlYdwBhXP9ZVrtGmCMWa3wEqhq3NgYrI8b4E=
|
||||
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
@@ -415,8 +425,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
|
||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
|
||||
@@ -461,6 +471,8 @@ go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8p
|
||||
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
|
||||
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
|
||||
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
|
||||
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
||||
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
@@ -472,8 +484,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
|
||||
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
|
||||
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
|
||||
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
@@ -484,6 +496,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
|
||||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 h1:aWwlzYV971S4BXRS9AmqwDLAD85ouC6X+pocatKY58c=
|
||||
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
@@ -509,6 +523,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM=
|
||||
golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -547,8 +563,8 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLd
|
||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@@ -697,6 +713,8 @@ golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4f
|
||||
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
||||
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
|
||||
golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
|
||||
golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -811,8 +829,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
|
||||
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
|
||||
google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk=
|
||||
google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||

@@ -109,12 +109,38 @@ var gatewayCmd = &cobra.Command{
},
}

var gatewayRelayCmd = &cobra.Command{
Example: `infisical gateway relay`,
Short: "Used to run infisical gateway relay",
Use: "relay",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
relayConfigFilePath, err := cmd.Flags().GetString("config")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

if relayConfigFilePath == "" {
util.HandleError(fmt.Errorf("Missing config file"))
}

gatewayRelay, err := gateway.NewGatewayRelay(relayConfigFilePath)
if err != nil {
util.HandleError(err, "Failed to initialize gateway")
}
err = gatewayRelay.Run()
if err != nil {
util.HandleError(err, "Failed to start gateway")
}
},
}

func init() {
gatewayCmd.SetHelpFunc(func(command *cobra.Command, strings []string) {
command.Flags().MarkHidden("domain")
command.Parent().HelpFunc()(command, strings)
})
gatewayCmd.Flags().String("token", "", "Connect with Infisical using machine identity access token")

gatewayRelayCmd.Flags().String("config", "", "Relay config yaml file path")

gatewayCmd.AddCommand(gatewayRelayCmd)
rootCmd.AddCommand(gatewayCmd)
}
@@ -3,20 +3,49 @@ package gateway
import (
"bufio"
"bytes"
"context"
"errors"
"io"
"net"
"strings"
"sync"

"github.com/quic-go/quic-go"
"github.com/rs/zerolog/log"
)

func handleConnection(conn net.Conn) {
defer conn.Close()
log.Info().Msgf("New connection from: %s", conn.RemoteAddr().String())
func handleConnection(ctx context.Context, quicConn quic.Connection) {
log.Info().Msgf("New connection from: %s", quicConn.RemoteAddr().String())
// Use WaitGroup to track all streams
var wg sync.WaitGroup
for {
// Accept the first stream, which we'll use for commands
stream, err := quicConn.AcceptStream(ctx)
if err != nil {
log.Printf("Failed to accept QUIC stream: %v", err)
break
}
wg.Add(1)
go func(stream quic.Stream) {
defer wg.Done()
defer stream.Close()

handleStream(stream, quicConn)
}(stream)
}

wg.Wait()
log.Printf("All streams closed for connection: %s", quicConn.RemoteAddr().String())
}

func handleStream(stream quic.Stream, quicConn quic.Connection) {
streamID := stream.StreamID()
log.Printf("New stream %d from: %s", streamID, quicConn.RemoteAddr().String())

// Use buffered reader for better handling of fragmented data
reader := bufio.NewReader(conn)
reader := bufio.NewReader(stream)
defer stream.Close()

for {
msg, err := reader.ReadBytes('\n')
if err != nil {
@@ -39,6 +68,7 @@ func handleConnection(conn net.Conn) {
return
}
defer destTarget.Close()
log.Info().Msgf("Starting secure transmission between %s->%s", quicConn.LocalAddr().String(), destTarget.LocalAddr().String())

// Handle buffered data
buffered := reader.Buffered()
@@ -56,10 +86,11 @@ func handleConnection(conn net.Conn) {
}
}

CopyData(conn, destTarget)
CopyDataFromQuicToTcp(stream, destTarget)
log.Info().Msgf("Ending secure transmission between %s->%s", quicConn.LocalAddr().String(), destTarget.LocalAddr().String())
return
case "PING":
if _, err := conn.Write([]byte("PONG")); err != nil {
if _, err := stream.Write([]byte("PONG\n")); err != nil {
log.Error().Msgf("Error writing PONG response: %v", err)
}
return
@@ -74,34 +105,38 @@ type CloseWrite interface {
CloseWrite() error
}

func CopyData(src, dst net.Conn) {
func CopyDataFromQuicToTcp(quicStream quic.Stream, tcpConn net.Conn) {
// Create a WaitGroup to wait for both copy operations
var wg sync.WaitGroup
wg.Add(2)

copyAndClose := func(dst, src net.Conn, done chan<- bool) {
// Start copying from QUIC stream to TCP
go func() {
defer wg.Done()
_, err := io.Copy(dst, src)
if err != nil && !errors.Is(err, io.EOF) {
log.Error().Msgf("Copy error: %v", err)
if _, err := io.Copy(tcpConn, quicStream); err != nil {
log.Error().Msgf("Error copying quic->postgres: %v", err)
}

// Signal we're done writing
done <- true

// Half close the connection if possible
if c, ok := dst.(CloseWrite); ok {
c.CloseWrite()
if e, ok := tcpConn.(CloseWrite); ok {
log.Debug().Msg("Closing TCP write end")
e.CloseWrite()
} else {
log.Debug().Msg("TCP connection does not support CloseWrite")
}
}
}()

done1 := make(chan bool, 1)
done2 := make(chan bool, 1)

go copyAndClose(dst, src, done1)
go copyAndClose(src, dst, done2)
// Start copying from TCP to QUIC stream
go func() {
defer wg.Done()
if _, err := io.Copy(quicStream, tcpConn); err != nil {
log.Debug().Msgf("Error copying postgres->quic: %v", err)
}
// Close the write side of the QUIC stream
if err := quicStream.Close(); err != nil && !strings.Contains(err.Error(), "close called for canceled stream") {
log.Error().Msgf("Error closing QUIC stream write: %v", err)
}
}()

// Wait for both copies to complete
<-done1
<-done2
wg.Wait()
}
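After this change the gateway speaks a newline-delimited command protocol (FORWARD-TCP, PING/PONG) over mutually authenticated QUIC streams with ALPN "infisical-gateway". A minimal client-side sketch using quic-go that opens one stream and performs the PING/PONG exchange; the certificate paths and relay address are placeholders, and the real client lives in the Infisical backend rather than in this repo:

package main

import (
	"bufio"
	"context"
	"crypto/tls"
	"crypto/x509"
	"fmt"
	"os"
	"time"

	"github.com/quic-go/quic-go"
)

func main() {
	// Placeholder credentials; in practice these come from the relay cert exchange.
	cert, err := tls.LoadX509KeyPair("client.crt", "client.key")
	if err != nil {
		panic(err)
	}
	caPEM, err := os.ReadFile("ca.crt")
	if err != nil {
		panic(err)
	}
	pool := x509.NewCertPool()
	pool.AppendCertsFromPEM(caPEM)

	tlsConf := &tls.Config{
		Certificates: []tls.Certificate{cert},
		RootCAs:      pool,
		NextProtos:   []string{"infisical-gateway"}, // must match the listener's ALPN
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	conn, err := quic.DialAddr(ctx, "relay.example.com:49500", tlsConf, &quic.Config{EnableDatagrams: true})
	if err != nil {
		panic(err)
	}
	defer conn.CloseWithError(0, "done")

	stream, err := conn.OpenStreamSync(ctx)
	if err != nil {
		panic(err)
	}
	// Newline-delimited command, mirroring the PING/PONG branch in handleStream.
	if _, err := stream.Write([]byte("PING\n")); err != nil {
		panic(err)
	}
	reply, err := bufio.NewReader(stream).ReadString('\n')
	if err != nil {
		panic(err)
	}
	fmt.Print(reply) // expected: PONG
}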
@@ -6,15 +6,19 @@ import (
"crypto/x509"
"fmt"
"net"
"os"
"strings"
"sync"
"time"

"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/systemd"
"github.com/go-resty/resty/v2"
"github.com/pion/logging"
"github.com/pion/turn/v4"
"github.com/rs/zerolog/log"

"github.com/quic-go/quic-go"
)

type GatewayConfig struct {
@@ -56,13 +60,12 @@ func (g *Gateway) ConnectWithRelay() error {
if relayPort == "5349" {
log.Info().Msgf("Provided relay port %s. Using TLS", relayPort)
conn, err = tls.Dial("tcp", relayDetails.TurnServerAddress, &tls.Config{
InsecureSkipVerify: false,
ServerName: relayAddress,
ServerName: relayAddress,
})
} else {
log.Info().Msgf("Provided relay port %s. Using non TLS connection.", relayPort)
peerAddr, err := net.ResolveTCPAddr("tcp", relayDetails.TurnServerAddress)
if err != nil {
peerAddr, errPeer := net.ResolveTCPAddr("tcp", relayDetails.TurnServerAddress)
if errPeer != nil {
return fmt.Errorf("Failed to parse turn server address: %w", err)
}
conn, err = net.DialTCP("tcp", nil, peerAddr)
@@ -74,6 +77,10 @@ func (g *Gateway) ConnectWithRelay() error {

// Start a new TURN Client and wrap our net.Conn in a STUNConn
// This allows us to simulate datagram based communication over a net.Conn
logger := logging.NewDefaultLoggerFactory()
if os.Getenv("LOG_LEVEL") == "debug" {
logger.DefaultLogLevel = logging.LogLevelDebug
}
cfg := &turn.ClientConfig{
STUNServerAddr: relayDetails.TurnServerAddress,
TURNServerAddr: relayDetails.TurnServerAddress,
@@ -81,7 +88,7 @@ func (g *Gateway) ConnectWithRelay() error {
Username: relayDetails.TurnServerUsername,
Password: relayDetails.TurnServerPassword,
Realm: relayDetails.TurnServerRealm,
LoggerFactory: logging.NewDefaultLoggerFactory(),
LoggerFactory: logger,
}

client, err := turn.NewClient(cfg)
@@ -95,10 +102,6 @@ func (g *Gateway) ConnectWithRelay() error {
TurnServerAddress: relayDetails.TurnServerAddress,
InfisicalStaticIp: relayDetails.InfisicalStaticIp,
}
// if port not specific allow all port
if relayDetails.InfisicalStaticIp != "" && !strings.Contains(relayDetails.InfisicalStaticIp, ":") {
g.config.InfisicalStaticIp = g.config.InfisicalStaticIp + ":0"
}

g.client = client
return nil
@@ -116,20 +119,20 @@ func (g *Gateway) Listen(ctx context.Context) error {
// Allocate a relay socket on the TURN server. On success, it
// will return a net.PacketConn which represents the remote
// socket.
relayNonTlsConn, err := g.client.AllocateTCP()
relayUdpConnection, err := g.client.Allocate()
if err != nil {
return fmt.Errorf("Failed to allocate relay connection: %w", err)
}

log.Info().Msg(relayNonTlsConn.Addr().String())
log.Info().Msg(relayUdpConnection.LocalAddr().String())
defer func() {
if closeErr := relayNonTlsConn.Close(); closeErr != nil {
if closeErr := relayUdpConnection.Close(); closeErr != nil {
log.Error().Msgf("Failed to close connection: %s", closeErr)
}
}()

gatewayCert, err := api.CallExchangeRelayCertV1(g.httpClient, api.ExchangeRelayCertRequestV1{
RelayAddress: relayNonTlsConn.Addr().String(),
RelayAddress: relayUdpConnection.LocalAddr().String(),
})
if err != nil {
return err
@@ -140,49 +143,54 @@ func (g *Gateway) Listen(ctx context.Context) error {
g.config.Certificate = gatewayCert.Certificate
g.config.CertificateChain = gatewayCert.CertificateChain

errCh := make(chan error, 1)
shutdownCh := make(chan bool, 1)

if g.config.InfisicalStaticIp != "" {
log.Info().Msgf("Found static ip from Infisical: %s. Creating permission IP lifecycle", g.config.InfisicalStaticIp)
peerAddr, err := net.ResolveTCPAddr("tcp", g.config.InfisicalStaticIp)
if err != nil {
return fmt.Errorf("Failed to parse infisical static ip: %w", err)
}
g.registerPermissionLifecycle(func() error {
err := relayNonTlsConn.CreatePermissions(peerAddr)
return err
}, shutdownCh)
if err = g.createPermissionForStaticIps(g.config.InfisicalStaticIp); err != nil {
return err
}

g.registerHeartBeat(ctx, errCh)

cert, err := tls.X509KeyPair([]byte(gatewayCert.Certificate), []byte(gatewayCert.PrivateKey))
if err != nil {
return fmt.Errorf("failed to parse cert: %s", err)
return fmt.Errorf("failed to parse cert: %w", err)
}

caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM([]byte(gatewayCert.CertificateChain))

relayConn := tls.NewListener(relayNonTlsConn, &tls.Config{
// Setup QUIC server
tlsConfig := &tls.Config{
Certificates: []tls.Certificate{cert},
MinVersion: tls.VersionTLS12,
ClientCAs: caCertPool,
ClientAuth: tls.RequireAndVerifyClientCert,
})
NextProtos: []string{"infisical-gateway"},
}

// Setup QUIC listener on the relayConn
quicConfig := &quic.Config{
EnableDatagrams: true,
MaxIdleTimeout: 10 * time.Second,
KeepAlivePeriod: 2 * time.Second,
}

g.registerRelayIsActive(ctx, errCh)
quicListener, err := quic.Listen(relayUdpConnection, tlsConfig, quicConfig)
if err != nil {
return fmt.Errorf("Failed to listen for QUIC: %w", err)
}
defer quicListener.Close()

log.Printf("Listener started on %s", quicListener.Addr())

errCh := make(chan error, 1)
log.Info().Msg("Gateway started successfully")
g.registerHeartBeat(errCh, shutdownCh)
g.registerRelayIsActive(relayNonTlsConn.Addr().String(), errCh, shutdownCh)

// Create a WaitGroup to track active connections
var wg sync.WaitGroup

go func() {
for {
if relayDeadlineConn, ok := relayConn.(*net.TCPListener); ok {
relayDeadlineConn.SetDeadline(time.Now().Add(1 * time.Second))
}

select {
case <-ctx.Done():
return
@@ -190,75 +198,53 @@ func (g *Gateway) Listen(ctx context.Context) error {
return
default:
// Accept new relay connection
conn, err := relayConn.Accept()
quicConn, err := quicListener.Accept(context.Background())
if err != nil {
// Check if it's a timeout error (which we expect due to our deadline)
if netErr, ok := err.(net.Error); ok && netErr.Timeout() {
continue
}

if !strings.Contains(err.Error(), "data contains incomplete STUN or TURN frame") {
log.Error().Msgf("Failed to accept connection: %v", err)
}
log.Printf("Failed to accept QUIC connection: %v", err)
continue
}

tlsConn, ok := conn.(*tls.Conn)
if !ok {
log.Error().Msg("Failed to convert to TLS connection")
conn.Close()
continue
}

// Set a deadline for the handshake to prevent hanging
tlsConn.SetDeadline(time.Now().Add(10 * time.Second))
err = tlsConn.Handshake()
// Clear the deadline after handshake
tlsConn.SetDeadline(time.Time{})
if err != nil {
log.Error().Msgf("TLS handshake failed: %v", err)
conn.Close()
continue
}

// Get connection state which contains certificate information
state := tlsConn.ConnectionState()
if len(state.PeerCertificates) > 0 {
organizationUnit := state.PeerCertificates[0].Subject.OrganizationalUnit
commonName := state.PeerCertificates[0].Subject.CommonName
tlsState := quicConn.ConnectionState().TLS
if len(tlsState.PeerCertificates) > 0 {
organizationUnit := tlsState.PeerCertificates[0].Subject.OrganizationalUnit
commonName := tlsState.PeerCertificates[0].Subject.CommonName
if organizationUnit[0] != "gateway-client" || commonName != "cloud" {
log.Error().Msgf("Client certificate verification failed. Received %s, %s", organizationUnit, commonName)
conn.Close()
errMsg := fmt.Sprintf("Client certificate verification failed. Received %s, %s", organizationUnit, commonName)
log.Error().Msg(errMsg)
quicConn.CloseWithError(1, errMsg)
continue
}
}

// Handle the connection in a goroutine
wg.Add(1)
go func(c net.Conn) {
go func(c quic.Connection) {
defer wg.Done()
defer c.Close()
defer c.CloseWithError(0, "connection closed")

// Monitor parent context to close this connection when needed
go func() {
select {
case <-ctx.Done():
c.Close() // Force close connection when context is canceled
c.CloseWithError(0, "connection closed") // Force close connection when context is canceled
case <-shutdownCh:
c.Close() // Force close connection when accepting loop is done
c.CloseWithError(0, "connection closed") // Force close connection when accepting loop is done
}
}()

handleConnection(c)
}(conn)
handleConnection(ctx, c)
}(quicConn)
}
}
}()

// make this compatiable with systemd notify mode
systemd.SdNotify(false, systemd.SdNotifyReady)
select {
case <-ctx.Done():
log.Info().Msg("Shutting down gateway...")
case err = <-errCh:
log.Error().Err(err).Msg("Gateway error occurred")
}

// Signal the accept loop to stop
@@ -281,70 +267,107 @@ func (g *Gateway) Listen(ctx context.Context) error {
return err
}

func (g *Gateway) registerHeartBeat(errCh chan error, done chan bool) {
ticker := time.NewTicker(1 * time.Hour)
func (g *Gateway) registerHeartBeat(ctx context.Context, errCh chan error) {
ticker := time.NewTicker(30 * time.Minute)
defer ticker.Stop()

go func() {
time.Sleep(10 * time.Second)
log.Info().Msg("Registering first heart beat")
err := api.CallGatewayHeartBeatV1(g.httpClient)
if err != nil {
log.Error().Msgf("Failed to register heartbeat: %s", err)
}

for {
select {
case <-done:
ticker.Stop()
return
case <-ticker.C:
log.Info().Msg("Registering heart beat")
err := api.CallGatewayHeartBeatV1(g.httpClient)
if err := api.CallGatewayHeartBeatV1(g.httpClient); err != nil {
errCh <- err
} else {
log.Info().Msg("Gateway is reachable by Infisical")
}
}
}()
}

func (g *Gateway) registerPermissionLifecycle(permissionFn func() error, done chan bool) {
ticker := time.NewTicker(3 * time.Minute)

go func() {
// wait for 5 mins
permissionFn()
log.Printf("Created permission for incoming connections")
for {
select {
case <-done:
ticker.Stop()
case <-ctx.Done():
return
case <-ticker.C:
permissionFn()
}
}
}()
}

func (g *Gateway) registerRelayIsActive(serverAddr string, errCh chan error, done chan bool) {
ticker := time.NewTicker(10 * time.Second)
func (g *Gateway) createPermissionForStaticIps(staticIps string) error {
if staticIps == "" {
return fmt.Errorf("Missing Infisical static ips for permission")
}

splittedIps := strings.Split(staticIps, ",")
resolvedIps := make([]net.Addr, 0)
for _, ip := range splittedIps {
ip = strings.TrimSpace(ip)
if ip == "" {
continue
}

// if port not specific allow all port
if !strings.Contains(ip, ":") {
ip = ip + ":0"
}

peerAddr, err := net.ResolveUDPAddr("udp", ip)
if err != nil {
return fmt.Errorf("Failed to resolve static ip for permission: %w", err)
}

resolvedIps = append(resolvedIps, peerAddr)
}

if err := g.client.CreatePermission(resolvedIps...); err != nil {
return fmt.Errorf("Failed to set ip permission: %w", err)
}
return nil
}

func (g *Gateway) registerRelayIsActive(ctx context.Context, errCh chan error) error {
ticker := time.NewTicker(15 * time.Second)
maxFailures := 3
failures := 0

log.Info().Msg("Starting relay connection health check")

go func() {
time.Sleep(5 * time.Second)
for {
select {
case <-done:
ticker.Stop()
case <-ctx.Done():
log.Info().Msg("Stopping relay connection health check")
return
case <-ticker.C:
conn, err := net.Dial("tcp", serverAddr)
if err != nil {
errCh <- err
return
}
if conn != nil {
conn.Close()
}
func() {
log.Debug().Msg("Performing relay connection health check")

if g.client == nil {
failures++
log.Warn().Int("failures", failures).Msg("TURN client is nil")
if failures >= maxFailures {
errCh <- fmt.Errorf("relay connection check failed: TURN client is nil")
}
return
}

// we try to refresh permissions - this is a lightweight operation
// that will fail immediately if the UDP connection is broken. good for health check
log.Debug().Msg("Refreshing TURN permissions to verify connection")
if err := g.createPermissionForStaticIps(g.config.InfisicalStaticIp); err != nil {
failures++
log.Warn().Err(err).Int("failures", failures).Msg("Failed to refresh TURN permissions")
if failures >= maxFailures {
errCh <- fmt.Errorf("relay connection check failed: %w", err)
}
return
}

log.Debug().Msg("Successfully refreshed TURN permissions - connection is healthy")
if failures > 0 {
log.Info().Int("previous_failures", failures).Msg("Relay connection restored")
failures = 0
}
}()
}
}
}()

return nil
}
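The reworked registerRelayIsActive above replaces the old TCP dial probe with a TURN permission refresh plus a failure budget before surfacing an error. Reduced to a self-contained sketch of that ticker-plus-threshold pattern (checkFn and the constants here are illustrative, not Infisical APIs):

package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// healthLoop runs checkFn on every tick and reports on errCh only after
// maxFailures consecutive failures, resetting the counter on success.
func healthLoop(ctx context.Context, checkFn func() error, errCh chan<- error) {
	const maxFailures = 3
	failures := 0
	ticker := time.NewTicker(15 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			if err := checkFn(); err != nil {
				failures++
				if failures >= maxFailures {
					errCh <- fmt.Errorf("health check failed %d times: %w", failures, err)
					return
				}
				continue
			}
			failures = 0
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()
	errCh := make(chan error, 1)
	go healthLoop(ctx, func() error { return errors.New("probe failed") }, errCh)
	select {
	case err := <-errCh:
		fmt.Println(err)
	case <-ctx.Done():
	}
}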
190  cli/packages/gateway/relay.go  Normal file
@@ -0,0 +1,190 @@
//go:build !windows
// +build !windows

package gateway

import (
"context"
"crypto/tls"
"crypto/x509"
"errors"
"fmt"
"net"
"os"
"os/signal"
"runtime"
"strconv"
"syscall"

udplistener "github.com/Infisical/infisical-merge/packages/gateway/udp_listener"
"github.com/Infisical/infisical-merge/packages/systemd"
"github.com/pion/logging"
"github.com/pion/turn/v4"
"github.com/rs/zerolog/log"
"gopkg.in/yaml.v2"
)

var (
errMissingTlsCert = errors.New("Missing TLS files")
)

type GatewayRelay struct {
Config *GatewayRelayConfig
}

type GatewayRelayConfig struct {
PublicIP string `yaml:"public_ip"`
Port int `yaml:"port"`
Realm string `yaml:"realm"`
AuthSecret string `yaml:"auth_secret"`
RelayMinPort uint16 `yaml:"relay_min_port"`
RelayMaxPort uint16 `yaml:"relay_max_port"`
TlsCertPath string `yaml:"tls_cert_path"`
TlsPrivateKeyPath string `yaml:"tls_private_key_path"`
TlsCaPath string `yaml:"tls_ca_path"`

tls tls.Certificate
tlsCa string
isTlsEnabled bool
}

func NewGatewayRelay(configFilePath string) (*GatewayRelay, error) {
cfgFile, err := os.ReadFile(configFilePath)
if err != nil {
return nil, err
}
var cfg GatewayRelayConfig
if err := yaml.Unmarshal(cfgFile, &cfg); err != nil {
return nil, err
}

if cfg.PublicIP == "" {
return nil, fmt.Errorf("Missing public ip")
}

if cfg.AuthSecret == "" {
return nil, fmt.Errorf("Missing auth secret")
}

if cfg.Realm == "" {
cfg.Realm = "infisical.org"
}

if cfg.RelayMinPort == 0 {
cfg.RelayMinPort = 49152
}

if cfg.RelayMaxPort == 0 {
cfg.RelayMaxPort = 65535
}

if cfg.Port == 0 {
cfg.Port = 3478
} else if cfg.Port == 5349 {
if cfg.TlsCertPath == "" || cfg.TlsPrivateKeyPath == "" {
return nil, errMissingTlsCert
}

cert, err := tls.LoadX509KeyPair(cfg.TlsCertPath, cfg.TlsPrivateKeyPath)
if err != nil {
return nil, fmt.Errorf("Failed to read load server tls key pair: %w", err)
}

if cfg.TlsCaPath != "" {
ca, err := os.ReadFile(cfg.TlsCaPath)
if err != nil {
return nil, fmt.Errorf("Failed to read tls ca: %w", err)
}
cfg.tlsCa = string(ca)
}

cfg.tls = cert
cfg.isTlsEnabled = true
}

return &GatewayRelay{
Config: &cfg,
}, nil
}

func (g *GatewayRelay) Run() error {
addr, err := net.ResolveTCPAddr("tcp", "0.0.0.0:"+strconv.Itoa(g.Config.Port))
if err != nil {
return fmt.Errorf("Failed to parse server address: %s", err)
}

// NewLongTermAuthHandler takes a pion.LeveledLogger. This allows you to intercept messages
// and process them yourself.
logger := logging.NewDefaultLeveledLoggerForScope("lt-creds", logging.LogLevelTrace, os.Stdout)

// Create `numThreads` UDP listeners to pass into pion/turn
// pion/turn itself doesn't allocate any UDP sockets, but lets the user pass them in
// this allows us to add logging, storage or modify inbound/outbound traffic
// UDP listeners share the same local address:port with setting SO_REUSEPORT and the kernel
// will load-balance received packets per the IP 5-tuple
listenerConfig := udplistener.SetupListenerConfig()

publicIP := g.Config.PublicIP
relayAddressGenerator := &turn.RelayAddressGeneratorPortRange{
RelayAddress: net.ParseIP(publicIP), // Claim that we are listening on IP passed by user
Address: "0.0.0.0", // But actually be listening on every interface
MinPort: g.Config.RelayMinPort,
MaxPort: g.Config.RelayMaxPort,
}

threadNum := runtime.NumCPU()
listenerConfigs := make([]turn.ListenerConfig, threadNum)
var connAddress string
for i := 0; i < threadNum; i++ {
conn, listErr := listenerConfig.Listen(context.Background(), addr.Network(), addr.String())
if listErr != nil {
return fmt.Errorf("Failed to allocate TCP listener at %s:%s %s", addr.Network(), addr.String(), listErr)
}

listenerConfigs[i] = turn.ListenerConfig{
RelayAddressGenerator: relayAddressGenerator,
}

if g.Config.isTlsEnabled {
caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM([]byte(g.Config.tlsCa))

listenerConfigs[i].Listener = tls.NewListener(conn, &tls.Config{
Certificates: []tls.Certificate{g.Config.tls},
ClientCAs: caCertPool,
})
} else {
listenerConfigs[i].Listener = conn
}
connAddress = conn.Addr().String()
}

loggerF := logging.NewDefaultLoggerFactory()
loggerF.DefaultLogLevel = logging.LogLevelDebug

server, err := turn.NewServer(turn.ServerConfig{
Realm: g.Config.Realm,
AuthHandler: turn.LongTermTURNRESTAuthHandler(g.Config.AuthSecret, logger),
// PacketConnConfigs is a list of UDP Listeners and the configuration around them
ListenerConfigs: listenerConfigs,
LoggerFactory: loggerF,
})

if err != nil {
return fmt.Errorf("Failed to start server: %w", err)
}

log.Info().Msgf("Relay listening on %s\n", connAddress)

// make this compatiable with systemd notify mode
systemd.SdNotify(false, systemd.SdNotifyReady)
// Block until user sends SIGINT or SIGTERM
sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
<-sigs

if err = server.Close(); err != nil {
return fmt.Errorf("Failed to close server: %w", err)
}
return nil
}
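The relay authenticates TURN clients with turn.LongTermTURNRESTAuthHandler(g.Config.AuthSecret, logger). To my understanding this follows the common TURN REST credential convention: the username is an expiry timestamp and the password is a base64-encoded HMAC-SHA1 of that username keyed with the shared auth_secret. A standard-library sketch of generating such a pair on the issuing side; the exact username format the Infisical backend uses is an assumption:

package main

import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/base64"
	"fmt"
	"strconv"
	"time"
)

// turnRESTCredentials derives a time-limited TURN username/password pair from a
// shared secret, following the common TURN REST API convention. Whether any
// user suffix is appended to the username is an assumption here.
func turnRESTCredentials(sharedSecret string, ttl time.Duration) (username, password string) {
	username = strconv.FormatInt(time.Now().Add(ttl).Unix(), 10)
	mac := hmac.New(sha1.New, []byte(sharedSecret))
	mac.Write([]byte(username))
	password = base64.StdEncoding.EncodeToString(mac.Sum(nil))
	return username, password
}

func main() {
	user, pass := turnRESTCredentials("changeThisOnProduction", time.Hour)
	fmt.Println(user, pass)
}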
37  cli/packages/gateway/relay_windows.go  Normal file
@@ -0,0 +1,37 @@
//go:build windows
// +build windows

package gateway

import (
"errors"
)

var (
errMissingTlsCert = errors.New("Missing TLS files")
errWindowsNotSupported = errors.New("Relay is not supported on Windows")
)

type GatewayRelay struct {
Config *GatewayRelayConfig
}

type GatewayRelayConfig struct {
PublicIP string
Port int
Realm string
AuthSecret string
RelayMinPort uint16
RelayMaxPort uint16
TlsCertPath string
TlsPrivateKeyPath string
TlsCaPath string
}

func NewGatewayRelay(configFilePath string) (*GatewayRelay, error) {
return nil, errWindowsNotSupported
}

func (g *GatewayRelay) Run() error {
return errWindowsNotSupported
}
26  cli/packages/gateway/udp_listener/listener_unix.go  Normal file
@@ -0,0 +1,26 @@
//go:build !windows
// +build !windows

package udplistener

import (
"net"
"syscall"

"golang.org/x/sys/unix"
// other imports
)

func SetupListenerConfig() *net.ListenConfig {
return &net.ListenConfig{
Control: func(network, address string, conn syscall.RawConn) error {
var operr error
if err := conn.Control(func(fd uintptr) {
operr = syscall.SetsockoptInt(int(fd), syscall.SOL_SOCKET, unix.SO_REUSEPORT, 1)
}); err != nil {
return err
}
return operr
},
}
}
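SetupListenerConfig returns a net.ListenConfig whose Control hook sets SO_REUSEPORT before bind, which is what lets the relay's Run() open one listener per CPU on the same address and have the kernel load-balance across them. A minimal sketch of using the returned config directly for a UDP socket; the address is a placeholder:

package main

import (
	"context"
	"fmt"

	udplistener "github.com/Infisical/infisical-merge/packages/gateway/udp_listener"
)

func main() {
	lc := udplistener.SetupListenerConfig()
	// Because SO_REUSEPORT is set in the Control hook, several sockets can bind
	// the same address and received packets are balanced across them.
	pc, err := lc.ListenPacket(context.Background(), "udp4", "0.0.0.0:3478")
	if err != nil {
		panic(err)
	}
	defer pc.Close()
	fmt.Println("listening on", pc.LocalAddr())
}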
18  cli/packages/gateway/udp_listener/listener_windows.go  Normal file
@@ -0,0 +1,18 @@
//go:build windows
// +build windows

package udplistener

import (
"fmt"
"net"
"syscall"
)

func SetupListenerConfig() *net.ListenConfig {
return &net.ListenConfig{
Control: func(network, address string, conn syscall.RawConn) error {
return fmt.Errorf("Infisical relay not supported for windows.")
},
}
}
84  cli/packages/systemd/daemon.go  Normal file
@@ -0,0 +1,84 @@
// Copyright 2014 Docker, Inc.
// Copyright 2015-2018 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Package daemon provides a Go implementation of the sd_notify protocol.
// It can be used to inform systemd of service start-up completion, watchdog
// events, and other status changes.
//
// https://www.freedesktop.org/software/systemd/man/sd_notify.html#Description
package systemd

import (
"net"
"os"
)

const (
// SdNotifyReady tells the service manager that service startup is finished
// or the service finished loading its configuration.
SdNotifyReady = "READY=1"

// SdNotifyStopping tells the service manager that the service is beginning
// its shutdown.
SdNotifyStopping = "STOPPING=1"

// SdNotifyReloading tells the service manager that this service is
// reloading its configuration. Note that you must call SdNotifyReady when
// it completed reloading.
SdNotifyReloading = "RELOADING=1"

// SdNotifyWatchdog tells the service manager to update the watchdog
// timestamp for the service.
SdNotifyWatchdog = "WATCHDOG=1"
)

// SdNotify sends a message to the init daemon. It is common to ignore the error.
// If `unsetEnvironment` is true, the environment variable `NOTIFY_SOCKET`
// will be unconditionally unset.
//
// It returns one of the following:
// (false, nil) - notification not supported (i.e. NOTIFY_SOCKET is unset)
// (false, err) - notification supported, but failure happened (e.g. error connecting to NOTIFY_SOCKET or while sending data)
// (true, nil) - notification supported, data has been sent
func SdNotify(unsetEnvironment bool, state string) (bool, error) {
socketAddr := &net.UnixAddr{
Name: os.Getenv("NOTIFY_SOCKET"),
Net: "unixgram",
}

// NOTIFY_SOCKET not set
if socketAddr.Name == "" {
return false, nil
}

if unsetEnvironment {
if err := os.Unsetenv("NOTIFY_SOCKET"); err != nil {
return false, err
}
}

conn, err := net.DialUnix(socketAddr.Net, nil, socketAddr)
// Error connecting to NOTIFY_SOCKET
if err != nil {
return false, err
}
defer conn.Close()

if _, err = conn.Write([]byte(state)); err != nil {
return false, err
}
return true, nil
}
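Both the gateway and the relay call systemd.SdNotify(false, systemd.SdNotifyReady) once they are serving, so they can run as Type=notify units. A small sketch of the wider lifecycle (READY on start, STOPPING on shutdown), assuming the process is managed by systemd; errors are deliberately ignored, as the package comment suggests:

package main

import (
	"os"
	"os/signal"
	"syscall"

	"github.com/Infisical/infisical-merge/packages/systemd"
)

func main() {
	// Tell systemd that startup is finished; this is a no-op outside systemd
	// because NOTIFY_SOCKET is unset.
	_, _ = systemd.SdNotify(false, systemd.SdNotifyReady)

	sigs := make(chan os.Signal, 1)
	signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
	<-sigs

	// Announce shutdown before cleaning up.
	_, _ = systemd.SdNotify(false, systemd.SdNotifyStopping)
}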
@@ -122,6 +122,7 @@ export const eventToNameMap: { [K in EventType]: string } = {
|
||||
"OIDC group membership mapping assigned user to groups",
|
||||
[EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER]:
|
||||
"OIDC group membership mapping removed user from groups",
|
||||
[EventType.SECRET_APPROVAL_REQUEST_REVIEW]: "Review Secret Approval Request",
|
||||
[EventType.CREATE_KMIP_CLIENT]: "Create KMIP client",
|
||||
[EventType.UPDATE_KMIP_CLIENT]: "Update KMIP client",
|
||||
[EventType.DELETE_KMIP_CLIENT]: "Delete KMIP client",
|
||||
|
@@ -150,5 +150,6 @@ export enum EventType {
|
||||
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
|
||||
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
|
||||
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
|
||||
KMIP_OPERATION_REGISTER = "kmip-operation-register"
|
||||
KMIP_OPERATION_REGISTER = "kmip-operation-register",
|
||||
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review"
|
||||
}
|
||||
|
@@ -13,9 +13,10 @@ export const useUpdateSecretApprovalReviewStatus = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return useMutation<object, object, TUpdateSecretApprovalReviewStatusDTO>({
|
||||
mutationFn: async ({ id, status }) => {
|
||||
mutationFn: async ({ id, status, comment }) => {
|
||||
const { data } = await apiRequest.post(`/api/v1/secret-approval-requests/${id}/review`, {
|
||||
status
|
||||
status,
|
||||
comment
|
||||
});
|
||||
return data;
|
||||
},
|
||||
|
@@ -44,6 +44,7 @@ export type TSecretApprovalRequest = {
|
||||
reviewers: {
|
||||
userId: string;
|
||||
status: ApprovalStatus;
|
||||
comment: string;
|
||||
email: string;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
@@ -114,6 +115,7 @@ export type TGetSecretApprovalRequestDetails = {
|
||||
|
||||
export type TUpdateSecretApprovalReviewStatusDTO = {
|
||||
status: ApprovalStatus;
|
||||
comment?: string;
|
||||
id: string;
|
||||
};
|
||||
|
||||
|
@@ -148,12 +148,13 @@ export const useUpdateFolder = () => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return useMutation<object, object, TUpdateFolderDTO>({
|
||||
mutationFn: async ({ path = "/", folderId, name, environment, projectId }) => {
|
||||
mutationFn: async ({ path = "/", folderId, name, environment, projectId, description }) => {
|
||||
const { data } = await apiRequest.patch(`/api/v1/folders/${folderId}`, {
|
||||
name,
|
||||
environment,
|
||||
workspaceId: projectId,
|
||||
path
|
||||
path,
|
||||
description
|
||||
});
|
||||
return data;
|
||||
},
|
||||
|
@@ -5,6 +5,7 @@ export enum ReservedFolders {
|
||||
export type TSecretFolder = {
|
||||
id: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
};
|
||||
|
||||
export type TGetProjectFoldersDTO = {
|
||||
@@ -24,6 +25,7 @@ export type TCreateFolderDTO = {
|
||||
environment: string;
|
||||
name: string;
|
||||
path?: string;
|
||||
description?: string | null;
|
||||
};
|
||||
|
||||
export type TUpdateFolderDTO = {
|
||||
@@ -32,6 +34,7 @@ export type TUpdateFolderDTO = {
|
||||
name: string;
|
||||
folderId: string;
|
||||
path?: string;
|
||||
description?: string | null;
|
||||
};
|
||||
|
||||
export type TDeleteFolderDTO = {
|
||||
@@ -49,5 +52,6 @@ export type TUpdateFolderBatchDTO = {
|
||||
environment: string;
|
||||
id: string;
|
||||
path?: string;
|
||||
description?: string | null;
|
||||
}[];
|
||||
};
|
||||
|
@@ -2,13 +2,22 @@ import { useCallback, useMemo } from "react";
|
||||
|
||||
import { DashboardProjectSecretsOverview } from "@app/hooks/api/dashboard/types";
|
||||
|
||||
type FolderNameAndDescription = {
|
||||
name: string;
|
||||
description?: string;
|
||||
};
|
||||
|
||||
export const useFolderOverview = (folders: DashboardProjectSecretsOverview["folders"]) => {
|
||||
const folderNames = useMemo(() => {
|
||||
const names = new Set<string>();
|
||||
const folderNamesAndDescriptions = useMemo(() => {
|
||||
const namesAndDescriptions = new Map<string, FolderNameAndDescription>();
|
||||
|
||||
folders?.forEach((folder) => {
|
||||
names.add(folder.name);
|
||||
if (!namesAndDescriptions.has(folder.name)) {
|
||||
namesAndDescriptions.set(folder.name, { name: folder.name, description: folder.description });
|
||||
}
|
||||
});
|
||||
return [...names];
|
||||
|
||||
return Array.from(namesAndDescriptions.values());
|
||||
}, [folders]);
|
||||
|
||||
const isFolderPresentInEnv = useCallback(
|
||||
@@ -31,7 +40,7 @@ export const useFolderOverview = (folders: DashboardProjectSecretsOverview["fold
|
||||
[folders]
|
||||
);
|
||||
|
||||
return { folderNames, isFolderPresentInEnv, getFolderByNameAndEnv };
|
||||
return { folderNamesAndDescriptions, isFolderPresentInEnv, getFolderByNameAndEnv };
|
||||
};
|
||||
|
||||
export const useDynamicSecretOverview = (
|
||||
|
@@ -106,7 +106,7 @@ export const LogsTable = ({
|
||||
className="mb-20 mt-4 px-4 py-3 text-sm"
|
||||
isFullWidth
|
||||
variant="star"
|
||||
isLoading={isFetchingNextPage}
|
||||
isLoading={isFetchingNextPage || isPending}
|
||||
isDisabled={isFetchingNextPage || !hasNextPage}
|
||||
onClick={() => fetchNextPage()}
|
||||
>
|
||||
|
@@ -2,7 +2,7 @@ import { useEffect } from "react";
|
||||
import { Helmet } from "react-helmet";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { useSearch } from "@tanstack/react-router";
|
||||
import { useNavigate, useSearch } from "@tanstack/react-router";
|
||||
|
||||
import { OrgPermissionCan } from "@app/components/permissions";
|
||||
import { Button, NoticeBanner, PageHeader, Pagination } from "@app/components/v2";
|
||||
@@ -36,6 +36,8 @@ export const SecretScanningPage = withPermission(
|
||||
from: ROUTE_PATHS.Organization.SecretScanning.id
|
||||
});
|
||||
|
||||
const navigate = useNavigate();
|
||||
|
||||
const { control, watch } = useForm<SecretScanningFilterFormData>({
|
||||
resolver: zodResolver(secretScanningFilterFormSchema),
|
||||
defaultValues: {}
|
||||
@@ -70,8 +72,11 @@ export const SecretScanningPage = withPermission(
|
||||
const { mutateAsync: linkGitAppInstallationWithOrganization } =
|
||||
useLinkGitAppInstallationWithOrg();
|
||||
const { mutateAsync: createNewIntegrationSession } = useCreateNewInstallationSession();
|
||||
const { data: installationStatus, isPending: isSecretScanningInstatllationStatusLoading } =
|
||||
useGetSecretScanningInstallationStatus(organizationId);
|
||||
const {
|
||||
data: installationStatus,
|
||||
isPending: isSecretScanningInstatllationStatusLoading,
|
||||
refetch: refetchSecretScanningInstallationStatus
|
||||
} = useGetSecretScanningInstallationStatus(organizationId);
|
||||
const integrationEnabled =
|
||||
!isSecretScanningInstatllationStatusLoading && installationStatus?.appInstallationCompleted;
|
||||
|
||||
@@ -86,7 +91,10 @@ export const SecretScanningPage = withPermission(
|
||||
sessionId: String(queryParams.state)
|
||||
});
|
||||
if (isLinked) {
|
||||
window.location.reload();
|
||||
await navigate({
|
||||
to: "/organization/secret-scanning"
|
||||
});
|
||||
refetchSecretScanningInstallationStatus();
|
||||
}
|
||||
|
||||
console.log("installation verification complete");
|
||||
|
@@ -228,7 +228,7 @@ export const OverviewPage = () => {
setPage
});

const { folderNames, getFolderByNameAndEnv, isFolderPresentInEnv } = useFolderOverview(folders);
const { folderNamesAndDescriptions, getFolderByNameAndEnv, isFolderPresentInEnv } = useFolderOverview(folders);

const { dynamicSecretNames, isDynamicSecretPresentInEnv } =
useDynamicSecretOverview(dynamicSecrets);
@@ -251,14 +251,15 @@ export const OverviewPage = () => {
"updateFolder"
] as const);

const handleFolderCreate = async (folderName: string) => {
const handleFolderCreate = async (folderName: string, description: string | null) => {
const promises = userAvailableEnvs.map((env) => {
const environment = env.slug;
return createFolder({
name: folderName,
path: secretPath,
environment,
projectId: workspaceId
projectId: workspaceId,
description
});
});

@@ -279,7 +280,7 @@ export const OverviewPage = () => {
}
};

const handleFolderUpdate = async (newFolderName: string) => {
const handleFolderUpdate = async (newFolderName: string, description: string | null) => {
const { name: oldFolderName } = popUp.updateFolder.data as TSecretFolder;

const updatedFolders: TUpdateFolderBatchDTO["folders"] = [];
@@ -296,7 +297,8 @@ export const OverviewPage = () => {
environment: env.slug,
name: newFolderName,
id: folder.id,
path: secretPath
path: secretPath,
description
});
}
}
@@ -1027,7 +1029,7 @@ export const OverviewPage = () => {
)}
{!isOverviewLoading && visibleEnvs.length > 0 && (
<>
{folderNames.map((folderName, index) => (
{folderNamesAndDescriptions.map(({name: folderName, description}, index) => (
<SecretOverviewFolderRow
folderName={folderName}
isFolderPresentInEnv={isFolderPresentInEnv}
@@ -1039,7 +1041,7 @@ export const OverviewPage = () => {
key={`overview-${folderName}-${index + 1}`}
onClick={handleFolderClick}
onToggleFolderEdit={(name: string) =>
handlePopUpOpen("updateFolder", { name })
handlePopUpOpen("updateFolder", { name, description })
}
/>
))}
@@ -1159,7 +1161,9 @@ export const OverviewPage = () => {
<FolderForm
isEdit
defaultFolderName={(popUp.updateFolder?.data as Pick<TSecretFolder, "name">)?.name}
defaultDescription={(popUp.updateFolder?.data as Pick<TSecretFolder, "description">)?.description}
onUpdateFolder={handleFolderUpdate}
showDescriptionOverwriteWarning
/>
</ModalContent>
</Modal>
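Both folder handlers on the overview page now thread an optional description through to every environment, so creating or renaming a folder from the overview keeps the description in sync across environments. A sketch of the create path under those assumptions (CreateFolderDto and the createFolder stand-in are illustrative, not the real mutation signature):

// Illustrative DTO; the real hook's payload may differ.
type CreateFolderDto = {
  name: string;
  path: string;
  environment: string;
  projectId: string;
  description: string | null;
};

export const createFolderInAllEnvs = async (
  createFolder: (dto: CreateFolderDto) => Promise<unknown>,
  envs: { slug: string }[],
  dto: Omit<CreateFolderDto, "environment">
) =>
  // One request per environment, all carrying the same name and description.
  Promise.all(envs.map((env) => createFolder({ ...dto, environment: env.slug })));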
@@ -1,19 +1,36 @@
import { ReactNode } from "react";
import { Controller, useForm } from "react-hook-form";
import {
faAngleDown,
faArrowLeft,
faCheck,
faCheckCircle,
faCircle,
faCodeBranch,
faComment,
faFolder,
faXmarkCircle
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { zodResolver } from "@hookform/resolvers/zod";
import { RadioGroup, RadioGroupIndicator, RadioGroupItem } from "@radix-ui/react-radio-group";
import { twMerge } from "tailwind-merge";
import z from "zod";

import { createNotification } from "@app/components/notifications";
import { Button, ContentLoader, EmptyState, IconButton, Tooltip } from "@app/components/v2";
import {
Button,
ContentLoader,
DropdownMenu,
DropdownMenuContent,
DropdownMenuTrigger,
EmptyState,
FormControl,
IconButton,
TextArea,
Tooltip
} from "@app/components/v2";
import { useUser } from "@app/context";
import { usePopUp } from "@app/hooks";
import {
useGetSecretApprovalRequestDetails,
useUpdateSecretApprovalReviewStatus
@@ -74,6 +91,13 @@ type Props = {
onGoBack: () => void;
};

const reviewFormSchema = z.object({
comment: z.string().trim().optional().default(""),
status: z.nativeEnum(ApprovalStatus)
});

type TReviewFormSchema = z.infer<typeof reviewFormSchema>;

export const SecretApprovalRequestChanges = ({
approvalRequestId,
onGoBack,
@@ -94,6 +118,16 @@ export const SecretApprovalRequestChanges = ({
variables
} = useUpdateSecretApprovalReviewStatus();

const { popUp, handlePopUpToggle } = usePopUp(["reviewChanges"] as const);
const {
control,
handleSubmit,
reset,
formState: { isSubmitting }
} = useForm<TReviewFormSchema>({
resolver: zodResolver(reviewFormSchema)
});

const isApproving = variables?.status === ApprovalStatus.APPROVED && isUpdatingRequestStatus;
const isRejecting = variables?.status === ApprovalStatus.REJECTED && isUpdatingRequestStatus;

@@ -101,23 +135,23 @@ export const SecretApprovalRequestChanges = ({
const canApprove = secretApprovalRequestDetails?.policy?.approvers?.some(
({ userId }) => userId === userSession.id
);

const reviewedUsers = secretApprovalRequestDetails?.reviewers?.reduce<
Record<string, ApprovalStatus>
Record<string, { status: ApprovalStatus; comment: string }>
>(
(prev, curr) => ({
...prev,
[curr.userId]: curr.status
[curr.userId]: { status: curr.status, comment: curr.comment }
}),
{}
);
const hasApproved = reviewedUsers?.[userSession.id] === ApprovalStatus.APPROVED;
const hasRejected = reviewedUsers?.[userSession.id] === ApprovalStatus.REJECTED;

const handleSecretApprovalStatusUpdate = async (status: ApprovalStatus) => {
const handleSecretApprovalStatusUpdate = async (status: ApprovalStatus, comment: string) => {
try {
await updateSecretApprovalRequestStatus({
id: approvalRequestId,
status
status,
comment
});
createNotification({
type: "success",
@@ -130,6 +164,16 @@ export const SecretApprovalRequestChanges = ({
text: "Failed to update the request status"
});
}

handlePopUpToggle("reviewChanges", false);
reset({
comment: "",
status: ApprovalStatus.APPROVED
});
};

const handleSubmitReview = (data: TReviewFormSchema) => {
handleSecretApprovalStatusUpdate(data.status, data.comment);
};

if (isSecretApprovalRequestLoading) {
@@ -150,7 +194,7 @@ export const SecretApprovalRequestChanges = ({
const isMergable =
secretApprovalRequestDetails?.policy?.approvals <=
secretApprovalRequestDetails?.policy?.approvers?.filter(
({ userId }) => reviewedUsers?.[userId] === ApprovalStatus.APPROVED
({ userId }) => reviewedUsers?.[userId]?.status === ApprovalStatus.APPROVED
).length;
const hasMerged = secretApprovalRequestDetails?.hasMerged;
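The reviewer bookkeeping changes shape here: reviewedUsers used to map a userId to a bare ApprovalStatus and now maps it to { status, comment }, which is why the mergeability check reads reviewedUsers[userId]?.status. A compact sketch of that reduction and the approval-count check (the ApprovalStatus values and the Reviewer shape below are assumptions for illustration):

// Assumed enum values; the real enum lives in the app's approval types.
enum ApprovalStatus {
  APPROVED = "approved",
  REJECTED = "rejected",
  PENDING = "pending"
}

type Reviewer = { userId: string; status: ApprovalStatus; comment: string };

export const buildReviewedUsers = (reviewers: Reviewer[]) =>
  reviewers.reduce<Record<string, { status: ApprovalStatus; comment: string }>>(
    (prev, curr) => ({ ...prev, [curr.userId]: { status: curr.status, comment: curr.comment } }),
    {}
  );

export const isMergable = (
  requiredApprovals: number,
  approverIds: string[],
  reviewedUsers: Record<string, { status: ApprovalStatus; comment: string }>
) =>
  // Mergeable once enough of the policy's approvers have an APPROVED entry.
  requiredApprovals <=
  approverIds.filter((userId) => reviewedUsers[userId]?.status === ApprovalStatus.APPROVED).length;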
@@ -202,27 +246,115 @@ export const SecretApprovalRequestChanges = ({
</div>
</div>
{!hasMerged && secretApprovalRequestDetails.status === "open" && (
<>
<Button
size="xs"
leftIcon={hasApproved && <FontAwesomeIcon icon={faCheck} />}
onClick={() => handleSecretApprovalStatusUpdate(ApprovalStatus.APPROVED)}
isLoading={isApproving}
isDisabled={isApproving || hasApproved || !canApprove}
>
{hasApproved ? "Approved" : "Approve"}
</Button>
<Button
size="xs"
colorSchema="danger"
leftIcon={hasRejected && <FontAwesomeIcon icon={faCheck} />}
onClick={() => handleSecretApprovalStatusUpdate(ApprovalStatus.REJECTED)}
isLoading={isRejecting}
isDisabled={isRejecting || hasRejected || !canApprove}
>
{hasRejected ? "Rejected" : "Reject"}
</Button>
</>
<DropdownMenu
open={popUp.reviewChanges.isOpen}
onOpenChange={(isOpen) => handlePopUpToggle("reviewChanges", isOpen)}
>
<DropdownMenuTrigger asChild>
<Button
variant="outline_bg"
rightIcon={<FontAwesomeIcon className="ml-2" icon={faAngleDown} />}
>
Review
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end" asChild className="mt-3">
<form onSubmit={handleSubmit(handleSubmitReview)}>
<div className="flex w-[400px] flex-col space-y-2 p-5">
<div className="text-lg font-medium">Finish your review</div>
<Controller
control={control}
name="comment"
render={({ field, fieldState: { error } }) => (
<FormControl errorText={error?.message} isError={Boolean(error)}>
<TextArea
{...field}
placeholder="Leave a comment..."
reSize="none"
className="text-md mt-2 h-48 border border-mineshaft-600 bg-bunker-800"
/>
</FormControl>
)}
/>
<Controller
control={control}
name="status"
defaultValue={ApprovalStatus.APPROVED}
render={({ field, fieldState: { error } }) => (
<FormControl errorText={error?.message} isError={Boolean(error)}>
<RadioGroup
value={field.value}
onValueChange={field.onChange}
className="mb-4 space-y-2"
aria-label="Status"
>
<div className="flex items-center gap-2">
<RadioGroupItem
id="approve"
className="h-4 w-4 rounded-full border border-gray-300 text-primary focus:ring-2 focus:ring-mineshaft-500"
value={ApprovalStatus.APPROVED}
aria-labelledby="approve-label"
>
<RadioGroupIndicator className="flex h-full w-full items-center justify-center after:h-2 after:w-2 after:rounded-full after:bg-current" />
</RadioGroupItem>
<span
id="approve-label"
className="cursor-pointer"
onClick={() => field.onChange(ApprovalStatus.APPROVED)}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
field.onChange(ApprovalStatus.APPROVED);
}
}}
tabIndex={0}
role="button"
>
Approve
</span>
</div>
<div className="flex items-center gap-2">
<RadioGroupItem
id="reject"
className="h-4 w-4 rounded-full border border-gray-300 text-primary focus:ring-2 focus:ring-mineshaft-500"
value={ApprovalStatus.REJECTED}
aria-labelledby="reject-label"
>
<RadioGroupIndicator className="flex h-full w-full items-center justify-center after:h-2 after:w-2 after:rounded-full after:bg-current" />
</RadioGroupItem>
<span
id="reject-label"
className="cursor-pointer"
onClick={() => field.onChange(ApprovalStatus.REJECTED)}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
field.onChange(ApprovalStatus.REJECTED);
}
}}
tabIndex={0}
role="button"
>
Reject
</span>
</div>
</RadioGroup>
</FormControl>
)}
/>
<div className="flex justify-end">
<Button
type="submit"
isLoading={isApproving || isRejecting || isSubmitting}
variant="outline_bg"
>
Submit Review
</Button>
</div>
</div>
</form>
</DropdownMenuContent>
</DropdownMenu>
)}
</div>
<div className="flex flex-col space-y-4">
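The separate approve/reject buttons are replaced by a single "Review" dropdown containing a small react-hook-form form: a free-text comment plus an approve/reject radio group, validated by the reviewFormSchema shown earlier and submitted through handleSubmitReview. A stripped-down sketch of the non-UI part of that flow (the enum values and the updateStatus callback are assumptions):

import { z } from "zod";

// Assumed values; mirrors the ApprovalStatus enum referenced in the diff.
enum ApprovalStatus {
  APPROVED = "approved",
  REJECTED = "rejected"
}

const reviewFormSchema = z.object({
  comment: z.string().trim().optional().default(""),
  status: z.nativeEnum(ApprovalStatus)
});

type TReviewFormSchema = z.infer<typeof reviewFormSchema>;

export const submitReview = async (
  raw: unknown,
  updateStatus: (dto: { status: ApprovalStatus; comment: string }) => Promise<unknown>
) => {
  // Parsing applies the trim/default rules, so comment is always a string.
  const data: TReviewFormSchema = reviewFormSchema.parse(raw);
  await updateStatus({ status: data.status, comment: data.comment });
};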
@@ -240,7 +372,40 @@ export const SecretApprovalRequestChanges = ({
)
)}
</div>
<div className="mt-8 flex items-center space-x-6 rounded-lg bg-mineshaft-800 px-5 py-6">
<div className="mt-4 flex flex-col items-center rounded-lg">
{secretApprovalRequestDetails?.policy?.approvers
.filter((requiredApprover) => reviewedUsers?.[requiredApprover.userId])
.map((requiredApprover) => {
const reviewer = reviewedUsers?.[requiredApprover.userId];
return (
<div
className="mb-4 flex w-full flex-col rounded-md bg-mineshaft-800 p-6"
key={`required-approver-${requiredApprover.userId}`}
>
<div>
<span className="ml-1">
{`${requiredApprover.firstName || ""} ${requiredApprover.lastName || ""}`} (
{requiredApprover?.email}) has{" "}
</span>
<span
className={`${reviewer?.status === ApprovalStatus.APPROVED ? "text-green-500" : "text-red-500"}`}
>
{reviewer?.status === ApprovalStatus.APPROVED ? "approved" : "rejected"}
</span>{" "}
the request.
</div>
{reviewer?.comment && (
<FormControl label="Comment" className="mb-0 mt-2">
<TextArea value={reviewer.comment} isDisabled reSize="none">
{reviewer?.comment && reviewer.comment}
</TextArea>
</FormControl>
)}
</div>
);
})}
</div>
<div className="flex items-center space-x-6 rounded-lg bg-mineshaft-800 px-5 py-6">
<SecretApprovalRequestAction
canApprove={canApprove}
approvalRequestId={secretApprovalRequestDetails.id}
@@ -258,7 +423,7 @@ export const SecretApprovalRequestChanges = ({
<div className="text-sm text-bunker-300">Reviewers</div>
<div className="mt-2 flex flex-col space-y-2 text-sm">
{secretApprovalRequestDetails?.policy?.approvers.map((requiredApprover) => {
const status = reviewedUsers?.[requiredApprover.userId];
const reviewer = reviewedUsers?.[requiredApprover.userId];
return (
<div
className="flex flex-nowrap items-center space-x-2 rounded bg-mineshaft-800 px-2 py-1"
@@ -275,8 +440,17 @@ export const SecretApprovalRequestChanges = ({
<span className="text-red">*</span>
</div>
<div>
<Tooltip content={status || ApprovalStatus.PENDING}>
{getReviewedStatusSymbol(status)}
{reviewer?.comment && (
<Tooltip content={reviewer.comment}>
<FontAwesomeIcon
icon={faComment}
size="xs"
className="mr-1 text-mineshaft-300"
/>
</Tooltip>
)}
<Tooltip content={reviewer?.status || ApprovalStatus.PENDING}>
{getReviewedStatusSymbol(reviewer?.status)}
</Tooltip>
</div>
</div>
@@ -290,7 +464,7 @@ export const SecretApprovalRequestChanges = ({
)
)
.map((reviewer) => {
const status = reviewedUsers?.[reviewer.userId];
const status = reviewedUsers?.[reviewer.userId].status;
return (
<div
className="flex flex-nowrap items-center space-x-2 rounded bg-mineshaft-800 px-2 py-1"
@@ -303,6 +477,15 @@ export const SecretApprovalRequestChanges = ({
<span className="text-red">*</span>
</div>
<div>
{reviewer.comment && (
<Tooltip content={reviewer.comment}>
<FontAwesomeIcon
icon={faComment}
size="xs"
className="mr-1 text-mineshaft-300"
/>
</Tooltip>
)}
<Tooltip content={status || ApprovalStatus.PENDING}>
{getReviewedStatusSymbol(status)}
</Tooltip>

@@ -128,13 +128,14 @@ export const ActionBar = ({

const { currentWorkspace } = useWorkspace();

const handleFolderCreate = async (folderName: string) => {
const handleFolderCreate = async (folderName: string, description: string | null) => {
try {
await createFolder({
name: folderName,
path: secretPath,
environment,
projectId: workspaceId
projectId: workspaceId,
description
});
handlePopUpClose("addFolder");
createNotification({

@@ -1,16 +1,22 @@
import { useRef } from "react";
import { Controller, useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { z } from "zod";

import { Button, FormControl, Input, ModalClose } from "@app/components/v2";
import { TextArea } from "@app/components/v2/TextArea/TextArea";

type Props = {
onCreateFolder?: (folderName: string) => Promise<void>;
onUpdateFolder?: (folderName: string) => Promise<void>;
onCreateFolder?: (folderName: string, description: string | null) => Promise<void>;
onUpdateFolder?: (folderName: string, description: string | null) => Promise<void>;
isEdit?: boolean;
defaultFolderName?: string;
defaultDescription?: string;
showDescriptionOverwriteWarning?: boolean;
};

const descriptionOverwriteWarningMessage = "Warning: Any changes made here will overwrite any custom edits in individual environment folders."

const formSchema = z.object({
name: z
.string()
@@ -18,15 +24,20 @@ const formSchema = z.object({
.regex(
/^[a-zA-Z0-9-_]+$/,
"Folder name can only contain letters, numbers, dashes, and underscores"
)
),
description: z
.string()
.optional()
});
type TFormData = z.infer<typeof formSchema>;

export const FolderForm = ({
isEdit,
defaultFolderName,
defaultDescription,
onCreateFolder,
onUpdateFolder
onUpdateFolder,
showDescriptionOverwriteWarning = false
}: Props): JSX.Element => {
const {
control,
@@ -36,15 +47,32 @@ export const FolderForm = ({
} = useForm<TFormData>({
resolver: zodResolver(formSchema),
defaultValues: {
name: defaultFolderName
name: defaultFolderName,
description: defaultDescription || ""
}
});

const onSubmit = async ({ name }: TFormData) => {
const descriptionRef = useRef<HTMLTextAreaElement>(null);

const handleInput = () => {
const textarea = descriptionRef.current;
if (textarea) {
const lines = textarea.value.split("\n");
const maxDescriptionLines = 10;

if (lines.length > maxDescriptionLines) {
textarea.value = lines.slice(0, maxDescriptionLines).join("\n");
}
}
};

const onSubmit = async ({ name, description }: TFormData) => {
const descriptionShaped = description && description.trim() !== "" ? description : null;

if (isEdit) {
await onUpdateFolder?.(name);
await onUpdateFolder?.(name, descriptionShaped);
} else {
await onCreateFolder?.(name);
await onCreateFolder?.(name, descriptionShaped);
}
reset();
};
@@ -61,6 +89,31 @@ export const FolderForm = ({
</FormControl>
)}
/>
<Controller
control={control}
name="description"
defaultValue=""
render={({ field, fieldState: { error } }) => (
<FormControl
label="Folder Description"
isError={Boolean(error)}
tooltipText={showDescriptionOverwriteWarning ? descriptionOverwriteWarningMessage : undefined}
isOptional
errorText={error?.message}
className="flex-1"
>
<TextArea
placeholder="Folder description"
{...field}
rows={3}
ref={descriptionRef}
onInput={handleInput}
className="thin-scrollbar w-full !resize-none bg-mineshaft-900"
maxLength={255}
/>
</FormControl>
)}
/>
<div className="mt-8 flex items-center">
<Button className="mr-4" type="submit" isDisabled={isSubmitting} isLoading={isSubmitting}>
{isEdit ? "Save" : "Create"}
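The form now owns an optional description field: the textarea is capped at 255 characters via maxLength, the onInput handler trims it to at most 10 lines, and onSubmit normalizes a blank description to null before calling the create/update callbacks. The two pure pieces of that logic could look roughly like this (helper names are illustrative, not from the source):

// Keep at most `maxLines` lines of a textarea's value (mirrors the handleInput cap of 10).
export const clampToMaxLines = (value: string, maxLines = 10): string => {
  const lines = value.split("\n");
  return lines.length > maxLines ? lines.slice(0, maxLines).join("\n") : value;
};

// Normalize an empty or whitespace-only description to null, as onSubmit does
// before invoking onCreateFolder / onUpdateFolder.
export const shapeDescription = (description?: string): string | null =>
  description && description.trim() !== "" ? description : null;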
@@ -1,5 +1,5 @@
import { subject } from "@casl/ability";
import { faClose, faFolder, faPencilSquare } from "@fortawesome/free-solid-svg-icons";
import { faClose, faFolder, faPencilSquare, faInfoCircle } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { useNavigate, useSearch } from "@tanstack/react-router";

@@ -11,6 +11,7 @@ import { ProjectPermissionActions, ProjectPermissionSub } from "@app/context";
import { usePopUp } from "@app/hooks";
import { useDeleteFolder, useUpdateFolder } from "@app/hooks/api";
import { TSecretFolder } from "@app/hooks/api/secretFolders/types";
import { Tooltip } from "@app/components/v2/Tooltip/Tooltip";

import { FolderForm } from "../ActionBar/FolderForm";

@@ -42,7 +43,7 @@ export const FolderListView = ({
const { mutateAsync: updateFolder } = useUpdateFolder();
const { mutateAsync: deleteFolder } = useDeleteFolder();

const handleFolderUpdate = async (newFolderName: string) => {
const handleFolderUpdate = async (newFolderName: string, newFolderDescription: string | null) => {
try {
const { id: folderId } = popUp.updateFolder.data as TSecretFolder;
await updateFolder({
@@ -50,7 +51,8 @@ export const FolderListView = ({
name: newFolderName,
path: secretPath,
environment,
projectId: workspaceId
projectId: workspaceId,
description: newFolderDescription
});
handlePopUpClose("updateFolder");
createNotification({
@@ -98,7 +100,7 @@ export const FolderListView = ({

return (
<>
{folders.map(({ name, id }) => (
{folders.map(({ name, id, description }) => (
<div
key={id}
className="group flex cursor-pointer border-b border-mineshaft-600 hover:bg-mineshaft-700"
@@ -116,6 +118,16 @@ export const FolderListView = ({
onClick={() => handleFolderClick(name)}
>
{name}
{
description &&
<Tooltip
position="right"
className="flex items-center space-x-4 max-w-lg py-4 whitespace-pre-wrap"
content={description}
>
<FontAwesomeIcon icon={faInfoCircle} className="text-mineshaft-400 ml-1" />
</Tooltip>
}
</div>
<div className="flex items-center space-x-4 border-l border-mineshaft-600 px-3 py-3">
<ProjectPermissionCan
@@ -130,7 +142,7 @@ export const FolderListView = ({
variant="plain"
size="sm"
className="p-0 opacity-0 group-hover:opacity-100"
onClick={() => handlePopUpOpen("updateFolder", { id, name })}
onClick={() => handlePopUpOpen("updateFolder", { id, name, description })}
isDisabled={!isAllowed}
>
<FontAwesomeIcon icon={faPencilSquare} size="lg" />
@@ -167,6 +179,7 @@ export const FolderListView = ({
<FolderForm
isEdit
defaultFolderName={(popUp.updateFolder?.data as TSecretFolder)?.name}
defaultDescription={(popUp.updateFolder?.data as TSecretFolder)?.description}
onUpdateFolder={handleFolderUpdate}
/>
</ModalContent>
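Folder rows in the list view now surface the description inline: when a folder has one, an info icon with a right-positioned tooltip is rendered next to the name, and the same description is passed along when the edit modal is opened. A minimal sketch of that conditional render (the components here are generic stand-ins, not the app's Tooltip or icon components):

import React from "react";

type FolderRowLabelProps = {
  name: string;
  description?: string | null;
  // Stand-in for the app's Tooltip-wrapped info icon.
  renderInfoIcon: (description: string) => React.ReactNode;
};

export const FolderRowLabel = ({ name, description, renderInfoIcon }: FolderRowLabelProps) => (
  <div>
    {name}
    {/* Only show the tooltip trigger when a description actually exists. */}
    {description ? renderInfoIcon(description) : null}
  </div>
);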
@@ -45,17 +45,33 @@ const formSchema = z
.object({
secretPath: z.string().default("/"),
sourceEnvironment: z.object({ name: z.string(), slug: z.string() }),
targetRepo: z.object({ name: z.string(), appId: z.string() }),
targetRepo: z.object({ name: z.string(), appId: z.string().optional() }).nullish(),
targetWorkspace: z.object({ name: z.string(), slug: z.string() }),
targetEnvironment: z.object({ name: z.string(), uuid: z.string() }).optional(),
targetEnvironment: z.object({ name: z.string(), uuid: z.string() }).nullish(),
scope: z.object({ label: z.string(), value: z.nativeEnum(BitbucketScope) })
})
.superRefine((val, ctx) => {
if (!val.targetWorkspace) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["targetWorkspace"],
message: "Bitbucket Workspace required"
});
}

if (!val.targetRepo) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["targetRepo"],
message: "Bitbucket Repo required"
});
}

if (val.scope.value === BitbucketScope.Env && !val.targetEnvironment) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["targetEnvironment"],
message: "Required"
message: "Bitbucket Deployment Environment required"
});
}
});
@@ -66,7 +82,7 @@ export const BitbucketConfigurePage = () => {
const navigate = useNavigate();
const createIntegration = useCreateIntegration();

const { watch, control, reset, handleSubmit } = useForm<TFormData>({
const { watch, control, handleSubmit, setValue, reset } = useForm<TFormData>({
resolver: zodResolver(formSchema),
defaultValues: {
secretPath: "/"
@@ -90,14 +106,15 @@ export const BitbucketConfigurePage = () => {
workspaceSlug: bitBucketWorkspace?.slug
});

const { data: bitbucketEnvironments } = useGetIntegrationAuthBitBucketEnvironments(
{
integrationAuthId: (integrationAuthId as string) ?? "",
workspaceSlug: bitBucketWorkspace?.slug,
repoSlug: bitBucketRepo?.appId
},
{ enabled: Boolean(bitBucketWorkspace?.slug && bitBucketRepo?.appId) }
);
const { data: bitbucketEnvironments, isPending: isBitbucketEnvironmentsLoading } =
useGetIntegrationAuthBitBucketEnvironments(
{
integrationAuthId: (integrationAuthId as string) ?? "",
workspaceSlug: bitBucketWorkspace?.slug ?? "",
repoSlug: bitBucketRepo?.appId ?? ""
},
{ enabled: Boolean(bitBucketWorkspace?.slug && bitBucketRepo?.appId) }
);

const onSubmit = async ({
targetRepo,
@@ -107,6 +124,8 @@ export const BitbucketConfigurePage = () => {
targetEnvironment,
scope
}: TFormData) => {
if (!targetRepo || !targetWorkspace) return;

try {
await createIntegration.mutateAsync({
integrationAuthId,
@@ -147,7 +166,14 @@ export const BitbucketConfigurePage = () => {
};

useEffect(() => {
if (!bitbucketRepos || !bitbucketWorkspaces || !currentWorkspace) return;
if (
bitBucketWorkspace ||
bitBucketRepo ||
!bitbucketRepos ||
!bitbucketWorkspaces ||
!currentWorkspace
)
return;

reset({
targetRepo: bitbucketRepos[0],
@@ -224,12 +250,17 @@ export const BitbucketConfigurePage = () => {
getOptionValue={(option) => option.slug}
value={value}
getOptionLabel={(option) => option.name}
onChange={onChange}
onChange={(option) => {
onChange(option);
setValue("targetRepo", null);
setValue("targetEnvironment", null);
}}
options={bitbucketWorkspaces}
placeholder={
bitbucketWorkspaces?.length ? "Select a workspace..." : "No workspaces found..."
}
isDisabled={!bitbucketWorkspaces?.length}
isLoading={isBitbucketWorkspacesLoading}
isDisabled={!bitbucketWorkspaces?.length || isBitbucketWorkspacesLoading}
/>
</FormControl>
)}
@@ -243,12 +274,16 @@ export const BitbucketConfigurePage = () => {
getOptionValue={(option) => option.appId!}
value={value}
getOptionLabel={(option) => option.name}
onChange={onChange}
onChange={(option) => {
onChange(option);
setValue("targetEnvironment", null);
}}
options={bitbucketRepos}
placeholder={
bitbucketRepos?.length ? "Select a repository..." : "No repositories found..."
}
isDisabled={!bitbucketRepos?.length}
isLoading={isBitbucketReposLoading}
isDisabled={!bitbucketRepos?.length || isBitbucketReposLoading}
/>
</FormControl>
)}
@@ -290,7 +325,8 @@ export const BitbucketConfigurePage = () => {
? "Select an environment..."
: "No environments found..."
}
isDisabled={!bitbucketEnvironments?.length}
isLoading={isBitbucketEnvironmentsLoading && Boolean(bitBucketRepo)}
isDisabled={!bitbucketEnvironments?.length || isBitbucketEnvironmentsLoading}
/>
</FormControl>
)}
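The Bitbucket form schema makes targetRepo and targetEnvironment nullish() and moves the "required" checks into a superRefine block, so the workspace, repo, and (only for environment-scoped syncs) deployment environment each get their own targeted error message, and clearing a dependent selection via setValue(..., null) no longer conflicts with the schema. A condensed sketch of that validation pattern (the BitbucketScope values are assumptions, and targetWorkspace is simplified to nullish here as well):

import { z } from "zod";

// Assumed scope values for illustration.
enum BitbucketScope {
  Repo = "repo",
  Env = "environment"
}

const formSchema = z
  .object({
    targetWorkspace: z.object({ name: z.string(), slug: z.string() }).nullish(),
    targetRepo: z.object({ name: z.string(), appId: z.string().optional() }).nullish(),
    targetEnvironment: z.object({ name: z.string(), uuid: z.string() }).nullish(),
    scope: z.object({ label: z.string(), value: z.nativeEnum(BitbucketScope) })
  })
  .superRefine((val, ctx) => {
    // Field-level "required" errors instead of failing the object shape itself.
    if (!val.targetWorkspace) {
      ctx.addIssue({ code: z.ZodIssueCode.custom, path: ["targetWorkspace"], message: "Bitbucket Workspace required" });
    }
    if (!val.targetRepo) {
      ctx.addIssue({ code: z.ZodIssueCode.custom, path: ["targetRepo"], message: "Bitbucket Repo required" });
    }
    // The deployment environment is only mandatory for environment-scoped syncs.
    if (val.scope.value === BitbucketScope.Env && !val.targetEnvironment) {
      ctx.addIssue({ code: z.ZodIssueCode.custom, path: ["targetEnvironment"], message: "Bitbucket Deployment Environment required" });
    }
  });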