Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-21 02:27:03 +00:00)

Compare commits: 315 commits
daniel/act ... ssh-cli
Author | SHA1 | Date | |
---|---|---|---|
058475fc3f | |||
ee4eb7f84b | |||
b4b417658f | |||
31a21a432d | |||
765280eef6 | |||
be36827392 | |||
097512c691 | |||
64a982d5e0 | |||
1080438ad8 | |||
eb3acae332 | |||
a0b3520899 | |||
2f6f359ddf | |||
df8c1e54e0 | |||
cac060deff | |||
47269bc95b | |||
8502e9a1d8 | |||
d89eb4fa84 | |||
ca7ab4eaf1 | |||
c57fc5e3f1 | |||
9b4e1f561e | |||
097fcad5ae | |||
d1547564f9 | |||
24acb98978 | |||
0fd8274ff0 | |||
a857375cc1 | |||
69bf9dc20f | |||
5151c91760 | |||
f12d8b6f89 | |||
695c499448 | |||
dc715cc238 | |||
d873f2e50f | |||
16ea757928 | |||
8713643bc1 | |||
c35657ed49 | |||
5b4487fae8 | |||
474731d8ef | |||
e9f254f81b | |||
639057415f | |||
c38dae2319 | |||
25191cff38 | |||
a6898717f4 | |||
cc77175188 | |||
fcb944d964 | |||
a8ad8707ac | |||
4568370552 | |||
c000a6f707 | |||
1ace8eebf8 | |||
3b3482b280 | |||
422fd27b9a | |||
a7b25f3bd8 | |||
7896b4e85e | |||
ba5e6fe28a | |||
8d79fa3529 | |||
1a55909b73 | |||
b2efb2845a | |||
c680030f01 | |||
cf1070c65e | |||
3a8219db03 | |||
9d9f6ec268 | |||
56aab172d3 | |||
c8ee06341a | |||
e32716c258 | |||
7f0d27e3dc | |||
5d9b99bee7 | |||
8fdc438940 | |||
d2b909b72b | |||
68988a3e78 | |||
3c954ea257 | |||
a92de1273e | |||
97f85fa8d9 | |||
84c26581a6 | |||
a808b6d4a0 | |||
826916399b | |||
7d5aba258a | |||
40d69d4620 | |||
3f6b1fe3bd | |||
c648235390 | |||
3c588beebe | |||
6614721d34 | |||
bbd8a049fb | |||
a91f64f742 | |||
1bc508b286 | |||
d3d30eba80 | |||
623a99be0e | |||
f80023f8f3 | |||
98289f56ae | |||
c40f195c1d | |||
fbfe694fc0 | |||
2098bd3be2 | |||
39f71f9488 | |||
ef82c664a6 | |||
fcbedfaf1b | |||
882f6b22f5 | |||
bcd778457d | |||
0a1242db75 | |||
a078cb6059 | |||
095b26c8c9 | |||
fcdfcd0219 | |||
132de1479d | |||
d4a76b3621 | |||
331dcd4d79 | |||
025f64f068 | |||
05d7f94518 | |||
b58e32c754 | |||
4ace30aecd | |||
8b2a866994 | |||
b4386af2e0 | |||
2b44e32ac1 | |||
ec5e6eb7b4 | |||
48cb5f6e9b | |||
3c63312944 | |||
0842901d4f | |||
32d6826ade | |||
a750f48922 | |||
67662686f3 | |||
11c96245a7 | |||
a63191e11d | |||
7a13c155f5 | |||
fb6a085bf9 | |||
6c533f89d3 | |||
5ceb30f43f | |||
7728a4793b | |||
d3523ed1d6 | |||
35a9b2a38d | |||
16a9f8c194 | |||
9557639bfe | |||
1049f95952 | |||
e618d5ca5f | |||
d659250ce8 | |||
87363eabfe | |||
d1b9c316d8 | |||
b9867c0d06 | |||
afa2f383c5 | |||
39f7354fec | |||
c46c0cb1e8 | |||
6905ffba4e | |||
64fd423c61 | |||
da1a7466d1 | |||
d3f3f34129 | |||
c8fba7ce4c | |||
82c3e943eb | |||
dc3903ff15 | |||
a9c01dcf1f | |||
ae51fbb8f2 | |||
62910e93ca | |||
586b9d9a56 | |||
9e3c632a1f | |||
bb094f60c1 | |||
6d709fba62 | |||
27beca7099 | |||
28e7e4c52d | |||
cfc0ca1f03 | |||
b96593d0ab | |||
2de5896ba4 | |||
3455ad3898 | |||
c7a32a3b05 | |||
1ebfed8c11 | |||
a18f3c2919 | |||
16d215b588 | |||
a852b15a1e | |||
cacd9041b0 | |||
cfeffebd46 | |||
1dceedcdb4 | |||
14f03c38c3 | |||
be9f096e75 | |||
49133a044f | |||
b7fe3743db | |||
c5fded361c | |||
e676acbadf | |||
9b31a7bbb1 | |||
345be85825 | |||
f82b11851a | |||
b466b3073b | |||
46105fc315 | |||
3cf8fd2ff8 | |||
5277a50b3e | |||
dab8f0b261 | |||
4293665130 | |||
8afa65c272 | |||
4c739fd57f | |||
bcc2840020 | |||
8b3af92d23 | |||
9ca58894f0 | |||
d131314de0 | |||
9c03144f19 | |||
5495ffd78e | |||
a200469c72 | |||
85c3074216 | |||
cfc55ff283 | |||
7179b7a540 | |||
6c4cb5e084 | |||
9cfb044178 | |||
105eb70fd9 | |||
18a2547b24 | |||
588b3c77f9 | |||
a04834c7c9 | |||
9df9f4a5da | |||
afdc704423 | |||
57261cf0c8 | |||
06f6004993 | |||
f3bfb9cc5a | |||
48fb77be49 | |||
c3956c60e9 | |||
f55bcb93ba | |||
d3fb2a6a74 | |||
6a23b74481 | |||
602cf4b3c4 | |||
84ff71fef2 | |||
4c01bddf0e | |||
add5742b8c | |||
68f3964206 | |||
90374971ae | |||
3a1eadba8c | |||
5305017ce2 | |||
cf5f49d14e | |||
4f4b5be8ea | |||
ecea79f040 | |||
586b901318 | |||
ad8d247cdc | |||
3b47d7698b | |||
aa9a86df71 | |||
33411335ed | |||
ca55f19926 | |||
3794521c56 | |||
728f023263 | |||
229706f57f | |||
6cf2488326 | |||
2c402fbbb6 | |||
92ce05283b | |||
39d92ce6ff | |||
44a026446e | |||
bbf52c9a48 | |||
539e5b1907 | |||
44b02d5324 | |||
71fb6f1d11 | |||
e64100fab1 | |||
e4b149a849 | |||
5bcf07b32b | |||
3b0c48052b | |||
df50e3b0f9 | |||
bdf2ae40b6 | |||
b6c05a2f25 | |||
960efb9cf9 | |||
aa8d58abad | |||
cfb0cc4fea | |||
7712df296c | |||
7c38932121 | |||
69ad9845e1 | |||
7321c237d7 | |||
32430a6a16 | |||
3d6ea3251e | |||
f034adba76 | |||
463eb0014e | |||
be39e63832 | |||
21403f6fe5 | |||
2f9e542b31 | |||
089d6812fd | |||
464a3ccd53 | |||
71c9c0fa1e | |||
46ad1d47a9 | |||
2b977eeb33 | |||
a692148597 | |||
b762816e66 | |||
cf275979ba | |||
64bfa4f334 | |||
e3eb14bfd9 | |||
24b50651c9 | |||
1cd459fda7 | |||
38917327d9 | |||
63fac39fff | |||
7c62a776fb | |||
d7b494c6f8 | |||
93208afb36 | |||
1a084d8fcf | |||
269f851cbf | |||
7a61995dd4 | |||
dd4f133c6c | |||
c41d27e1ae | |||
1866ed8d23 | |||
7b3b232dde | |||
9d618b4ae9 | |||
5330ab2171 | |||
662e588c22 | |||
90057d80ff | |||
1eda7aaaac | |||
00dcadbc08 | |||
7a7289ebd0 | |||
e5d4677fd6 | |||
bce3f3d676 | |||
300372fa98 | |||
47a4f8bae9 | |||
863719f296 | |||
7317dc1cf5 | |||
75df898e78 | |||
0de6add3f7 | |||
0c008b6393 | |||
0c3894496c | |||
35fbd5d49d | |||
d03b453e3d | |||
d4d468660d | |||
75a4965928 | |||
660c09ded4 | |||
b5287d91c0 | |||
6a17763237 | |||
f2bd3daea2 | |||
7f70f96936 | |||
73e0a54518 | |||
0d295a2824 | |||
3be3d807d2 | |||
9f7ea3c4e5 | |||
e67218f170 | |||
269c40c67c | |||
ba1fd8a3f7 | |||
ed7fc0e5cd | |||
1ae6213387 |
@@ -74,8 +74,8 @@ CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=

OTEL_TELEMETRY_COLLECTION_ENABLED=
OTEL_EXPORT_TYPE=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=
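The hunk above swaps the empty OTEL_* placeholders for concrete defaults (collection disabled, Prometheus export) and adds OTLP endpoint/interval knobs. A minimal sketch of how such flags are commonly wired into an OpenTelemetry metrics pipeline follows; it assumes the standard @opentelemetry/sdk-metrics and @opentelemetry/exporter-prometheus packages alongside the @opentelemetry/exporter-metrics-otlp-proto dependency that appears later in this compare, and the port and fallback interval are illustrative, not taken from this diff.

import { MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";

const telemetryEnabled = process.env.OTEL_TELEMETRY_COLLECTION_ENABLED === "true";
const exportType = process.env.OTEL_EXPORT_TYPE ?? "prometheus";

// Prometheus export is pull-based (the exporter doubles as a MetricReader);
// the OTLP path pushes every OTEL_OTLP_PUSH_INTERVAL milliseconds.
const reader =
  exportType === "prometheus"
    ? new PrometheusExporter({ port: 9464 })
    : new PeriodicExportingMetricReader({
        exporter: new OTLPMetricExporter({ url: process.env.OTEL_EXPORT_OTLP_ENDPOINT }),
        exportIntervalMillis: Number(process.env.OTEL_OTLP_PUSH_INTERVAL ?? 30000)
      });

export const meterProvider = telemetryEnabled ? new MeterProvider({ readers: [reader] }) : new MeterProvider();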
@@ -1,6 +1,12 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
  echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
  exit 1
fi

npx lint-staged

infisical scan git-changes --staged -v
@@ -69,13 +69,21 @@ RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

WORKDIR /app

# Required for pkcs11js
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -91,13 +99,21 @@ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/

WORKDIR /app

# Required for pkcs11js
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -108,13 +124,24 @@ RUN mkdir frontend-build

# Production stage
FROM base AS production

# Install necessary packages
# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
    ca-certificates \
    curl \
    git \
    python3 \
    make \
    g++ \
    unixodbc \
    unixodbc-dev \
    freetds-dev \
    freetds-bin \
    tdsodbc \
    && rm -rf /var/lib/apt/lists/*

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
    && apt-get update && apt-get install -y infisical=0.31.1 \
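The [FreeTDS] entry written to /etc/odbcinst.ini above is what the Node odbc package (added to the backend dependencies elsewhere in this compare) resolves by driver name; the diff's own comments tie this to SAP ASE dynamic secrets. A minimal sketch of opening a connection through that driver follows; the host, port, credentials, database, and TDS_Version value are placeholder assumptions, not values taken from this diff.

import odbc from "odbc";

export const checkSapAseConnection = async () => {
  // "Driver={FreeTDS}" must match the section name registered in /etc/odbcinst.ini.
  const connection = await odbc.connect(
    "Driver={FreeTDS};Server=sap-ase.example.internal;Port=5000;UID=app_user;PWD=app_password;Database=master;TDS_Version=5.0"
  );
  const rows = await connection.query("SELECT 1 AS ok");
  await connection.close();
  return rows;
};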
@@ -72,8 +72,16 @@ RUN addgroup --system --gid 1001 nodejs \

WORKDIR /app

# Required for pkcs11js
RUN apk add --no-cache python3 make g++
# Install all required dependencies for build
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -88,8 +96,19 @@ FROM base AS backend-runner

WORKDIR /app

# Required for pkcs11js
RUN apk add --no-cache python3 make g++
# Install all required dependencies for runtime
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

COPY backend/package*.json ./
RUN npm ci --only-production

@@ -100,11 +119,32 @@ RUN mkdir frontend-build

# Production stage
FROM base AS production

RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
    'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
    && apk add infisical=0.31.1 && apk add --no-cache git

WORKDIR /

# Install all required runtime dependencies
RUN apk --update add \
    python3 \
    make \
    g++ \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev \
    bash \
    curl \
    git

# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
    && adduser --system --uid 1001 non-root-user

@@ -127,7 +167,6 @@ ARG CAPTCHA_SITE_KEY

ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
    BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY

WORKDIR /

COPY --from=backend-runner /app /backend

@@ -149,4 +188,4 @@ EXPOSE 443

USER non-root-user

CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]
11 README.md
@@ -14,15 +14,6 @@
    <a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
</h4>

<p align="center">
    <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
        <img src=".github/images/deploy-to-aws.png" width="137" />
    </a>
    <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
        <img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
    </a>
</p>

<h4 align="center">
    <a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
        <img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />

@@ -75,7 +66,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus

### Key Management (KMS):

- **[Cryptograhic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.

### General Platform:
@@ -9,6 +9,15 @@ RUN apk --update add \
    make \
    g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

COPY package*.json ./
RUN npm ci --only-production

@@ -28,6 +37,17 @@ RUN apk --update add \
    make \
    g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

RUN npm ci --only-production && npm cache clean --force

COPY --from=build /app .

@@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0

ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
    SOFTHSM2_SOURCES=/tmp/softhsm2

# install build dependencies including python3
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
    alpine-sdk \
    autoconf \

@@ -19,7 +19,19 @@ RUN apk --update add \
    make \
    g++

# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
    unixodbc \
    freetds \
    unixodbc-dev \
    libc-dev \
    freetds-dev

RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini

# build and install SoftHSM2

RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
@@ -10,12 +10,15 @@ export const mockQueue = (): TQueueServiceFactory => {
  queue: async (name, jobData) => {
    job[name] = jobData;
  },
  queuePg: async () => {},
  initialize: async () => {},
  shutdown: async () => undefined,
  stopRepeatableJob: async () => true,
  start: (name, jobFn) => {
    queues[name] = jobFn;
    workers[name] = jobFn;
  },
  startPg: async () => {},
  listen: (name, event) => {
    events[name] = event;
  },
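The new queuePg/startPg entries in the queue mock track a Postgres-backed queue path that arrives together with the pg-boss ^10.1.5 dependency and the { dbConnectionUrl } option passed to queueServiceFactory later in this compare. A minimal sketch of the pg-boss pattern those mocks stand in for is below; the queue name, payload, and the DB_CONNECTION_URI environment variable are illustrative assumptions rather than code from this diff.

import PgBoss from "pg-boss";

export const startPgQueueExample = async () => {
  // The connection string mirrors the { dbConnectionUrl } option now given to queueServiceFactory.
  const boss = new PgBoss(process.env.DB_CONNECTION_URI as string);
  await boss.start();

  // pg-boss v10 hands workers a batch of jobs, hence the array in the handler signature.
  await boss.work("sample-queue", async (jobs) => {
    for (const job of jobs) {
      console.log(job.id, job.data);
    }
  });

  await boss.send("sample-queue", { hello: "world" });
};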
86 backend/e2e-test/routes/v3/secret-recursive.spec.ts (Normal file)
@@ -0,0 +1,86 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";

import { seedData1 } from "@app/db/seed-data";

describe("Secret Recursive Testing", async () => {
  const projectId = seedData1.projectV3.id;
  const folderAndSecretNames = [
    { name: "deep1", path: "/", expectedSecretCount: 4 },
    { name: "deep21", path: "/deep1", expectedSecretCount: 2 },
    { name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
    { name: "deep22", path: "/deep2", expectedSecretCount: 1 }
  ];

  beforeAll(async () => {
    const rootFolderIds: string[] = [];
    for (const folder of folderAndSecretNames) {
      // eslint-disable-next-line no-await-in-loop
      const createdFolder = await createFolder({
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        secretPath: folder.path,
        name: folder.name
      });

      if (folder.path === "/") {
        rootFolderIds.push(createdFolder.id);
      }
      // eslint-disable-next-line no-await-in-loop
      await createSecretV2({
        secretPath: folder.path,
        authToken: jwtAuthToken,
        environmentSlug: "prod",
        workspaceId: projectId,
        key: folder.name,
        value: folder.name
      });
    }

    return async () => {
      await Promise.all(
        rootFolderIds.map((id) =>
          deleteFolder({
            authToken: jwtAuthToken,
            secretPath: "/",
            id,
            workspaceId: projectId,
            environmentSlug: "prod"
          })
        )
      );

      await deleteSecretV2({
        authToken: jwtAuthToken,
        secretPath: "/",
        workspaceId: projectId,
        environmentSlug: "prod",
        key: folderAndSecretNames[0].name
      });
    };
  });

  test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
    const secrets = await getSecretsV2({
      authToken: jwtAuthToken,
      secretPath: path,
      workspaceId: projectId,
      environmentSlug: "prod",
      recursive: true
    });

    expect(secrets.secrets.length).toEqual(expectedSecretCount);
    expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
      folderAndSecretNames
        .filter((el) => el.path.startsWith(path))
        .sort((a, b) => a.name.localeCompare(b.name))
        .map((el) =>
          expect.objectContaining({
            secretKey: el.name,
            secretValue: el.name
          })
        )
    );
  });
});
@@ -97,6 +97,7 @@ export const getSecretsV2 = async (dto: {
  environmentSlug: string;
  secretPath: string;
  authToken: string;
  recursive?: boolean;
}) => {
  const getSecretsResponse = await testServer.inject({
    method: "GET",

@@ -109,7 +110,8 @@ export const getSecretsV2 = async (dto: {
      environment: dto.environmentSlug,
      secretPath: dto.secretPath,
      expandSecretReferences: "true",
      include_imports: "true"
      include_imports: "true",
      recursive: String(dto.recursive || false)
    }
  });
  expect(getSecretsResponse.statusCode).toBe(200);
@@ -53,13 +53,13 @@ export default {
      extension: "ts"
    });
    const smtp = mockSmtpServer();
    const queue = queueServiceFactory(cfg.REDIS_URL);
    const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
    const keyStore = keyStoreFactory(cfg.REDIS_URL);

    const hsmModule = initializeHsmModule();
    hsmModule.initialize();

    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });

    // @ts-expect-error type
    globalThis.testServer = server;
220
backend/package-lock.json
generated
220
backend/package-lock.json
generated
@ -24,14 +24,17 @@
|
||||
"@fastify/multipart": "8.3.0",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/request-context": "^5.1.0",
|
||||
"@fastify/session": "^10.7.0",
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@google-cloud/kms": "^4.5.0",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@octopusdeploy/api-client": "^3.4.1",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
|
||||
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
|
||||
@ -46,7 +49,6 @@
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@slack/oauth": "^3.0.1",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@team-plain/typescript-sdk": "^4.6.1",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
@ -80,6 +82,7 @@
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
"nodemailer": "^6.9.9",
|
||||
"odbc": "^2.4.9",
|
||||
"openid-client": "^5.6.5",
|
||||
"ora": "^7.0.1",
|
||||
"oracledb": "^6.4.0",
|
||||
@ -89,6 +92,7 @@
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"pg": "^8.11.3",
|
||||
"pg-boss": "^10.1.5",
|
||||
"pg-query-stream": "^4.5.3",
|
||||
"picomatch": "^3.0.1",
|
||||
"pino": "^8.16.2",
|
||||
@ -5528,6 +5532,15 @@
|
||||
"toad-cache": "^3.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/request-context": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/request-context/-/request-context-5.1.0.tgz",
|
||||
"integrity": "sha512-PM7wrLJOEylVDpxabOFLaYsdAiaa0lpDUcP2HMFJ1JzgiWuC6k4r3duf6Pm9YLnzlGmT+Yp4tkQjqsu7V/pSOA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fastify-plugin": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/send": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/send/-/send-2.1.0.tgz",
|
||||
@ -5586,6 +5599,18 @@
|
||||
"yaml": "^2.2.2"
|
||||
}
|
||||
},
|
||||
"node_modules/@google-cloud/kms": {
|
||||
"version": "4.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/kms/-/kms-4.5.0.tgz",
|
||||
"integrity": "sha512-i2vC0DI7bdfEhQszqASTw0KVvbB7HsO2CwTBod423NawAu7FWi+gVVa7NLfXVNGJaZZayFfci2Hu+om/HmyEjQ==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"google-gax": "^4.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@google-cloud/paginator": {
|
||||
"version": "5.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
|
||||
@ -5652,14 +5677,6 @@
|
||||
"uuid": "dist/bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/@graphql-typed-document-node/core": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.2.0.tgz",
|
||||
"integrity": "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==",
|
||||
"peerDependencies": {
|
||||
"graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@grpc/grpc-js": {
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.2.tgz",
|
||||
@ -6944,6 +6961,21 @@
|
||||
"resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.1.0.tgz",
|
||||
"integrity": "sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w=="
|
||||
},
|
||||
"node_modules/@octopusdeploy/api-client": {
|
||||
"version": "3.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@octopusdeploy/api-client/-/api-client-3.4.1.tgz",
|
||||
"integrity": "sha512-j6FRgDNzc6AQoT3CAguYLWxoMR4W5TKCT1BCPpqjEN9mknmdMSKfYORs3djn/Yj/BhqtITTydDpBoREbzKY5+g==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"adm-zip": "^0.5.9",
|
||||
"axios": "^1.2.1",
|
||||
"form-data": "^4.0.0",
|
||||
"glob": "^8.0.3",
|
||||
"lodash": "^4.17.21",
|
||||
"semver": "^7.3.8",
|
||||
"urijs": "^1.19.11"
|
||||
}
|
||||
},
|
||||
"node_modules/@opentelemetry/api": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
|
||||
@ -9929,18 +9961,6 @@
|
||||
"optional": true,
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@team-plain/typescript-sdk": {
|
||||
"version": "4.6.1",
|
||||
"resolved": "https://registry.npmjs.org/@team-plain/typescript-sdk/-/typescript-sdk-4.6.1.tgz",
|
||||
"integrity": "sha512-Uy9QJXu9U7bJb6WXL9sArGk7FXPpzdqBd6q8tAF1vexTm8fbTJRqcikTKxGtZmNADt+C2SapH3cApM4oHpO4lQ==",
|
||||
"dependencies": {
|
||||
"@graphql-typed-document-node/core": "^3.2.0",
|
||||
"ajv": "^8.12.0",
|
||||
"ajv-formats": "^2.1.1",
|
||||
"graphql": "^16.6.0",
|
||||
"zod": "3.22.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@techteamer/ocsp": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@techteamer/ocsp/-/ocsp-1.0.1.tgz",
|
||||
@ -12232,14 +12252,6 @@
|
||||
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
||||
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
|
||||
},
|
||||
"node_modules/buffer-writer": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
|
||||
"integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/bullmq": {
|
||||
"version": "5.4.2",
|
||||
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.4.2.tgz",
|
||||
@ -15059,6 +15071,44 @@
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/google-gax": {
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz",
|
||||
"integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@grpc/grpc-js": "^1.10.9",
|
||||
"@grpc/proto-loader": "^0.7.13",
|
||||
"@types/long": "^4.0.0",
|
||||
"abort-controller": "^3.0.0",
|
||||
"duplexify": "^4.0.0",
|
||||
"google-auth-library": "^9.3.0",
|
||||
"node-fetch": "^2.7.0",
|
||||
"object-hash": "^3.0.0",
|
||||
"proto3-json-serializer": "^2.0.2",
|
||||
"protobufjs": "^7.3.2",
|
||||
"retry-request": "^7.0.0",
|
||||
"uuid": "^9.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/google-gax/node_modules/@types/long": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
|
||||
"integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/google-gax/node_modules/object-hash": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz",
|
||||
"integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/googleapis": {
|
||||
"version": "137.1.0",
|
||||
"resolved": "https://registry.npmjs.org/googleapis/-/googleapis-137.1.0.tgz",
|
||||
@ -15109,14 +15159,6 @@
|
||||
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/graphql": {
|
||||
"version": "16.9.0",
|
||||
"resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz",
|
||||
"integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==",
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/gtoken": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz",
|
||||
@ -17862,6 +17904,27 @@
|
||||
"jsonwebtoken": "^9.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/odbc": {
|
||||
"version": "2.4.9",
|
||||
"resolved": "https://registry.npmjs.org/odbc/-/odbc-2.4.9.tgz",
|
||||
"integrity": "sha512-sHFWOKfyj4oFYds7YBlN+fq9ZjC2J6CsCN5CNMABpKLp+NZdb8bnanb57OaoDy1VFXEOTE91S+F900J/aIPu6w==",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@mapbox/node-pre-gyp": "^1.0.5",
|
||||
"async": "^3.0.1",
|
||||
"node-addon-api": "^3.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/odbc/node_modules/node-addon-api": {
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz",
|
||||
"integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/oidc-token-hash": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.0.3.tgz",
|
||||
@ -18137,11 +18200,6 @@
|
||||
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
|
||||
"license": "BlueOak-1.0.0"
|
||||
},
|
||||
"node_modules/packet-reader": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz",
|
||||
"integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ=="
|
||||
},
|
||||
"node_modules/parent-module": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
|
||||
@ -18360,15 +18418,13 @@
|
||||
"integrity": "sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg=="
|
||||
},
|
||||
"node_modules/pg": {
|
||||
"version": "8.11.3",
|
||||
"resolved": "https://registry.npmjs.org/pg/-/pg-8.11.3.tgz",
|
||||
"integrity": "sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==",
|
||||
"version": "8.13.1",
|
||||
"resolved": "https://registry.npmjs.org/pg/-/pg-8.13.1.tgz",
|
||||
"integrity": "sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==",
|
||||
"dependencies": {
|
||||
"buffer-writer": "2.0.0",
|
||||
"packet-reader": "1.0.0",
|
||||
"pg-connection-string": "^2.6.2",
|
||||
"pg-pool": "^3.6.1",
|
||||
"pg-protocol": "^1.6.0",
|
||||
"pg-connection-string": "^2.7.0",
|
||||
"pg-pool": "^3.7.0",
|
||||
"pg-protocol": "^1.7.0",
|
||||
"pg-types": "^2.1.0",
|
||||
"pgpass": "1.x"
|
||||
},
|
||||
@ -18387,6 +18443,19 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/pg-boss": {
|
||||
"version": "10.1.5",
|
||||
"resolved": "https://registry.npmjs.org/pg-boss/-/pg-boss-10.1.5.tgz",
|
||||
"integrity": "sha512-H87NL6c7N6nTCSCePh16EaSQVSFevNXWdJuzY6PZz4rw+W/nuMKPfI/vYyXS0AdT1g1Q3S3EgeOYOHcB7ZVToQ==",
|
||||
"dependencies": {
|
||||
"cron-parser": "^4.9.0",
|
||||
"pg": "^8.13.0",
|
||||
"serialize-error": "^8.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/pg-cloudflare": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz",
|
||||
@ -18423,17 +18492,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/pg-pool": {
|
||||
"version": "3.6.1",
|
||||
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.1.tgz",
|
||||
"integrity": "sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==",
|
||||
"version": "3.7.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.7.0.tgz",
|
||||
"integrity": "sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==",
|
||||
"peerDependencies": {
|
||||
"pg": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pg-protocol": {
|
||||
"version": "1.6.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz",
|
||||
"integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q=="
|
||||
"version": "1.7.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.7.0.tgz",
|
||||
"integrity": "sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ=="
|
||||
},
|
||||
"node_modules/pg-query-stream": {
|
||||
"version": "4.5.3",
|
||||
@ -18462,9 +18531,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/pg/node_modules/pg-connection-string": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.2.tgz",
|
||||
"integrity": "sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA=="
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.7.0.tgz",
|
||||
"integrity": "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA=="
|
||||
},
|
||||
"node_modules/pgpass": {
|
||||
"version": "1.0.5",
|
||||
@ -19175,6 +19244,18 @@
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/proto3-json-serializer": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz",
|
||||
"integrity": "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"protobufjs": "^7.2.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/protobufjs": {
|
||||
"version": "7.4.0",
|
||||
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
|
||||
@ -20063,6 +20144,20 @@
|
||||
"resolved": "https://registry.npmjs.org/seq-queue/-/seq-queue-0.0.5.tgz",
|
||||
"integrity": "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q=="
|
||||
},
|
||||
"node_modules/serialize-error": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-8.1.0.tgz",
|
||||
"integrity": "sha512-3NnuWfM6vBYoy5gZFvHiYsVbafvI9vZv/+jlIigFn4oP4zjNPK3LhcY0xSCgeb1a5L8jO71Mit9LlNoi2UfDDQ==",
|
||||
"dependencies": {
|
||||
"type-fest": "^0.20.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/serve-static": {
|
||||
"version": "1.16.2",
|
||||
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
|
||||
@ -22082,7 +22177,6 @@
|
||||
"version": "0.20.2",
|
||||
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
|
||||
"integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
@ -22365,6 +22459,12 @@
|
||||
"punycode": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/urijs": {
|
||||
"version": "1.19.11",
|
||||
"resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.11.tgz",
|
||||
"integrity": "sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/url": {
|
||||
"version": "0.10.3",
|
||||
"resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",
|
||||
|
@ -132,14 +132,17 @@
|
||||
"@fastify/multipart": "8.3.0",
|
||||
"@fastify/passport": "^2.4.0",
|
||||
"@fastify/rate-limit": "^9.0.0",
|
||||
"@fastify/request-context": "^5.1.0",
|
||||
"@fastify/session": "^10.7.0",
|
||||
"@fastify/swagger": "^8.14.0",
|
||||
"@fastify/swagger-ui": "^2.1.0",
|
||||
"@google-cloud/kms": "^4.5.0",
|
||||
"@node-saml/passport-saml": "^4.0.4",
|
||||
"@octokit/auth-app": "^7.1.1",
|
||||
"@octokit/plugin-retry": "^5.0.5",
|
||||
"@octokit/rest": "^20.0.2",
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@octopusdeploy/api-client": "^3.4.1",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
|
||||
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
|
||||
@ -154,7 +157,6 @@
|
||||
"@sindresorhus/slugify": "1.1.0",
|
||||
"@slack/oauth": "^3.0.1",
|
||||
"@slack/web-api": "^7.3.4",
|
||||
"@team-plain/typescript-sdk": "^4.6.1",
|
||||
"@ucast/mongo2js": "^1.3.4",
|
||||
"ajv": "^8.12.0",
|
||||
"argon2": "^0.31.2",
|
||||
@ -188,6 +190,7 @@
|
||||
"mysql2": "^3.9.8",
|
||||
"nanoid": "^3.3.4",
|
||||
"nodemailer": "^6.9.9",
|
||||
"odbc": "^2.4.9",
|
||||
"openid-client": "^5.6.5",
|
||||
"ora": "^7.0.1",
|
||||
"oracledb": "^6.4.0",
|
||||
@ -197,6 +200,7 @@
|
||||
"passport-google-oauth20": "^2.0.0",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"pg": "^8.11.3",
|
||||
"pg-boss": "^10.1.5",
|
||||
"pg-query-stream": "^4.5.3",
|
||||
"picomatch": "^3.0.1",
|
||||
"pino": "^8.16.2",
|
||||
|
7 backend/src/@types/fastify-request-context.d.ts (vendored, Normal file)
@@ -0,0 +1,7 @@
import "@fastify/request-context";

declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}
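This module augmentation types the per-request store exposed by @fastify/request-context so that reqId can be set and read type-safely. A minimal sketch of the plugin in use follows; the route, the default store value, and populating reqId from Fastify's request id in an onRequest hook are illustrative choices, not code from this diff.

import Fastify from "fastify";
import { fastifyRequestContext } from "@fastify/request-context";

const app = Fastify();

// defaultStoreValues seeds the store before any hook runs; reqId is typed via the augmentation above.
await app.register(fastifyRequestContext, { defaultStoreValues: { reqId: "unknown" } });

app.addHook("onRequest", async (req) => {
  req.requestContext.set("reqId", String(req.id));
});

app.get("/ping", async (req) => ({ reqId: req.requestContext.get("reqId") }));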
4 backend/src/@types/fastify-zod.d.ts (vendored)
@@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";

import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";

declare global {

@@ -8,7 +8,7 @@ declare global {
    RawServerDefault,
    RawRequestDefaultExpression<RawServerDefault>,
    RawReplyDefaultExpression<RawServerDefault>,
    Readonly<Logger>,
    Readonly<CustomLogger>,
    ZodTypeProvider
  >;
9
backend/src/@types/fastify.d.ts
vendored
9
backend/src/@types/fastify.d.ts
vendored
@ -1,5 +1,7 @@
|
||||
import "fastify";
|
||||
|
||||
import { Redis } from "ioredis";
|
||||
|
||||
import { TUsers } from "@app/db/schemas";
|
||||
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
|
||||
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
|
||||
@ -50,6 +52,7 @@ import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-acces
|
||||
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
|
||||
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
|
||||
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
|
||||
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
|
||||
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
|
||||
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
|
||||
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
|
||||
@ -87,6 +90,10 @@ import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
|
||||
|
||||
declare module "fastify" {
|
||||
interface Session {
|
||||
callbackPort: string;
|
||||
}
|
||||
|
||||
interface FastifyRequest {
|
||||
realIp: string;
|
||||
// used for mfa session authentication
|
||||
@ -115,6 +122,7 @@ declare module "fastify" {
|
||||
}
|
||||
|
||||
interface FastifyInstance {
|
||||
redis: Redis;
|
||||
services: {
|
||||
login: TAuthLoginFactory;
|
||||
password: TAuthPasswordFactory;
|
||||
@ -155,6 +163,7 @@ declare module "fastify" {
|
||||
identityAwsAuth: TIdentityAwsAuthServiceFactory;
|
||||
identityAzureAuth: TIdentityAzureAuthServiceFactory;
|
||||
identityOidcAuth: TIdentityOidcAuthServiceFactory;
|
||||
identityJwtAuth: TIdentityJwtAuthServiceFactory;
|
||||
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
|
||||
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
|
||||
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
|
||||
|
16
backend/src/@types/knex.d.ts
vendored
16
backend/src/@types/knex.d.ts
vendored
@ -98,6 +98,9 @@ import {
|
||||
TIdentityGcpAuths,
|
||||
TIdentityGcpAuthsInsert,
|
||||
TIdentityGcpAuthsUpdate,
|
||||
TIdentityJwtAuths,
|
||||
TIdentityJwtAuthsInsert,
|
||||
TIdentityJwtAuthsUpdate,
|
||||
TIdentityKubernetesAuths,
|
||||
TIdentityKubernetesAuthsInsert,
|
||||
TIdentityKubernetesAuthsUpdate,
|
||||
@ -199,6 +202,9 @@ import {
|
||||
TProjectSlackConfigs,
|
||||
TProjectSlackConfigsInsert,
|
||||
TProjectSlackConfigsUpdate,
|
||||
TProjectSplitBackfillIds,
|
||||
TProjectSplitBackfillIdsInsert,
|
||||
TProjectSplitBackfillIdsUpdate,
|
||||
TProjectsUpdate,
|
||||
TProjectTemplates,
|
||||
TProjectTemplatesInsert,
|
||||
@ -590,6 +596,11 @@ declare module "knex/types/tables" {
|
||||
TIdentityOidcAuthsInsert,
|
||||
TIdentityOidcAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
|
||||
TIdentityJwtAuths,
|
||||
TIdentityJwtAuthsInsert,
|
||||
TIdentityJwtAuthsUpdate
|
||||
>;
|
||||
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
|
||||
TIdentityUaClientSecrets,
|
||||
TIdentityUaClientSecretsInsert,
|
||||
@ -830,5 +841,10 @@ declare module "knex/types/tables" {
|
||||
TProjectTemplatesUpdate
|
||||
>;
|
||||
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
|
||||
[TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
|
||||
TProjectSplitBackfillIds,
|
||||
TProjectSplitBackfillIdsInsert,
|
||||
TProjectSplitBackfillIdsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,23 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (!hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.string("description");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");

  if (hasProjectDescription) {
    await knex.schema.alterTable(TableName.Project, (t) => {
      t.dropColumn("description");
    });
  }
}
@@ -0,0 +1,20 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex(TableName.IdentityMetadata).whereNull("value").delete();
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
      t.string("value", 1020).alter();
    });
  }
}
@ -0,0 +1,59 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
|
||||
TableName.AccessApprovalPolicy,
|
||||
"deletedAt"
|
||||
);
|
||||
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
|
||||
TableName.SecretApprovalPolicy,
|
||||
"deletedAt"
|
||||
);
|
||||
|
||||
if (!hasAccessApprovalPolicyDeletedAtColumn) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
|
||||
t.timestamp("deletedAt");
|
||||
});
|
||||
}
|
||||
if (!hasSecretApprovalPolicyDeletedAtColumn) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
|
||||
t.timestamp("deletedAt");
|
||||
});
|
||||
}
|
||||
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
|
||||
t.dropForeign(["privilegeId"]);
|
||||
|
||||
// Add the new foreign key constraint with ON DELETE SET NULL
|
||||
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
|
||||
TableName.AccessApprovalPolicy,
|
||||
"deletedAt"
|
||||
);
|
||||
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
|
||||
TableName.SecretApprovalPolicy,
|
||||
"deletedAt"
|
||||
);
|
||||
|
||||
if (hasAccessApprovalPolicyDeletedAtColumn) {
|
||||
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
|
||||
t.dropColumn("deletedAt");
|
||||
});
|
||||
}
|
||||
if (hasSecretApprovalPolicyDeletedAtColumn) {
|
||||
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
|
||||
t.dropColumn("deletedAt");
|
||||
});
|
||||
}
|
||||
|
||||
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
|
||||
t.dropForeign(["privilegeId"]);
|
||||
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
|
||||
});
|
||||
}
|
@ -0,0 +1,34 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.IdentityJwtAuth))) {
|
||||
await knex.schema.createTable(TableName.IdentityJwtAuth, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
|
||||
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
|
||||
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
|
||||
t.jsonb("accessTokenTrustedIps").notNullable();
|
||||
t.uuid("identityId").notNullable().unique();
|
||||
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
|
||||
t.string("configurationType").notNullable();
|
||||
t.string("jwksUrl").notNullable();
|
||||
t.binary("encryptedJwksCaCert").notNullable();
|
||||
t.binary("encryptedPublicKeys").notNullable();
|
||||
t.string("boundIssuer").notNullable();
|
||||
t.string("boundAudiences").notNullable();
|
||||
t.jsonb("boundClaims").notNullable();
|
||||
t.string("boundSubject").notNullable();
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
await knex.schema.dropTableIfExists(TableName.IdentityJwtAuth);
|
||||
await dropOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
|
||||
}
|
@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      t.index("folderId");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
    await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
      t.dropIndex("folderId");
    });
  }
}
@ -0,0 +1,297 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { Knex } from "knex";
|
||||
import { v4 as uuidV4 } from "uuid";
|
||||
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { ProjectType, TableName } from "../schemas";
|
||||
|
||||
/* eslint-disable no-await-in-loop,@typescript-eslint/ban-ts-comment */
|
||||
const newProject = async (knex: Knex, projectId: string, projectType: ProjectType) => {
|
||||
const newProjectId = uuidV4();
|
||||
const project = await knex(TableName.Project).where("id", projectId).first();
|
||||
await knex(TableName.Project).insert({
|
||||
...project,
|
||||
type: projectType,
|
||||
// @ts-ignore id is required
|
||||
id: newProjectId,
|
||||
slug: slugify(`${project?.name}-${alphaNumericNanoId(4)}`)
|
||||
});
|
||||
|
||||
const customRoleMapping: Record<string, string> = {};
|
||||
const projectCustomRoles = await knex(TableName.ProjectRoles).where("projectId", projectId);
|
||||
if (projectCustomRoles.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.ProjectRoles,
|
||||
projectCustomRoles.map((el) => {
|
||||
const id = uuidV4();
|
||||
customRoleMapping[el.id] = id;
|
||||
return {
|
||||
...el,
|
||||
id,
|
||||
projectId: newProjectId,
|
||||
permissions: el.permissions ? JSON.stringify(el.permissions) : el.permissions
|
||||
};
|
||||
})
|
||||
);
|
||||
}
|
||||
const groupMembershipMapping: Record<string, string> = {};
|
||||
const groupMemberships = await knex(TableName.GroupProjectMembership).where("projectId", projectId);
|
||||
if (groupMemberships.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.GroupProjectMembership,
|
||||
groupMemberships.map((el) => {
|
||||
const id = uuidV4();
|
||||
groupMembershipMapping[el.id] = id;
|
||||
return { ...el, id, projectId: newProjectId };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const groupMembershipRoles = await knex(TableName.GroupProjectMembershipRole).whereIn(
|
||||
"projectMembershipId",
|
||||
groupMemberships.map((el) => el.id)
|
||||
);
|
||||
if (groupMembershipRoles.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.GroupProjectMembershipRole,
|
||||
groupMembershipRoles.map((el) => {
|
||||
const id = uuidV4();
|
||||
const projectMembershipId = groupMembershipMapping[el.projectMembershipId];
|
||||
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
|
||||
return { ...el, id, projectMembershipId, customRoleId };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const identityProjectMembershipMapping: Record<string, string> = {};
|
||||
const identities = await knex(TableName.IdentityProjectMembership).where("projectId", projectId);
|
||||
if (identities.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.IdentityProjectMembership,
|
||||
identities.map((el) => {
|
||||
const id = uuidV4();
|
||||
identityProjectMembershipMapping[el.id] = id;
|
||||
return { ...el, id, projectId: newProjectId };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const identitiesRoles = await knex(TableName.IdentityProjectMembershipRole).whereIn(
|
||||
"projectMembershipId",
|
||||
identities.map((el) => el.id)
|
||||
);
|
||||
if (identitiesRoles.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.IdentityProjectMembershipRole,
|
||||
identitiesRoles.map((el) => {
|
||||
const id = uuidV4();
|
||||
const projectMembershipId = identityProjectMembershipMapping[el.projectMembershipId];
|
||||
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
|
||||
return { ...el, id, projectMembershipId, customRoleId };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const projectMembershipMapping: Record<string, string> = {};
|
||||
const projectUserMembers = await knex(TableName.ProjectMembership).where("projectId", projectId);
|
||||
if (projectUserMembers.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.ProjectMembership,
|
||||
projectUserMembers.map((el) => {
|
||||
const id = uuidV4();
|
||||
projectMembershipMapping[el.id] = id;
|
||||
return { ...el, id, projectId: newProjectId };
|
||||
})
|
||||
);
|
||||
}
|
||||
const membershipRoles = await knex(TableName.ProjectUserMembershipRole).whereIn(
|
||||
"projectMembershipId",
|
||||
projectUserMembers.map((el) => el.id)
|
||||
);
|
||||
if (membershipRoles.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.ProjectUserMembershipRole,
|
||||
membershipRoles.map((el) => {
|
||||
const id = uuidV4();
|
||||
const projectMembershipId = projectMembershipMapping[el.projectMembershipId];
|
||||
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
|
||||
return { ...el, id, projectMembershipId, customRoleId };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const kmsKeys = await knex(TableName.KmsKey).where("projectId", projectId).andWhere("isReserved", true);
|
||||
if (kmsKeys.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.KmsKey,
|
||||
kmsKeys.map((el) => {
|
||||
const id = uuidV4();
|
||||
const slug = slugify(alphaNumericNanoId(8).toLowerCase());
|
||||
return { ...el, id, slug, projectId: newProjectId };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const projectBot = await knex(TableName.ProjectBot).where("projectId", projectId).first();
|
||||
if (projectBot) {
|
||||
const newProjectBot = { ...projectBot, id: uuidV4(), projectId: newProjectId };
|
||||
await knex(TableName.ProjectBot).insert(newProjectBot);
|
||||
}
|
||||
|
||||
const projectKeys = await knex(TableName.ProjectKeys).where("projectId", projectId);
|
||||
if (projectKeys.length) {
|
||||
await knex.batchInsert(
|
||||
TableName.ProjectKeys,
|
||||
projectKeys.map((el) => {
|
||||
const id = uuidV4();
|
||||
return { ...el, id, projectId: newProjectId };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return newProjectId;
|
||||
};
|
||||
|
||||
const BATCH_SIZE = 500;
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
|
||||
if (!hasSplitMappingTable) {
|
||||
await knex.schema.createTable(TableName.ProjectSplitBackfillIds, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("sourceProjectId", 36).notNullable();
|
||||
t.foreign("sourceProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
t.string("destinationProjectType").notNullable();
|
||||
t.string("destinationProjectId", 36).notNullable();
|
||||
t.foreign("destinationProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
|
||||
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
|
||||
if (!hasTypeColumn) {
|
||||
await knex.schema.alterTable(TableName.Project, (t) => {
|
||||
t.string("type");
|
||||
});
|
||||
|
||||
let projectsToBeTyped;
|
||||
do {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
projectsToBeTyped = await knex(TableName.Project).whereNull("type").limit(BATCH_SIZE).select("id");
|
||||
if (projectsToBeTyped.length) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await knex(TableName.Project)
|
||||
.whereIn(
|
||||
"id",
|
||||
projectsToBeTyped.map((el) => el.id)
|
||||
)
|
||||
.update({ type: ProjectType.SecretManager });
|
||||
}
|
||||
} while (projectsToBeTyped.length > 0);
|
||||
|
||||
const projectsWithCertificates = await knex(TableName.CertificateAuthority)
|
||||
.distinct("projectId")
|
||||
.select("projectId");
|
||||
/* eslint-disable no-await-in-loop,no-param-reassign */
|
||||
for (const { projectId } of projectsWithCertificates) {
|
||||
const newProjectId = await newProject(knex, projectId, ProjectType.CertificateManager);
|
||||
await knex(TableName.CertificateAuthority).where("projectId", projectId).update({ projectId: newProjectId });
|
||||
await knex(TableName.PkiAlert).where("projectId", projectId).update({ projectId: newProjectId });
|
||||
await knex(TableName.PkiCollection).where("projectId", projectId).update({ projectId: newProjectId });
|
||||
await knex(TableName.ProjectSplitBackfillIds).insert({
|
||||
sourceProjectId: projectId,
|
||||
destinationProjectType: ProjectType.CertificateManager,
|
||||
destinationProjectId: newProjectId
|
||||
});
|
||||
}
|
||||
|
||||
const projectsWithCmek = await knex(TableName.KmsKey)
|
||||
.where("isReserved", false)
|
||||
.whereNotNull("projectId")
|
||||
.distinct("projectId")
|
||||
.select("projectId");
|
||||
for (const { projectId } of projectsWithCmek) {
|
||||
if (projectId) {
|
||||
const newProjectId = await newProject(knex, projectId, ProjectType.KMS);
|
||||
await knex(TableName.KmsKey)
|
||||
.where({
|
||||
isReserved: false,
|
||||
projectId
|
||||
})
|
||||
.update({ projectId: newProjectId });
|
||||
await knex(TableName.ProjectSplitBackfillIds).insert({
|
||||
sourceProjectId: projectId,
|
||||
destinationProjectType: ProjectType.KMS,
|
||||
destinationProjectId: newProjectId
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/* eslint-enable */
|
||||
await knex.schema.alterTable(TableName.Project, (t) => {
|
||||
t.string("type").notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
|
||||
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
|
||||
|
||||
if (hasTypeColumn && hasSplitMappingTable) {
|
||||
const splitProjectMappings = await knex(TableName.ProjectSplitBackfillIds).where({});
|
||||
const certMapping = splitProjectMappings.filter(
|
||||
(el) => el.destinationProjectType === ProjectType.CertificateManager
|
||||
);
|
||||
/* eslint-disable no-await-in-loop */
|
||||
for (const project of certMapping) {
|
||||
await knex(TableName.CertificateAuthority)
|
||||
.where("projectId", project.destinationProjectId)
|
||||
.update({ projectId: project.sourceProjectId });
|
||||
await knex(TableName.PkiAlert)
|
||||
.where("projectId", project.destinationProjectId)
|
||||
.update({ projectId: project.sourceProjectId });
|
||||
await knex(TableName.PkiCollection)
|
||||
.where("projectId", project.destinationProjectId)
|
||||
.update({ projectId: project.sourceProjectId });
|
||||
}
|
||||
|
||||
/* eslint-enable */
|
||||
const kmsMapping = splitProjectMappings.filter((el) => el.destinationProjectType === ProjectType.KMS);
|
||||
/* eslint-disable no-await-in-loop */
|
||||
for (const project of kmsMapping) {
|
||||
await knex(TableName.KmsKey)
|
||||
.where({
|
||||
isReserved: false,
|
||||
projectId: project.destinationProjectId
|
||||
})
|
||||
.update({ projectId: project.sourceProjectId });
|
||||
}
|
||||
/* eslint-enable */
|
||||
await knex(TableName.ProjectMembership)
|
||||
.whereIn(
|
||||
"projectId",
|
||||
splitProjectMappings.map((el) => el.destinationProjectId)
|
||||
)
|
||||
.delete();
|
||||
await knex(TableName.ProjectRoles)
|
||||
.whereIn(
|
||||
"projectId",
|
||||
splitProjectMappings.map((el) => el.destinationProjectId)
|
||||
)
|
||||
.delete();
|
||||
await knex(TableName.Project)
|
||||
.whereIn(
|
||||
"id",
|
||||
splitProjectMappings.map((el) => el.destinationProjectId)
|
||||
)
|
||||
.delete();
|
||||
|
||||
await knex.schema.alterTable(TableName.Project, (t) => {
|
||||
t.dropColumn("type");
|
||||
});
|
||||
}
|
||||
|
||||
if (hasSplitMappingTable) {
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectSplitBackfillIds);
|
||||
}
|
||||
}
|
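Note on the migration above: `up` backfills the new `type` column on existing projects in batches of `BATCH_SIZE` (500) to avoid one long-running update, then splits PKI and CMEK resources into freshly created per-type projects, recording every split in `ProjectSplitBackfillIds` so that `down` can move the rows back and drop the destination projects. A minimal sketch of the same batched-backfill pattern, using an illustrative table and column name (not taken from the migration):

```ts
import { Knex } from "knex";

// Minimal sketch of the batched backfill loop used above; "my_table",
// "legacy_flag" and the default value are illustrative, not from the migration.
const BATCH_SIZE = 500;

export async function backfillInBatches(knex: Knex): Promise<void> {
  let rows: { id: string }[];
  do {
    // Fetch the next batch of rows that still need the new value.
    // eslint-disable-next-line no-await-in-loop
    rows = await knex("my_table").whereNull("legacy_flag").limit(BATCH_SIZE).select("id");
    if (rows.length) {
      // eslint-disable-next-line no-await-in-loop
      await knex("my_table")
        .whereIn(
          "id",
          rows.map((el) => el.id)
        )
        .update({ legacy_flag: "default" });
    }
  } while (rows.length > 0);
}
```

The `do/while` keeps querying until no untyped rows remain, so memory usage stays bounded by the batch size regardless of table size.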
@ -15,7 +15,8 @@ export const AccessApprovalPoliciesSchema = z.object({
|
||||
envId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
enforcementLevel: z.string().default("hard")
|
||||
enforcementLevel: z.string().default("hard"),
|
||||
deletedAt: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;
|
||||
|
33
backend/src/db/schemas/identity-jwt-auths.ts
Normal file
@ -0,0 +1,33 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update, just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const IdentityJwtAuthsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
accessTokenTTL: z.coerce.number().default(7200),
|
||||
accessTokenMaxTTL: z.coerce.number().default(7200),
|
||||
accessTokenNumUsesLimit: z.coerce.number().default(0),
|
||||
accessTokenTrustedIps: z.unknown(),
|
||||
identityId: z.string().uuid(),
|
||||
configurationType: z.string(),
|
||||
jwksUrl: z.string(),
|
||||
encryptedJwksCaCert: zodBuffer,
|
||||
encryptedPublicKeys: zodBuffer,
|
||||
boundIssuer: z.string(),
|
||||
boundAudiences: z.string(),
|
||||
boundClaims: z.unknown(),
|
||||
boundSubject: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TIdentityJwtAuths = z.infer<typeof IdentityJwtAuthsSchema>;
|
||||
export type TIdentityJwtAuthsInsert = Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>;
|
||||
export type TIdentityJwtAuthsUpdate = Partial<Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>>;
|
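These generated files pair a runtime zod validator with derived TypeScript types: the `Insert` type drops the DB-managed columns named by `TImmutableDBKeys` (presumably `id`, `createdAt`, `updatedAt`), and the `Update` type additionally makes every remaining field optional. A hedged sketch of how such a schema is typically consumed; the export path, the field values, and whether `configurationType` is ever `"jwks"` are assumptions:

```ts
import { IdentityJwtAuthsSchema, TIdentityJwtAuthsInsert } from "@app/db/schemas"; // export path assumed

// Validate an untrusted row against the generated schema before using it.
export const parseJwtAuthRow = (row: unknown) => IdentityJwtAuthsSchema.parse(row);

// The insert type omits DB-managed keys, and fields with .default() are optional,
// so this compiles without id/createdAt/updatedAt or the token TTL fields.
// Every value below is a placeholder for illustration only.
const newJwtAuth: TIdentityJwtAuthsInsert = {
  identityId: "00000000-0000-0000-0000-000000000000",
  configurationType: "jwks",
  jwksUrl: "https://example.com/.well-known/jwks.json",
  encryptedJwksCaCert: Buffer.alloc(0),
  encryptedPublicKeys: Buffer.alloc(0),
  boundIssuer: "https://issuer.example.com",
  boundAudiences: "",
  boundClaims: {},
  boundSubject: "",
  accessTokenTrustedIps: []
};
```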
@ -30,6 +30,7 @@ export * from "./identity-access-tokens";
|
||||
export * from "./identity-aws-auths";
|
||||
export * from "./identity-azure-auths";
|
||||
export * from "./identity-gcp-auths";
|
||||
export * from "./identity-jwt-auths";
|
||||
export * from "./identity-kubernetes-auths";
|
||||
export * from "./identity-metadata";
|
||||
export * from "./identity-oidc-auths";
|
||||
@ -64,6 +65,7 @@ export * from "./project-keys";
|
||||
export * from "./project-memberships";
|
||||
export * from "./project-roles";
|
||||
export * from "./project-slack-configs";
|
||||
export * from "./project-split-backfill-ids";
|
||||
export * from "./project-templates";
|
||||
export * from "./project-user-additional-privilege";
|
||||
export * from "./project-user-membership-roles";
|
||||
|
@ -12,7 +12,7 @@ import { TImmutableDBKeys } from "./models";
|
||||
export const KmsRootConfigSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
encryptedRootKey: zodBuffer,
|
||||
encryptionStrategy: z.string(),
|
||||
encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
@ -68,6 +68,7 @@ export enum TableName {
|
||||
IdentityUaClientSecret = "identity_ua_client_secrets",
|
||||
IdentityAwsAuth = "identity_aws_auths",
|
||||
IdentityOidcAuth = "identity_oidc_auths",
|
||||
IdentityJwtAuth = "identity_jwt_auths",
|
||||
IdentityOrgMembership = "identity_org_memberships",
|
||||
IdentityProjectMembership = "identity_project_memberships",
|
||||
IdentityProjectMembershipRole = "identity_project_membership_role",
|
||||
@ -105,6 +106,7 @@ export enum TableName {
|
||||
SecretApprovalRequestSecretV2 = "secret_approval_requests_secrets_v2",
|
||||
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
|
||||
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
|
||||
ProjectSplitBackfillIds = "project_split_backfill_ids",
|
||||
// junction tables with tags
|
||||
SecretV2JnTag = "secret_v2_tag_junction",
|
||||
JnSecretTag = "secret_tag_junction",
|
||||
@ -196,5 +198,12 @@ export enum IdentityAuthMethod {
|
||||
GCP_AUTH = "gcp-auth",
|
||||
AWS_AUTH = "aws-auth",
|
||||
AZURE_AUTH = "azure-auth",
|
||||
OIDC_AUTH = "oidc-auth"
|
||||
OIDC_AUTH = "oidc-auth",
|
||||
JWT_AUTH = "jwt-auth"
|
||||
}
|
||||
|
||||
export enum ProjectType {
  SecretManager = "secret-manager",
  CertificateManager = "cert-manager",
  KMS = "kms"
}
|
||||
|
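With `type` now a required column on `Project`, request validation and branching can key off the new enum. A small illustrative sketch (not part of the migration or schema files; the labels are made up):

```ts
import { z } from "zod";

import { ProjectType } from "@app/db/schemas";

// Accept only known project types at an API boundary.
export const projectTypeSchema = z.nativeEnum(ProjectType).default(ProjectType.SecretManager);

// Illustrative branching on the project type.
export const projectTypeLabel = (type: ProjectType): string => {
  switch (type) {
    case ProjectType.SecretManager:
      return "Secret Manager";
    case ProjectType.CertificateManager:
      return "Cert Manager";
    case ProjectType.KMS:
      return "Key Management";
    default:
      return type;
  }
};
```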
21
backend/src/db/schemas/project-split-backfill-ids.ts
Normal file
@ -0,0 +1,21 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update, just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ProjectSplitBackfillIdsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
sourceProjectId: z.string(),
|
||||
destinationProjectType: z.string(),
|
||||
destinationProjectId: z.string()
|
||||
});
|
||||
|
||||
export type TProjectSplitBackfillIds = z.infer<typeof ProjectSplitBackfillIdsSchema>;
|
||||
export type TProjectSplitBackfillIdsInsert = Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>;
|
||||
export type TProjectSplitBackfillIdsUpdate = Partial<
|
||||
Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>
|
||||
>;
|
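The mapping rows give later code (and the `down` migration above) a durable record of which destination project each split resource landed in. An illustrative lookup helper built only from the columns defined here:

```ts
import { Knex } from "knex";

import { ProjectType, TableName } from "@app/db/schemas";

// Illustrative helper: resolve where a given project's resources of one type
// were moved during the split backfill, if they were moved at all.
export const findSplitDestination = async (knex: Knex, sourceProjectId: string, type: ProjectType) => {
  const mapping = await knex(TableName.ProjectSplitBackfillIds)
    .where({ sourceProjectId, destinationProjectType: type })
    .first();
  return mapping?.destinationProjectId as string | undefined;
};
```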
@ -23,7 +23,9 @@ export const ProjectsSchema = z.object({
|
||||
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
|
||||
auditLogsRetentionDays: z.number().nullable().optional(),
|
||||
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
|
||||
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
|
||||
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
|
||||
description: z.string().nullable().optional(),
|
||||
type: z.string()
|
||||
});
|
||||
|
||||
export type TProjects = z.infer<typeof ProjectsSchema>;
|
||||
|
@ -15,7 +15,8 @@ export const SecretApprovalPoliciesSchema = z.object({
|
||||
envId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
enforcementLevel: z.string().default("hard")
|
||||
enforcementLevel: z.string().default("hard"),
|
||||
deletedAt: z.date().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;
|
||||
|
@ -4,7 +4,7 @@ import { Knex } from "knex";
|
||||
|
||||
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
|
||||
|
||||
import { ProjectMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
|
||||
import { ProjectMembershipRole, ProjectType, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
|
||||
import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
|
||||
|
||||
export const DEFAULT_PROJECT_ENVS = [
|
||||
@ -24,6 +24,7 @@ export async function seed(knex: Knex): Promise<void> {
|
||||
name: seedData1.project.name,
|
||||
orgId: seedData1.organization.id,
|
||||
slug: "first-project",
|
||||
type: ProjectType.SecretManager,
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore
|
||||
id: seedData1.project.id
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { ProjectMembershipRole, ProjectVersion, TableName } from "../schemas";
|
||||
import { ProjectMembershipRole, ProjectType, ProjectVersion, TableName } from "../schemas";
|
||||
import { seedData1 } from "../seed-data";
|
||||
|
||||
export const DEFAULT_PROJECT_ENVS = [
|
||||
@ -16,6 +16,7 @@ export async function seed(knex: Knex): Promise<void> {
|
||||
orgId: seedData1.organization.id,
|
||||
slug: seedData1.projectV3.slug,
|
||||
version: ProjectVersion.V3,
|
||||
type: ProjectType.SecretManager,
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore
|
||||
id: seedData1.projectV3.id
|
||||
|
@ -109,7 +109,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
|
||||
approvers: z.string().array(),
|
||||
secretPath: z.string().nullish(),
|
||||
envId: z.string(),
|
||||
enforcementLevel: z.string()
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish()
|
||||
}),
|
||||
reviewers: z
|
||||
.object({
|
||||
|
@ -1,4 +1,3 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
@ -8,6 +7,7 @@ import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
|
||||
import { daysToMillisecond } from "@app/lib/dates";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -48,15 +48,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
|
||||
.nullable(),
|
||||
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
|
||||
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
|
||||
name: z
|
||||
.string()
|
||||
.describe(DYNAMIC_SECRETS.CREATE.name)
|
||||
.min(1)
|
||||
.toLowerCase()
|
||||
.max(64)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid"
|
||||
})
|
||||
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
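Many routes in this comparison replace hand-rolled `z.string().min(...).max(...).refine((v) => slugify(v) === v, ...)` chains with a shared `slugSchema({ min, max, field })` helper from `@app/server/lib/schemas`. The helper's implementation is not part of this diff; a plausible reconstruction based on the validators it replaces might look like the sketch below (the defaults and the error wording are assumptions):

```ts
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

// Hypothetical reconstruction of the shared slug validator that the routes
// below now reuse instead of repeating inline .min/.max/.refine chains.
type TSlugSchemaArg = { min?: number; max?: number; field?: string };

export const slugSchema = ({ min = 1, max = 32, field = "Slug" }: TSlugSchemaArg = {}) =>
  z
    .string()
    .trim()
    .min(min, { message: `${field} must be at least ${min} character(s)` })
    .max(max, { message: `${field} must be at most ${max} character(s)` })
    .refine((v) => slugify(v) === v, {
      message: `${field} must be a valid slug`
    });
```

Centralizing the rule keeps the slug format and its error message consistent across every router that used to re-declare it.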
@ -4,9 +4,15 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import {
|
||||
ExternalKmsAwsSchema,
|
||||
ExternalKmsGcpCredentialSchema,
|
||||
ExternalKmsGcpSchema,
|
||||
ExternalKmsInputSchema,
|
||||
ExternalKmsInputUpdateSchema
|
||||
ExternalKmsInputUpdateSchema,
|
||||
KmsGcpKeyFetchAuthType,
|
||||
KmsProviders,
|
||||
TExternalKmsGcpCredentialSchema
|
||||
} from "@app/ee/services/external-kms/providers/model";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -44,7 +50,8 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
|
||||
statusDetails: true,
|
||||
provider: true
|
||||
}).extend({
|
||||
providerInput: ExternalKmsAwsSchema
|
||||
// for GCP, we don't return the credential object as it is sensitive data that should not be exposed
|
||||
providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
|
||||
})
|
||||
});
|
||||
|
||||
@ -286,4 +293,67 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
|
||||
return { externalKms };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/gcp/keys",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.discriminatedUnion("authMethod", [
|
||||
z.object({
|
||||
authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
|
||||
region: z.string().trim().min(1),
|
||||
credential: ExternalKmsGcpCredentialSchema
|
||||
}),
|
||||
z.object({
|
||||
authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
|
||||
region: z.string().trim().min(1),
|
||||
kmsId: z.string().trim().min(1)
|
||||
})
|
||||
]),
|
||||
response: {
|
||||
200: z.object({
|
||||
keys: z.string().array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const { region, authMethod } = req.body;
|
||||
let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
|
||||
|
||||
if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
|
||||
credentialJson = req.body.credential;
|
||||
} else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
|
||||
const externalKms = await server.services.externalKms.findById({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
id: req.body.kmsId
|
||||
});
|
||||
|
||||
if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
|
||||
throw new NotFoundError({ message: "KMS not found or not of type GCP" });
|
||||
}
|
||||
|
||||
credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
|
||||
}
|
||||
|
||||
if (!credentialJson) {
|
||||
throw new NotFoundError({
|
||||
message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
|
||||
});
|
||||
}
|
||||
|
||||
const results = await server.services.externalKms.fetchGcpKeys({
|
||||
credential: credentialJson,
|
||||
gcpRegion: region
|
||||
});
|
||||
|
||||
return results;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
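Usage note for the new `POST /gcp/keys` route: the body is a discriminated union on `authMethod`, either a raw GCP service-account credential plus region, or the id of an existing GCP external KMS whose stored credential is reused. A hedged client sketch; the base URL, mount path, auth header, and the literal enum values `"kms"` / `"credential"` are all assumptions:

```ts
// Hypothetical client call; base URL, mount path and auth header are assumptions.
const listGcpKeys = async (accessToken: string, kmsId: string) => {
  const res = await fetch("https://app.infisical.com/api/v1/external-kms/gcp/keys", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${accessToken}`
    },
    // authMethod discriminates the body: "kms" reuses the credential stored on an
    // existing GCP external KMS; "credential" would instead send the JSON key inline.
    body: JSON.stringify({ authMethod: "kms", region: "us-central1", kmsId })
  });
  const { keys } = (await res.json()) as { keys: string[] };
  return keys;
};
```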
@ -1,8 +1,9 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
|
||||
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
|
||||
import { GROUPS } from "@app/lib/api-docs";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
@ -14,15 +15,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
schema: {
|
||||
body: z.object({
|
||||
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
|
||||
slug: z
|
||||
.string()
|
||||
.min(5)
|
||||
.max(36)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.optional()
|
||||
.describe(GROUPS.CREATE.slug),
|
||||
slug: slugSchema({ min: 5, max: 36 }).optional().describe(GROUPS.CREATE.slug),
|
||||
role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
|
||||
}),
|
||||
response: {
|
||||
@ -100,14 +93,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
body: z
|
||||
.object({
|
||||
name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
|
||||
slug: z
|
||||
.string()
|
||||
.min(5)
|
||||
.max(36)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(GROUPS.UPDATE.slug),
|
||||
slug: slugSchema({ min: 5, max: 36 }).describe(GROUPS.UPDATE.slug),
|
||||
role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
|
||||
})
|
||||
.partial(),
|
||||
@ -166,7 +152,8 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
|
||||
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
|
||||
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
|
||||
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search)
|
||||
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
|
||||
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
@ -179,7 +166,8 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
|
||||
})
|
||||
.merge(
|
||||
z.object({
|
||||
isPartOfGroup: z.boolean()
|
||||
isPartOfGroup: z.boolean(),
|
||||
joinedGroupAt: z.date().nullable()
|
||||
})
|
||||
)
|
||||
.array(),
|
||||
|
@ -8,6 +8,7 @@ import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import {
|
||||
ProjectPermissionSchema,
|
||||
@ -33,17 +34,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
body: z.object({
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
|
||||
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
permissions: ProjectPermissionSchema.array()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
|
||||
.optional(),
|
||||
@ -77,7 +68,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
...req.body,
|
||||
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
|
||||
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
|
||||
isTemporary: false,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
@ -103,17 +94,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
body: z.object({
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
|
||||
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
permissions: ProjectPermissionSchema.array()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
|
||||
.optional(),
|
||||
@ -159,7 +140,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
...req.body,
|
||||
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
|
||||
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
|
||||
isTemporary: true,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore-error this is valid ts
|
||||
@ -189,16 +170,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.projectSlug),
|
||||
privilegeDetails: z
|
||||
.object({
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
|
||||
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
|
||||
permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
|
||||
privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
|
||||
IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.privilegePermission
|
||||
|
@ -9,7 +9,6 @@
|
||||
import { Authenticator, Strategy } from "@fastify/passport";
|
||||
import fastifySession from "@fastify/session";
|
||||
import RedisStore from "connect-redis";
|
||||
import { Redis } from "ioredis";
|
||||
import { z } from "zod";
|
||||
|
||||
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
|
||||
@ -21,7 +20,6 @@ import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
const appCfg = getConfig();
|
||||
const redis = new Redis(appCfg.REDIS_URL);
|
||||
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
|
||||
|
||||
/*
|
||||
@ -30,7 +28,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
- Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
|
||||
*/
|
||||
const redisStore = new RedisStore({
|
||||
client: redis,
|
||||
client: server.redis,
|
||||
prefix: "oidc-session:",
|
||||
ttl: 600 // 10 minutes
|
||||
});
|
||||
|
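The OIDC router change above drops the per-router `new Redis(appCfg.REDIS_URL)` connection and reuses the `server.redis` client already attached to the Fastify instance, so session storage shares one connection instead of opening another. An illustrative sketch of that decorate-once pattern (the plugin and option names are made up; the actual wiring in Infisical may differ):

```ts
import fp from "fastify-plugin";
import { Redis } from "ioredis";

// Illustrative plugin showing the "decorate once, reuse everywhere" pattern.
export const fastifyRedis = fp<{ redisUrl: string }>(async (server, opts) => {
  const redis = new Redis(opts.redisUrl);
  // Expose the shared client so routers can use server.redis instead of new connections.
  server.decorate("redis", redis);
  server.addHook("onClose", async () => {
    await redis.quit();
  });
});
```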
@ -1,8 +1,8 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
@ -18,17 +18,10 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
|
||||
organizationId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.trim()
|
||||
.refine(
|
||||
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid"
|
||||
}),
|
||||
slug: slugSchema({ min: 1, max: 64 }).refine(
|
||||
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
),
|
||||
name: z.string().trim(),
|
||||
description: z.string().trim().optional(),
|
||||
permissions: z.any().array()
|
||||
@ -94,17 +87,13 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
|
||||
roleId: z.string().trim()
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
// TODO: Switch to slugSchema after verifying correct methods with Akhil - Omar 11/24
|
||||
slug: slugSchema({ min: 1, max: 64 })
|
||||
.refine(
|
||||
(val) => typeof val !== "undefined" && !Object.keys(OrgMembershipRole).includes(val),
|
||||
(val) => !Object.keys(OrgMembershipRole).includes(val),
|
||||
"Please choose a different slug, the slug you have entered is reserved."
|
||||
)
|
||||
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
|
||||
message: "Slug must be a valid"
|
||||
}),
|
||||
.optional(),
|
||||
name: z.string().trim().optional(),
|
||||
description: z.string().trim().optional(),
|
||||
permissions: z.any().array().optional()
|
||||
|
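Note the ordering change in the role routes: `.refine(...)` is now applied to the required slug schema first and `.optional()` is chained last, so the refine callbacks no longer have to special-case `undefined`. A small zod sketch of the difference (the reserved list is illustrative):

```ts
import { z } from "zod";

const RESERVED = ["admin", "member", "no-access"]; // illustrative reserved slugs

// Refine runs on the inner string, then the whole schema is made optional:
// the callback never sees undefined.
const slugAfter = z
  .string()
  .refine((val) => !RESERVED.includes(val), "Please choose a different slug, the slug you have entered is reserved")
  .optional();

// If .optional() is applied first, the refine callback must handle undefined itself.
const slugBefore = z
  .string()
  .optional()
  .refine((val) => typeof val === "undefined" || !RESERVED.includes(val), "Reserved slug");

slugAfter.parse(undefined); // ok, refine skipped
slugBefore.parse(undefined); // ok, but only because the callback guards undefined
```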
@ -1,5 +1,4 @@
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
|
||||
@ -9,6 +8,7 @@ import {
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -32,18 +32,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
projectSlug: z.string().trim().describe(PROJECT_ROLE.CREATE.projectSlug)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.min(1)
|
||||
slug: slugSchema({ max: 64 })
|
||||
.refine(
|
||||
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid"
|
||||
})
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
|
||||
@ -94,21 +87,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.optional()
|
||||
.describe(PROJECT_ROLE.UPDATE.slug)
|
||||
slug: slugSchema({ max: 64 })
|
||||
.refine(
|
||||
(val) =>
|
||||
typeof val === "undefined" ||
|
||||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
)
|
||||
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
|
||||
message: "Slug must be a valid"
|
||||
}),
|
||||
.describe(PROJECT_ROLE.UPDATE.slug)
|
||||
.optional(),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
|
@ -1,4 +1,3 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
|
||||
@ -8,22 +7,13 @@ import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-tem
|
||||
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
|
||||
import { ProjectTemplates } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
|
||||
|
||||
const SlugSchema = z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(32)
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Must be valid slug format"
|
||||
});
|
||||
|
||||
const isReservedRoleSlug = (slug: string) =>
|
||||
Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);
|
||||
|
||||
@ -34,14 +24,14 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
|
||||
roles: z
|
||||
.object({
|
||||
name: z.string().trim().min(1),
|
||||
slug: SlugSchema,
|
||||
slug: slugSchema(),
|
||||
permissions: UnpackedPermissionSchema.array()
|
||||
})
|
||||
.array(),
|
||||
environments: z
|
||||
.object({
|
||||
name: z.string().trim().min(1),
|
||||
slug: SlugSchema,
|
||||
slug: slugSchema(),
|
||||
position: z.number().min(1)
|
||||
})
|
||||
.array()
|
||||
@ -50,7 +40,7 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
|
||||
const ProjectTemplateRolesSchema = z
|
||||
.object({
|
||||
name: z.string().trim().min(1),
|
||||
slug: SlugSchema,
|
||||
slug: slugSchema(),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
})
|
||||
.array()
|
||||
@ -78,7 +68,7 @@ const ProjectTemplateRolesSchema = z
|
||||
const ProjectTemplateEnvironmentsSchema = z
|
||||
.object({
|
||||
name: z.string().trim().min(1),
|
||||
slug: SlugSchema,
|
||||
slug: slugSchema(),
|
||||
position: z.number().min(1)
|
||||
})
|
||||
.array()
|
||||
@ -188,9 +178,11 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
|
||||
schema: {
|
||||
description: "Create a project template.",
|
||||
body: z.object({
|
||||
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
|
||||
message: `The requested project template name is reserved.`
|
||||
}).describe(ProjectTemplates.CREATE.name),
|
||||
name: slugSchema({ field: "name" })
|
||||
.refine((val) => !isInfisicalProjectTemplate(val), {
|
||||
message: `The requested project template name is reserved.`
|
||||
})
|
||||
.describe(ProjectTemplates.CREATE.name),
|
||||
description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
|
||||
roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
|
||||
environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
|
||||
@ -230,9 +222,10 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
|
||||
description: "Update a project template.",
|
||||
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
|
||||
body: z.object({
|
||||
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
|
||||
message: `The requested project template name is reserved.`
|
||||
})
|
||||
name: slugSchema({ field: "name" })
|
||||
.refine((val) => !isInfisicalProjectTemplate(val), {
|
||||
message: `The requested project template name is reserved.`
|
||||
})
|
||||
.optional()
|
||||
.describe(ProjectTemplates.UPDATE.name),
|
||||
description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),
|
||||
|
@ -52,7 +52,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
})
|
||||
.array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string()
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish()
|
||||
}),
|
||||
committerUser: approvalRequestUser,
|
||||
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
|
||||
@ -260,7 +261,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
approvals: z.number(),
|
||||
approvers: approvalRequestUser.array(),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
enforcementLevel: z.string()
|
||||
enforcementLevel: z.string(),
|
||||
deletedAt: z.date().nullish()
|
||||
}),
|
||||
environment: z.string(),
|
||||
statusChangedByUser: approvalRequestUser.optional(),
|
||||
|
@ -7,6 +7,7 @@ import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/pr
|
||||
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -21,17 +22,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
schema: {
|
||||
body: z.object({
|
||||
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.optional()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
@ -87,15 +78,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
|
||||
}),
|
||||
body: z
|
||||
.object({
|
||||
slug: z
|
||||
.string()
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
|
||||
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
|
||||
|
@ -7,6 +7,7 @@ import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-p
|
||||
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -28,17 +29,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
body: z.object({
|
||||
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
|
||||
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
|
||||
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
|
||||
type: z.discriminatedUnion("isTemporary", [
|
||||
z.object({
|
||||
@ -100,16 +91,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
|
||||
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(60)
|
||||
.trim()
|
||||
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid slug"
|
||||
})
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
|
||||
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
|
||||
permissions: ProjectPermissionV2Schema.array()
|
||||
.optional()
|
||||
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
|
||||
|
@ -1,11 +1,11 @@
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
|
||||
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
|
||||
import { PROJECT_ROLE } from "@app/lib/api-docs";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
@ -29,18 +29,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.min(1)
|
||||
slug: slugSchema({ min: 1, max: 64 })
|
||||
.refine(
|
||||
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
)
|
||||
.refine((v) => slugify(v) === v, {
|
||||
message: "Slug must be a valid"
|
||||
})
|
||||
.describe(PROJECT_ROLE.CREATE.slug),
|
||||
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
|
||||
@ -90,21 +83,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
|
||||
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
|
||||
}),
|
||||
body: z.object({
|
||||
slug: z
|
||||
.string()
|
||||
.toLowerCase()
|
||||
.trim()
|
||||
.optional()
|
||||
.describe(PROJECT_ROLE.UPDATE.slug)
|
||||
slug: slugSchema({ min: 1, max: 64 })
|
||||
.refine(
|
||||
(val) =>
|
||||
typeof val === "undefined" ||
|
||||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
|
||||
"Please choose a different slug, the slug you have entered is reserved"
|
||||
)
|
||||
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
|
||||
message: "Slug must be a valid"
|
||||
}),
|
||||
.optional()
|
||||
.describe(PROJECT_ROLE.UPDATE.slug),
|
||||
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
|
||||
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
|
||||
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
|
||||
|
@ -139,5 +139,10 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
};
|
||||
|
||||
return { ...accessApprovalPolicyOrm, find, findById };
|
||||
const softDeleteById = async (policyId: string, tx?: Knex) => {
|
||||
const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
|
||||
return softDeletedPolicy;
|
||||
};
|
||||
|
||||
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
|
||||
};
|
||||
|
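`softDeleteById` stamps `deletedAt` instead of removing the policy row, and the finders elsewhere in this diff add `deletedAt: null` filters so soft-deleted policies vanish from normal reads while existing approval requests can still join to them. A compact, generic sketch of the pattern (table and helper names are illustrative, not the project's ORM API):

```ts
import { Knex } from "knex";

// Generic soft-delete helpers; table and column names are illustrative.
export const softDeleteHelpers = (db: Knex, table: string) => ({
  // Mark the row as deleted without removing it, so historical joins keep working.
  softDeleteById: async (id: string, tx?: Knex) => {
    const [row] = await (tx || db)(table).where({ id }).update({ deletedAt: new Date() }).returning("*");
    return row;
  },
  // Normal reads only see rows that have not been soft-deleted.
  findActive: async (filter: Record<string, unknown>, tx?: Knex) =>
    (tx || db)(table).where(filter).whereNull("deletedAt")
});
```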
@ -1,5 +1,6 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { ProjectType } from "@app/db/schemas";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
@ -8,7 +9,11 @@ import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
|
||||
import { TAccessApprovalRequestDALFactory } from "../access-approval-request/access-approval-request-dal";
|
||||
import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-request/access-approval-request-reviewer-dal";
|
||||
import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
|
||||
import { TGroupDALFactory } from "../group/group-dal";
|
||||
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
|
||||
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
|
||||
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
|
||||
import {
|
||||
@ -21,7 +26,7 @@ import {
|
||||
TUpdateAccessApprovalPolicy
|
||||
} from "./access-approval-policy-types";
|
||||
|
||||
type TSecretApprovalPolicyServiceFactoryDep = {
|
||||
type TAccessApprovalPolicyServiceFactoryDep = {
|
||||
projectDAL: TProjectDALFactory;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
|
||||
@ -30,6 +35,9 @@ type TSecretApprovalPolicyServiceFactoryDep = {
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
|
||||
groupDAL: TGroupDALFactory;
|
||||
userDAL: Pick<TUserDALFactory, "find">;
|
||||
accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
|
||||
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
|
||||
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
|
||||
};
|
||||
|
||||
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
|
||||
@ -41,8 +49,11 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
permissionService,
|
||||
projectEnvDAL,
|
||||
projectDAL,
|
||||
userDAL
|
||||
}: TSecretApprovalPolicyServiceFactoryDep) => {
|
||||
userDAL,
|
||||
accessApprovalRequestDAL,
|
||||
additionalPrivilegeDAL,
|
||||
accessApprovalRequestReviewerDAL
|
||||
}: TAccessApprovalPolicyServiceFactoryDep) => {
|
||||
const createAccessApprovalPolicy = async ({
|
||||
name,
|
||||
actor,
|
||||
@ -76,13 +87,15 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
|
||||
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
ProjectPermissionSub.SecretApproval
|
||||
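A recurring change in this service: `getProjectPermission` now also returns a `ForbidOnInvalidProjectType` helper, and mutation paths call it with `ProjectType.SecretManager` so that, after the project split, approval policies can only be changed from secret-manager projects. An illustrative reduction of what such a guard amounts to (the constructor shape and message are assumptions; the real helper is produced by the permission service with the actor's project already in scope):

```ts
import { ProjectType } from "@app/db/schemas";
import { ForbiddenRequestError } from "@app/lib/errors";

// Hypothetical reconstruction of the project-type guard pattern.
export const buildForbidOnInvalidProjectType =
  (actualType: ProjectType) =>
  (expectedType: ProjectType): void => {
    if (actualType !== expectedType) {
      throw new ForbiddenRequestError({
        message: `This action is only available for ${expectedType} projects`
      });
    }
  };
```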
@ -180,16 +193,9 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
// Anyone in the project should be able to get the policies.
|
||||
/* const { permission } = */ await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
project.id,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
// ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
|
||||
await permissionService.getProjectPermission(actor, actorId, project.id, actorAuthMethod, actorOrgId);
|
||||
|
||||
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id });
|
||||
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
|
||||
return accessApprovalPolicies;
|
||||
};
|
||||
|
||||
@ -231,13 +237,14 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
if (!accessApprovalPolicy) {
|
||||
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
|
||||
}
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
accessApprovalPolicy.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
|
||||
|
||||
@ -314,19 +321,42 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
const policy = await accessApprovalPolicyDAL.findById(policyId);
|
||||
if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
policy.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Delete,
|
||||
ProjectPermissionSub.SecretApproval
|
||||
);
|
||||
|
||||
await accessApprovalPolicyDAL.deleteById(policyId);
|
||||
await accessApprovalPolicyDAL.transaction(async (tx) => {
|
||||
await accessApprovalPolicyDAL.softDeleteById(policyId, tx);
|
||||
const allAccessApprovalRequests = await accessApprovalRequestDAL.find({ policyId });
|
||||
|
||||
if (allAccessApprovalRequests.length) {
|
||||
const accessApprovalRequestsIds = allAccessApprovalRequests.map((request) => request.id);
|
||||
|
||||
const privilegeIdsArray = allAccessApprovalRequests
|
||||
.map((request) => request.privilegeId)
|
||||
.filter((id): id is string => id != null);
|
||||
|
||||
if (privilegeIdsArray.length) {
|
||||
await additionalPrivilegeDAL.delete({ $in: { id: privilegeIdsArray } }, tx);
|
||||
}
|
||||
|
||||
await accessApprovalRequestReviewerDAL.update(
|
||||
{ $in: { id: accessApprovalRequestsIds }, status: ApprovalStatus.PENDING },
|
||||
{ status: ApprovalStatus.REJECTED },
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
return policy;
|
||||
};
|
||||
|
||||
@ -356,7 +386,11 @@ export const accessApprovalPolicyServiceFactory = ({
|
||||
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
|
||||
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
|
||||
|
||||
const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
|
||||
const policies = await accessApprovalPolicyDAL.find({
|
||||
envId: environment.id,
|
||||
projectId: project.id,
|
||||
deletedAt: null
|
||||
});
|
||||
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
|
||||
|
||||
return { count: policies.length };
|
||||
|
@ -61,7 +61,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
|
||||
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId")
|
||||
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
|
||||
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
)
|
||||
|
||||
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
|
||||
@ -118,7 +119,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
approvals: doc.policyApprovals,
|
||||
secretPath: doc.policySecretPath,
|
||||
enforcementLevel: doc.policyEnforcementLevel,
|
||||
envId: doc.policyEnvId
|
||||
envId: doc.policyEnvId,
|
||||
deletedAt: doc.policyDeletedAt
|
||||
},
|
||||
requestedByUser: {
|
||||
userId: doc.requestedByUserId,
|
||||
@ -141,7 +143,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
}
|
||||
: null,
|
||||
|
||||
isApproved: !!doc.privilegeId
|
||||
isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
@ -252,7 +254,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
|
||||
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
|
||||
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
|
||||
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals")
|
||||
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
|
||||
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
|
||||
);
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
@ -271,7 +274,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
name: el.policyName,
|
||||
approvals: el.policyApprovals,
|
||||
secretPath: el.policySecretPath,
|
||||
enforcementLevel: el.policyEnforcementLevel
|
||||
enforcementLevel: el.policyEnforcementLevel,
|
||||
deletedAt: el.policyDeletedAt
|
||||
},
|
||||
requestedByUser: {
|
||||
userId: el.requestedByUserId,
|
||||
@ -363,6 +367,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
)
|
||||
|
||||
.where(`${TableName.Environment}.projectId`, projectId)
|
||||
.where(`${TableName.AccessApprovalPolicy}.deletedAt`, null)
|
||||
.select(selectAllTableCols(TableName.AccessApprovalRequest))
|
||||
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
|
||||
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));
|
||||
|
@ -130,6 +130,9 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
message: `No policy in environment with slug '${environment.slug}' and with secret path '${secretPath}' was found.`
|
||||
});
|
||||
}
|
||||
if (policy.deletedAt) {
|
||||
throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
|
||||
}
|
||||
|
||||
const approverIds: string[] = [];
|
||||
const approverGroupIds: string[] = [];
|
||||
@ -309,6 +312,12 @@ export const accessApprovalRequestServiceFactory = ({
|
||||
}
|
||||
|
||||
const { policy } = accessApprovalRequest;
|
||||
if (policy.deletedAt) {
|
||||
throw new BadRequestError({
|
||||
message: "The policy associated with this access request has been deleted."
|
||||
});
|
||||
}
|
||||
|
||||
const { membership, hasRole } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
|
@ -1,6 +1,7 @@
|
||||
import { RawAxiosRequestHeaders } from "axios";
|
||||
|
||||
import { SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
|
||||
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
|
||||
@ -20,27 +21,130 @@ type TAuditLogQueueServiceFactoryDep = {
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
|
||||
export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;
|
||||
|
||||
// keep this timeout 5s it must be fast because else the queue will take time to finish
|
||||
// audit log is a crowded queue thus needs to be fast
|
||||
export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
|
||||
export const auditLogQueueServiceFactory = ({
|
||||
|
||||
export const auditLogQueueServiceFactory = async ({
|
||||
auditLogDAL,
|
||||
queueService,
|
||||
projectDAL,
|
||||
licenseService,
|
||||
auditLogStreamDAL
|
||||
}: TAuditLogQueueServiceFactoryDep) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
const pushToLog = async (data: TCreateAuditLogDTO) => {
|
||||
await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
|
||||
removeOnFail: {
|
||||
count: 3
|
||||
},
|
||||
removeOnComplete: true
|
||||
});
|
||||
if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
|
||||
await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
|
||||
retryLimit: 10,
|
||||
retryBackoff: true
|
||||
});
|
||||
} else {
|
||||
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
|
||||
removeOnFail: {
|
||||
count: 3
|
||||
},
|
||||
removeOnComplete: true
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
if (appCfg.SHOULD_INIT_PG_QUEUE) {
|
||||
await queueService.startPg<QueueName.AuditLog>(
|
||||
QueueJobs.AuditLog,
|
||||
async ([job]) => {
|
||||
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
|
||||
let { orgId } = job.data;
|
||||
const MS_IN_DAY = 24 * 60 * 60 * 1000;
|
||||
let project;
|
||||
|
||||
if (!orgId) {
|
||||
// orgId and projectId are never both undefined
|
||||
// TODO(akhilmhdh): use caching here in dal to avoid db calls
|
||||
project = await projectDAL.findById(projectId as string);
|
||||
orgId = project.orgId;
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (plan.auditLogsRetentionDays === 0) {
|
||||
// skip inserting if audit log retention is 0, meaning it's not supported on this plan
|
||||
return;
|
||||
}
|
||||
|
||||
// For project actions, set TTL to project-level audit log retention config
|
||||
// This condition ensures that the plan's audit log retention days cannot be bypassed
|
||||
const ttlInDays =
|
||||
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
|
||||
? project.auditLogsRetentionDays
|
||||
: plan.auditLogsRetentionDays;
|
||||
|
||||
const ttl = ttlInDays * MS_IN_DAY;
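// Worked example (illustrative numbers): if the project caps retention at 30 days while the
// plan allows 90, ttlInDays = 30 and ttl = 30 * 24 * 60 * 60 * 1000 = 2_592_000_000 ms,
// so expiresAt below lands 30 days after the log is written.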
|
||||
|
||||
const auditLog = await auditLogDAL.create({
|
||||
actor: actor.type,
|
||||
actorMetadata: actor.metadata,
|
||||
userAgent,
|
||||
projectId,
|
||||
projectName: project?.name,
|
||||
ipAddress,
|
||||
orgId,
|
||||
eventType: event.type,
|
||||
expiresAt: new Date(Date.now() + ttl),
|
||||
eventMetadata: event.metadata,
|
||||
userAgentType
|
||||
});
|
||||
|
||||
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
|
||||
await Promise.allSettled(
|
||||
logStreams.map(
|
||||
async ({
|
||||
url,
|
||||
encryptedHeadersTag,
|
||||
encryptedHeadersIV,
|
||||
encryptedHeadersKeyEncoding,
|
||||
encryptedHeadersCiphertext
|
||||
}) => {
|
||||
const streamHeaders =
|
||||
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
|
||||
? (JSON.parse(
|
||||
infisicalSymmetricDecrypt({
|
||||
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
|
||||
iv: encryptedHeadersIV,
|
||||
tag: encryptedHeadersTag,
|
||||
ciphertext: encryptedHeadersCiphertext
|
||||
})
|
||||
) as LogStreamHeaders[])
|
||||
: [];
|
||||
|
||||
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
|
||||
|
||||
if (streamHeaders.length)
|
||||
streamHeaders.forEach(({ key, value }) => {
|
||||
headers[key] = value;
|
||||
});
|
||||
|
||||
return request.post(url, auditLog, {
|
||||
headers,
|
||||
// request timeout
|
||||
timeout: AUDIT_LOG_STREAM_TIMEOUT,
|
||||
// connection timeout
|
||||
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
|
||||
});
|
||||
}
|
||||
)
|
||||
);
|
||||
},
|
||||
{
|
||||
batchSize: 1,
|
||||
workerCount: 30,
|
||||
pollingIntervalSeconds: 0.5
|
||||
}
|
||||
);
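// With batchSize 1, workerCount 30 and a 0.5s polling interval, up to 30 audit log jobs run
// concurrently and new work is picked up within half a second, which keeps this high-traffic
// queue from backing up.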
|
||||
}
|
||||
|
||||
queueService.start(QueueName.AuditLog, async (job) => {
|
||||
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
|
||||
let { orgId } = job.data;
|
||||
|
@ -60,6 +60,7 @@ export enum EventType {
|
||||
DELETE_SECRETS = "delete-secrets",
|
||||
GET_WORKSPACE_KEY = "get-workspace-key",
|
||||
AUTHORIZE_INTEGRATION = "authorize-integration",
|
||||
UPDATE_INTEGRATION_AUTH = "update-integration-auth",
|
||||
UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
|
||||
CREATE_INTEGRATION = "create-integration",
|
||||
DELETE_INTEGRATION = "delete-integration",
|
||||
@ -94,6 +95,11 @@ export enum EventType {
|
||||
UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
|
||||
GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
|
||||
REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
|
||||
LOGIN_IDENTITY_JWT_AUTH = "login-identity-jwt-auth",
|
||||
ADD_IDENTITY_JWT_AUTH = "add-identity-jwt-auth",
|
||||
UPDATE_IDENTITY_JWT_AUTH = "update-identity-jwt-auth",
|
||||
GET_IDENTITY_JWT_AUTH = "get-identity-jwt-auth",
|
||||
REVOKE_IDENTITY_JWT_AUTH = "revoke-identity-jwt-auth",
|
||||
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
|
||||
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
|
||||
@ -357,6 +363,13 @@ interface AuthorizeIntegrationEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIntegrationAuthEvent {
|
||||
type: EventType.UPDATE_INTEGRATION_AUTH;
|
||||
metadata: {
|
||||
integration: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UnauthorizeIntegrationEvent {
|
||||
type: EventType.UNAUTHORIZE_INTEGRATION;
|
||||
metadata: {
|
||||
@ -895,6 +908,67 @@ interface GetIdentityOidcAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginIdentityJwtAuthEvent {
|
||||
type: EventType.LOGIN_IDENTITY_JWT_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
identityJwtAuthId: string;
|
||||
identityAccessTokenId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AddIdentityJwtAuthEvent {
|
||||
type: EventType.ADD_IDENTITY_JWT_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
configurationType: string;
|
||||
jwksUrl?: string;
|
||||
jwksCaCert: string;
|
||||
publicKeys: string[];
|
||||
boundIssuer: string;
|
||||
boundAudiences: string;
|
||||
boundClaims: Record<string, string>;
|
||||
boundSubject: string;
|
||||
accessTokenTTL: number;
|
||||
accessTokenMaxTTL: number;
|
||||
accessTokenNumUsesLimit: number;
|
||||
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIdentityJwtAuthEvent {
|
||||
type: EventType.UPDATE_IDENTITY_JWT_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
configurationType?: string;
|
||||
jwksUrl?: string;
|
||||
jwksCaCert?: string;
|
||||
publicKeys?: string[];
|
||||
boundIssuer?: string;
|
||||
boundAudiences?: string;
|
||||
boundClaims?: Record<string, string>;
|
||||
boundSubject?: string;
|
||||
accessTokenTTL?: number;
|
||||
accessTokenMaxTTL?: number;
|
||||
accessTokenNumUsesLimit?: number;
|
||||
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteIdentityJwtAuthEvent {
|
||||
type: EventType.REVOKE_IDENTITY_JWT_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetIdentityJwtAuthEvent {
|
||||
type: EventType.GET_IDENTITY_JWT_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateEnvironmentEvent {
|
||||
type: EventType.CREATE_ENVIRONMENT;
|
||||
metadata: {
|
||||
@ -1680,6 +1754,7 @@ export type Event =
|
||||
| DeleteSecretBatchEvent
|
||||
| GetWorkspaceKeyEvent
|
||||
| AuthorizeIntegrationEvent
|
||||
| UpdateIntegrationAuthEvent
|
||||
| UnauthorizeIntegrationEvent
|
||||
| CreateIntegrationEvent
|
||||
| DeleteIntegrationEvent
|
||||
@ -1733,6 +1808,11 @@ export type Event =
|
||||
| DeleteIdentityOidcAuthEvent
|
||||
| UpdateIdentityOidcAuthEvent
|
||||
| GetIdentityOidcAuthEvent
|
||||
| LoginIdentityJwtAuthEvent
|
||||
| AddIdentityJwtAuthEvent
|
||||
| UpdateIdentityJwtAuthEvent
|
||||
| GetIdentityJwtAuthEvent
|
||||
| DeleteIdentityJwtAuthEvent
|
||||
| CreateEnvironmentEvent
|
||||
| GetEnvironmentEvent
|
||||
| UpdateEnvironmentEvent
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import ms from "ms";
|
||||
|
||||
import { SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import {
|
||||
@ -67,13 +67,14 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionDynamicSecretActions.Lease,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
@ -112,7 +113,7 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
})
|
||||
) as object;
|
||||
|
||||
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
|
||||
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
|
||||
const { maxTTL } = dynamicSecretCfg;
|
||||
const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
|
||||
if (maxTTL) {
|
||||
@ -146,13 +147,14 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionDynamicSecretActions.Lease,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
@ -187,7 +189,7 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
})
|
||||
) as object;
|
||||
|
||||
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
|
||||
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
|
||||
const { maxTTL } = dynamicSecretCfg;
|
||||
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
|
||||
if (maxTTL) {
|
||||
@ -225,13 +227,14 @@ export const dynamicSecretLeaseServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionDynamicSecretActions.Lease,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import {
|
||||
@ -73,13 +73,14 @@ export const dynamicSecretServiceFactory = ({
|
||||
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
|
||||
const projectId = project.id;
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionDynamicSecretActions.CreateRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
@ -144,13 +145,14 @@ export const dynamicSecretServiceFactory = ({
|
||||
|
||||
const projectId = project.id;
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionDynamicSecretActions.EditRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
@ -227,13 +229,14 @@ export const dynamicSecretServiceFactory = ({
|
||||
|
||||
const projectId = project.id;
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbidOnInvalidProjectType(ProjectType.SecretManager);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
|
||||
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
|
||||
|
@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
|
||||
}
|
||||
};
|
||||
|
||||
const addUserToInfisicalGroup = async (userId: string) => {
|
||||
const $addUserToInfisicalGroup = async (userId: string) => {
|
||||
// figure out if the default user is already in the group, if it is, then we shouldn't add it again
|
||||
|
||||
const addUserToGroupCommand = new ModifyUserGroupCommand({
|
||||
@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
|
||||
await ensureInfisicalGroupExists(clusterName);
|
||||
|
||||
await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
|
||||
await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
|
||||
await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
|
||||
|
||||
return {
|
||||
userId: creationInput.UserId,
|
||||
@ -127,7 +127,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
|
||||
};
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
@ -211,8 +211,8 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
|
@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
|
||||
const client = new IAMClient({
|
||||
region: providerInputs.region,
|
||||
credentials: {
|
||||
@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
|
||||
return isConnected;
|
||||
@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
|
||||
@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
|
||||
@ -179,9 +179,8 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// do nothing
|
||||
const username = entityId;
|
||||
return { entityId: username };
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
|
@ -9,7 +9,7 @@ const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
|
||||
const MSFT_LOGIN_URL = "https://login.microsoftonline.com";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getToken = async (
|
||||
const $getToken = async (
|
||||
tenantId: string,
|
||||
applicationId: string,
|
||||
clientSecret: string
|
||||
@ -51,18 +51,13 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
return data.success;
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
|
||||
if (!data.success) {
|
||||
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
|
||||
}
|
||||
@ -98,7 +93,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
|
||||
};
|
||||
|
||||
const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
|
||||
const data = await getToken(tenantId, applicationId, clientSecret);
|
||||
const data = await $getToken(tenantId, applicationId, clientSecret);
|
||||
if (!data.success) {
|
||||
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
|
||||
}
|
||||
@ -127,6 +122,11 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
|
||||
return users;
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
validateProviderInputs,
|
||||
validateConnection,
|
||||
|
@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
|
||||
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
|
||||
const client = new cassandra.Client({
|
||||
sslOptions,
|
||||
@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
|
||||
await client.shutdown();
|
||||
@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const { keyspace } = providerInputs;
|
||||
@ -99,20 +99,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { keyspace } = providerInputs;
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
keyspace,
|
||||
expiration
|
||||
});
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
for await (const query of queries) {
|
||||
await client.execute(query);
|
||||
}
|
||||
await client.shutdown();
|
||||
return { entityId: username };
|
||||
return { entityId };
|
||||
};
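// Sketch of how the renew template above expands at runtime (the template text itself is
// hypothetical; any CQL referencing the provided variables works):
//   handlebars.compile("ALTER ROLE {{username}} WITH LOGIN = true;")({
//     username: "inf_abc123",
//     keyspace: "app",
//     expiration: "2025-01-01T00:00:00.000Z"
//   })
//   // => "ALTER ROLE inf_abc123 WITH LOGIN = true;"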
|
||||
|
||||
return {
|
||||
|
@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
|
||||
const connection = new ElasticSearchClient({
|
||||
node: {
|
||||
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
|
||||
@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection
|
||||
.info()
|
||||
@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
await connection.security.deleteUser({
|
||||
username: entityId
|
||||
@ -95,8 +95,8 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
|
@ -6,15 +6,17 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
|
||||
import { CassandraProvider } from "./cassandra";
|
||||
import { ElasticSearchProvider } from "./elastic-search";
|
||||
import { LdapProvider } from "./ldap";
|
||||
import { DynamicSecretProviders } from "./models";
|
||||
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
|
||||
import { MongoAtlasProvider } from "./mongo-atlas";
|
||||
import { MongoDBProvider } from "./mongo-db";
|
||||
import { RabbitMqProvider } from "./rabbit-mq";
|
||||
import { RedisDatabaseProvider } from "./redis";
|
||||
import { SapAseProvider } from "./sap-ase";
|
||||
import { SapHanaProvider } from "./sap-hana";
|
||||
import { SqlDatabaseProvider } from "./sql-database";
|
||||
import { TotpProvider } from "./totp";
|
||||
|
||||
export const buildDynamicSecretProviders = () => ({
|
||||
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
|
||||
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
|
||||
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
|
||||
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
|
||||
@ -27,5 +29,7 @@ export const buildDynamicSecretProviders = () => ({
|
||||
[DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
|
||||
[DynamicSecretProviders.Ldap]: LdapProvider(),
|
||||
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
|
||||
[DynamicSecretProviders.Snowflake]: SnowflakeProvider()
|
||||
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
|
||||
[DynamicSecretProviders.Totp]: TotpProvider(),
|
||||
[DynamicSecretProviders.SapAse]: SapAseProvider()
|
||||
});
|
||||
|
@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const client = ldapjs.createClient({
|
||||
url: providerInputs.url,
|
||||
@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
return client.connected;
|
||||
};
|
||||
|
||||
@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
if (providerInputs.credentialType === LdapCredentialType.Static) {
|
||||
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
|
||||
@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
if (providerInputs.credentialType === LdapCredentialType.Static) {
|
||||
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
|
||||
@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
|
@ -4,7 +4,8 @@ export enum SqlProviders {
|
||||
Postgres = "postgres",
|
||||
MySQL = "mysql2",
|
||||
Oracle = "oracledb",
|
||||
MsSQL = "mssql"
|
||||
MsSQL = "mssql",
|
||||
SapAse = "sap-ase"
|
||||
}
|
||||
|
||||
export enum ElasticSearchAuthTypes {
|
||||
@ -17,6 +18,17 @@ export enum LdapCredentialType {
|
||||
Static = "static"
|
||||
}
|
||||
|
||||
export enum TotpConfigType {
  URL = "url",
  MANUAL = "manual"
}

export enum TotpAlgorithm {
  SHA1 = "sha1",
  SHA256 = "sha256",
  SHA512 = "sha512"
}

export const DynamicSecretRedisDBSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
@@ -107,6 +119,16 @@ export const DynamicSecretCassandraSchema = z.object({
  ca: z.string().optional()
});

export const DynamicSecretSapAseSchema = z.object({
  host: z.string().trim().toLowerCase(),
  port: z.number(),
  database: z.string().trim(),
  username: z.string().trim(),
  password: z.string().trim(),
  creationStatement: z.string().trim(),
  revocationStatement: z.string().trim()
});

export const DynamicSecretAwsIamSchema = z.object({
  accessKey: z.string().trim().min(1),
  secretAccessKey: z.string().trim().min(1),
@@ -221,6 +243,34 @@ export const LdapSchema = z.union([
  })
]);

export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
  z.object({
    configType: z.literal(TotpConfigType.URL),
    url: z
      .string()
      .url()
      .trim()
      .min(1)
      .refine((val) => {
        const urlObj = new URL(val);
        const secret = urlObj.searchParams.get("secret");

        return Boolean(secret);
      }, "OTP URL must contain secret field")
  }),
  z.object({
    configType: z.literal(TotpConfigType.MANUAL),
    secret: z
      .string()
      .trim()
      .min(1)
      .transform((val) => val.replace(/\s+/g, "")),
    period: z.number().optional(),
    algorithm: z.nativeEnum(TotpAlgorithm).optional(),
    digits: z.number().optional()
  })
]);
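// Minimal sketch of inputs that would satisfy DynamicSecretTotpSchema above (values are
// illustrative, not taken from the codebase):
//   DynamicSecretTotpSchema.parse({
//     configType: TotpConfigType.URL,
//     url: "otpauth://totp/acme?secret=JBSWY3DPEHPK3PXP" // passes the refine: the URL carries a "secret" param
//   });
//   DynamicSecretTotpSchema.parse({
//     configType: TotpConfigType.MANUAL,
//     secret: "JBSW Y3DP EHPK 3PXP", // whitespace is stripped by the transform
//     period: 30,
//     algorithm: TotpAlgorithm.SHA1,
//     digits: 6
//   });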
|
||||
|
||||
export enum DynamicSecretProviders {
|
||||
SqlDatabase = "sql-database",
|
||||
Cassandra = "cassandra",
|
||||
@ -234,12 +284,15 @@ export enum DynamicSecretProviders {
|
||||
AzureEntraID = "azure-entra-id",
|
||||
Ldap = "ldap",
|
||||
SapHana = "sap-hana",
|
||||
Snowflake = "snowflake"
|
||||
Snowflake = "snowflake",
|
||||
Totp = "totp",
|
||||
SapAse = "sap-ase"
|
||||
}
|
||||
|
||||
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
|
||||
@ -250,7 +303,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
|
||||
z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
|
||||
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
|
||||
]);
|
||||
|
||||
export type TDynamicProviderFns = {
|
||||
|
@ -8,7 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
|
||||
const client = axios.create({
|
||||
baseURL: "https://cloud.mongodb.com/api/atlas",
|
||||
headers: {
|
||||
@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client({
|
||||
method: "GET",
|
||||
@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const isExisting = await client({
|
||||
@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
|
||||
const isSrv = !providerInputs.port;
|
||||
const uri = isSrv
|
||||
? `mongodb+srv://${providerInputs.host}`
|
||||
@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const isConnected = await client
|
||||
.db(providerInputs.database)
|
||||
@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
|
||||
@ -88,6 +88,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
|
||||
};
|
||||
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
|
@ -11,7 +11,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
|
||||
const axiosInstance = axios.create({
|
||||
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
|
||||
auth: {
|
||||
@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const infoResponse = await connection.get("/whoami").then(() => true);
|
||||
|
||||
@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -134,15 +134,15 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
|
||||
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string) => {
|
||||
// Do nothing
|
||||
const renew = async (_inputs: unknown, entityId: string) => {
|
||||
// No renewal necessary
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
|
@ -10,7 +10,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generatePassword = () => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 64)();
|
||||
};
|
||||
|
||||
@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
|
||||
let connection: Redis | null = null;
|
||||
try {
|
||||
connection = new Redis({
|
||||
@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const pingResponse = await connection
|
||||
.ping()
|
||||
@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
|
||||
@ -141,7 +141,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const connection = await getClient(providerInputs);
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
const connection = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
backend/src/ee/services/dynamic-secret/providers/sap-ase.ts (new file, 145 lines)
@@ -0,0 +1,145 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return alphaNumericNanoId(25);
};

enum SapCommands {
  CreateLogin = "sp_addlogin",
  DropLogin = "sp_droplogin"
}

export const SapAseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
    const connectionString =
      `DRIVER={FreeTDS};` +
      `SERVER=${providerInputs.host};` +
      `PORT=${providerInputs.port};` +
      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
      `UID=${providerInputs.username};` +
      `PWD=${providerInputs.password};` +
      `TDS_VERSION=5.0`;

    const client = await odbc.connect(connectionString);

    return client;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const masterClient = await $getClient(providerInputs, true);
    const client = await $getClient(providerInputs);

    const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
    const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");

    if (!resultFromSelectedDatabase.version) {
      throw new BadRequestError({
        message: "Failed to validate SAP ASE connection, version query failed"
      });
    }

    if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
      throw new BadRequestError({
        message: "Failed to validate SAP ASE connection (master), version mismatch"
      });
    }

    return true;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);

    const username = `inf_${generateUsername()}`;
    const password = `${generatePassword()}`;

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);

    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username,
      password
    });

    const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

    for await (const query of queries) {
      // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
      // If not done, then the newly created user won't be able to authenticate.
      await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
    }

    await masterClient.close();
    await client.close();

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);

    const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
      username
    });

    const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);

    // Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
    const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);

    if (result && result.length > 0) {
      for await (const row of result) {
        if (row.spid) {
          await masterClient.query(`KILL ${row.spid.trim()}`);
        }
      }
    }

    for await (const query of queries) {
      await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
    }

    await masterClient.close();
    await client.close();

    return { entityId: username };
  };

  const renew = async (_: unknown, username: string) => {
    // No need for renewal
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
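// Hypothetical usage sketch; the statements below are illustrative Handlebars templates, not
// documented defaults:
//   const provider = SapAseProvider();
//   const ok = await provider.validateConnection({
//     host: "ase.internal.example.com",
//     port: 5000,
//     database: "appdb",
//     username: "sa",
//     password: "<admin password>",
//     creationStatement: "sp_addlogin '{{username}}', '{{password}}'; sp_adduser '{{username}}';",
//     revocationStatement: "sp_dropuser '{{username}}'; sp_droplogin '{{username}}';"
//   });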
|
@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
|
||||
const client = hdb.createClient({
|
||||
host: providerInputs.host,
|
||||
port: providerInputs.port,
|
||||
@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const testResult: boolean = await new Promise((resolve, reject) => {
|
||||
const testResult = await new Promise<boolean>((resolve, reject) => {
|
||||
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
|
||||
if (err) {
|
||||
reject();
|
||||
@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
const password = generatePassword();
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, username: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
@ -135,13 +135,15 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, username: string, expireAt: number) => {
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
const client = await $getClient(providerInputs);
|
||||
try {
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
for await (const query of queries) {
|
||||
await new Promise((resolve, reject) => {
|
||||
@ -161,7 +163,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
|
||||
client.disconnect();
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
|
@ -12,7 +12,7 @@ import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
|
||||
const noop = () => {};
|
||||
|
||||
const generatePassword = (size = 48) => {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
|
||||
const client = snowflake.createConnection({
|
||||
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
|
||||
username: providerInputs.username,
|
||||
@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
let isValidConnection: boolean;
|
||||
|
||||
@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername();
|
||||
const password = generatePassword();
|
||||
@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
const revoke = async (inputs: unknown, username: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
try {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
|
||||
@ -131,17 +131,16 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
const renew = async (inputs: unknown, username: string, expireAt: number) => {
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
if (!providerInputs.renewStatement) return { entityId: username };
|
||||
|
||||
const client = await getClient(providerInputs);
|
||||
const client = await $getClient(providerInputs);
|
||||
|
||||
try {
|
||||
const expiration = getDaysToExpiry(new Date(expireAt));
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username,
|
||||
username: entityId,
|
||||
expiration
|
||||
});
|
||||
|
||||
@ -161,7 +160,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
|
||||
client.destroy(noop);
|
||||
}
|
||||
|
||||
return { entityId: username };
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
|
@ -14,7 +14,7 @@ const generatePassword = (provider: SqlProviders) => {
|
||||
// Oracle enforces a shorter password length limit, so use 30 characters instead of 48
|
||||
const size = provider === SqlProviders.Oracle ? 30 : 48;
|
||||
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
|
||||
return customAlphabet(charset, 48)(size);
|
||||
};
|
||||
|
||||
@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
|
||||
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
|
||||
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
|
||||
const db = knex({
|
||||
client: providerInputs.client,
|
||||
@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await getClient(providerInputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
|
||||
@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await getClient(providerInputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername(providerInputs.client);
|
||||
const password = generatePassword(providerInputs.client);
|
||||
@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await getClient(providerInputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const { database } = providerInputs;
|
||||
@ -110,13 +110,19 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
|
||||
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await getClient(providerInputs);
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { database } = providerInputs;
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
@ -128,7 +134,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
}
|
||||
|
||||
await db.destroy();
|
||||
return { entityId: username };
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
return {
|
||||
|
backend/src/ee/services/dynamic-secret/providers/totp.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";

export const TotpProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);

    return providerInputs;
  };

  const validateConnection = async () => {
    return true;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);

    const entityId = alphaNumericNanoId(32);
    const authenticatorInstance = authenticator.clone();

    let secret: string;
    let period: number | null | undefined;
    let digits: number | null | undefined;
    let algorithm: HashAlgorithms | null | undefined;

    if (providerInputs.configType === TotpConfigType.URL) {
      const urlObj = new URL(providerInputs.url);
      secret = urlObj.searchParams.get("secret") as string;
      const periodFromUrl = urlObj.searchParams.get("period");
      const digitsFromUrl = urlObj.searchParams.get("digits");
      const algorithmFromUrl = urlObj.searchParams.get("algorithm");

      if (periodFromUrl) {
        period = +periodFromUrl;
      }

      if (digitsFromUrl) {
        digits = +digitsFromUrl;
      }

      if (algorithmFromUrl) {
        algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
      }
    } else {
      secret = providerInputs.secret;
      period = providerInputs.period;
      digits = providerInputs.digits;
      algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
    }

    if (digits) {
      authenticatorInstance.options = { digits };
    }

    if (algorithm) {
      authenticatorInstance.options = { algorithm };
    }

    if (period) {
      authenticatorInstance.options = { step: period };
    }

    return {
      entityId,
      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
    };
  };

  const revoke = async (_inputs: unknown, entityId: string) => {
    return { entityId };
  };

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
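For orientation, a minimal standalone sketch of the otplib calls the new TOTP provider relies on; the secret and option values below are hypothetical placeholders, not values from the diff:

```ts
import { authenticator } from "otplib";

// Clone the global authenticator so per-secret options don't leak between requests,
// mirroring what TotpProvider.create does above.
const instance = authenticator.clone();
instance.options = { digits: 6, step: 30 }; // step = TOTP period in seconds

const secret = "JBSWY3DPEHPK3PXP"; // hypothetical base32 secret, for illustration only
const token = instance.generate(secret);
const secondsLeft = instance.timeRemaining();

console.log({ TOTP: token, TIME_REMAINING: secondsLeft });
```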
@ -20,7 +20,8 @@ import {
|
||||
TUpdateExternalKmsDTO
|
||||
} from "./external-kms-types";
|
||||
import { AwsKmsProviderFactory } from "./providers/aws-kms";
|
||||
import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";
|
||||
import { GcpKmsProviderFactory } from "./providers/gcp-kms";
|
||||
import { ExternalKmsAwsSchema, ExternalKmsGcpSchema, KmsProviders, TExternalKmsGcpSchema } from "./providers/model";
|
||||
|
||||
type TExternalKmsServiceFactoryDep = {
|
||||
externalKmsDAL: TExternalKmsDALFactory;
|
||||
@ -78,6 +79,13 @@ export const externalKmsServiceFactory = ({
|
||||
await externalKms.validateConnection();
|
||||
}
|
||||
break;
|
||||
case KmsProviders.Gcp:
|
||||
{
|
||||
const externalKms = await GcpKmsProviderFactory({ inputs: provider.inputs });
|
||||
await externalKms.validateConnection();
|
||||
sanitizedProviderInput = JSON.stringify(provider.inputs);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new BadRequestError({ message: "external kms provided is invalid" });
|
||||
}
|
||||
@ -88,7 +96,7 @@ export const externalKmsServiceFactory = ({
|
||||
});
|
||||
|
||||
const { cipherTextBlob: encryptedProviderInputs } = orgDataKeyEncryptor({
|
||||
plainText: Buffer.from(sanitizedProviderInput, "utf8")
|
||||
plainText: Buffer.from(sanitizedProviderInput)
|
||||
});
|
||||
|
||||
const externalKms = await externalKmsDAL.transaction(async (tx) => {
|
||||
@ -162,7 +170,7 @@ export const externalKmsServiceFactory = ({
|
||||
case KmsProviders.Aws:
|
||||
{
|
||||
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
|
||||
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
|
||||
JSON.parse(decryptedProviderInputBlob.toString())
|
||||
);
|
||||
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
|
||||
const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
|
||||
@ -170,6 +178,17 @@ export const externalKmsServiceFactory = ({
|
||||
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
|
||||
}
|
||||
break;
|
||||
case KmsProviders.Gcp:
|
||||
{
|
||||
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
|
||||
JSON.parse(decryptedProviderInputBlob.toString())
|
||||
);
|
||||
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
|
||||
const externalKms = await GcpKmsProviderFactory({ inputs: updatedProviderInput });
|
||||
await externalKms.validateConnection();
|
||||
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new BadRequestError({ message: "external kms provided is invalid" });
|
||||
}
|
||||
@ -178,7 +197,7 @@ export const externalKmsServiceFactory = ({
|
||||
let encryptedProviderInputs: Buffer | undefined;
|
||||
if (sanitizedProviderInput) {
|
||||
const { cipherTextBlob } = orgDataKeyEncryptor({
|
||||
plainText: Buffer.from(sanitizedProviderInput, "utf8")
|
||||
plainText: Buffer.from(sanitizedProviderInput)
|
||||
});
|
||||
encryptedProviderInputs = cipherTextBlob;
|
||||
}
|
||||
@ -271,10 +290,17 @@ export const externalKmsServiceFactory = ({
|
||||
switch (externalKmsDoc.provider) {
|
||||
case KmsProviders.Aws: {
|
||||
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
|
||||
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
|
||||
JSON.parse(decryptedProviderInputBlob.toString())
|
||||
);
|
||||
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
|
||||
}
|
||||
case KmsProviders.Gcp: {
|
||||
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
|
||||
JSON.parse(decryptedProviderInputBlob.toString())
|
||||
);
|
||||
|
||||
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
|
||||
}
|
||||
default:
|
||||
throw new BadRequestError({ message: "external kms provided is invalid" });
|
||||
}
|
||||
@ -312,21 +338,34 @@ export const externalKmsServiceFactory = ({
|
||||
switch (externalKmsDoc.provider) {
|
||||
case KmsProviders.Aws: {
|
||||
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
|
||||
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
|
||||
JSON.parse(decryptedProviderInputBlob.toString())
|
||||
);
|
||||
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
|
||||
}
|
||||
case KmsProviders.Gcp: {
|
||||
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
|
||||
JSON.parse(decryptedProviderInputBlob.toString())
|
||||
);
|
||||
|
||||
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
|
||||
}
|
||||
default:
|
||||
throw new BadRequestError({ message: "external kms provided is invalid" });
|
||||
}
|
||||
};
|
||||
|
||||
const fetchGcpKeys = async ({ credential, gcpRegion }: Pick<TExternalKmsGcpSchema, "credential" | "gcpRegion">) => {
|
||||
const externalKms = await GcpKmsProviderFactory({ inputs: { credential, gcpRegion, keyName: "" } });
|
||||
return externalKms.getKeysList();
|
||||
};
|
||||
|
||||
return {
|
||||
create,
|
||||
updateById,
|
||||
deleteById,
|
||||
list,
|
||||
findById,
|
||||
findByName
|
||||
findByName,
|
||||
fetchGcpKeys
|
||||
};
|
||||
};
|
||||
|
backend/src/ee/services/external-kms/providers/gcp-kms.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
import { KeyManagementServiceClient } from "@google-cloud/kms";

import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";

import { ExternalKmsGcpSchema, TExternalKmsGcpClientSchema, TExternalKmsProviderFns } from "./model";

const getGcpKmsClient = async ({ credential, gcpRegion }: TExternalKmsGcpClientSchema) => {
  const gcpKmsClient = new KeyManagementServiceClient({
    credentials: credential
  });
  const projectId = credential.project_id;
  const locationName = gcpKmsClient.locationPath(projectId, gcpRegion);

  return {
    gcpKmsClient,
    locationName
  };
};

type GcpKmsProviderArgs = {
  inputs: unknown;
};
type TGcpKmsProviderFactoryReturn = TExternalKmsProviderFns & {
  getKeysList: () => Promise<{ keys: string[] }>;
};

export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Promise<TGcpKmsProviderFactoryReturn> => {
  const { credential, gcpRegion, keyName } = await ExternalKmsGcpSchema.parseAsync(inputs);
  const { gcpKmsClient, locationName } = await getGcpKmsClient({
    credential,
    gcpRegion
  });

  const validateConnection = async () => {
    try {
      await gcpKmsClient.listKeyRings({
        parent: locationName
      });
      return true;
    } catch (error) {
      throw new BadRequestError({
        message: "Cannot connect to GCP KMS"
      });
    }
  };

  // Used when adding the KMS to fetch the list of keys in specified region
  const getKeysList = async () => {
    try {
      const [keyRings] = await gcpKmsClient.listKeyRings({
        parent: locationName
      });

      const validKeyRings = keyRings
        .filter(
          (keyRing): keyRing is { name: string } =>
            keyRing !== null && typeof keyRing === "object" && "name" in keyRing && typeof keyRing.name === "string"
        )
        .map((keyRing) => keyRing.name);
      const keyList: string[] = [];
      const keyListPromises = validKeyRings.map((keyRingName) =>
        gcpKmsClient
          .listCryptoKeys({
            parent: keyRingName
          })
          .then(([cryptoKeys]) =>
            cryptoKeys
              .filter(
                (key): key is { name: string } =>
                  key !== null && typeof key === "object" && "name" in key && typeof key.name === "string"
              )
              .map((key) => key.name)
          )
      );

      const cryptoKeyLists = await Promise.all(keyListPromises);
      keyList.push(...cryptoKeyLists.flat());
      return { keys: keyList };
    } catch (error) {
      logger.error(error, "Could not validate GCP KMS connection and credentials");
      throw new BadRequestError({
        message: "Could not validate GCP KMS connection and credentials",
        error
      });
    }
  };

  const encrypt = async (data: Buffer) => {
    const encryptedText = await gcpKmsClient.encrypt({
      name: keyName,
      plaintext: data
    });
    if (!encryptedText[0].ciphertext) throw new Error("encryption failed");
    return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext) };
  };

  const decrypt = async (encryptedBlob: Buffer) => {
    const decryptedText = await gcpKmsClient.decrypt({
      name: keyName,
      ciphertext: encryptedBlob
    });
    if (!decryptedText[0].plaintext) throw new Error("decryption failed");
    return { data: Buffer.from(decryptedText[0].plaintext) };
  };

  return {
    validateConnection,
    getKeysList,
    encrypt,
    decrypt
  };
};
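As a quick orientation to the client the factory above wraps, here is a minimal sketch of the encrypt/decrypt round trip with `@google-cloud/kms`; the credential source, project, key ring, and key names are placeholders I introduced for illustration:

```ts
import { KeyManagementServiceClient } from "@google-cloud/kms";

// Hypothetical wiring: a service-account JSON like the one ExternalKmsGcpCredentialSchema validates.
const credentials = JSON.parse(process.env.GCP_SERVICE_ACCOUNT_JSON ?? "{}");

const main = async () => {
  const client = new KeyManagementServiceClient({ credentials });
  // Fully-qualified key name; the provider stores this as `keyName`.
  const keyName = client.cryptoKeyPath("my-project", "us-central1", "my-key-ring", "my-key");

  const [encryptRes] = await client.encrypt({ name: keyName, plaintext: Buffer.from("hello") });
  if (!encryptRes.ciphertext) throw new Error("encryption failed");

  const [decryptRes] = await client.decrypt({ name: keyName, ciphertext: encryptRes.ciphertext });
  if (!decryptRes.plaintext) throw new Error("decryption failed");

  console.log(Buffer.from(decryptRes.plaintext as Uint8Array).toString()); // "hello"
};

void main();
```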
@@ -1,13 +1,23 @@
 import { z } from "zod";

 export enum KmsProviders {
-  Aws = "aws"
+  Aws = "aws",
+  Gcp = "gcp"
 }

 export enum KmsAwsCredentialType {
   AssumeRole = "assume-role",
   AccessKey = "access-key"
 }
+// Google uses snake_case for their enum values and we need to match that
+export enum KmsGcpCredentialType {
+  ServiceAccount = "service_account"
+}
+
+export enum KmsGcpKeyFetchAuthType {
+  Credential = "credential",
+  Kms = "kmsId"
+}

 export const ExternalKmsAwsSchema = z.object({
   credential: z
@@ -42,14 +52,44 @@ export const ExternalKmsAwsSchema = z.object({
 });
 export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;

+export const ExternalKmsGcpCredentialSchema = z.object({
+  type: z.literal(KmsGcpCredentialType.ServiceAccount),
+  project_id: z.string().min(1),
+  private_key_id: z.string().min(1),
+  private_key: z.string().min(1),
+  client_email: z.string().min(1),
+  client_id: z.string().min(1),
+  auth_uri: z.string().min(1),
+  token_uri: z.string().min(1),
+  auth_provider_x509_cert_url: z.string().min(1),
+  client_x509_cert_url: z.string().min(1),
+  universe_domain: z.string().min(1)
+});
+
+export type TExternalKmsGcpCredentialSchema = z.infer<typeof ExternalKmsGcpCredentialSchema>;
+
+export const ExternalKmsGcpSchema = z.object({
+  credential: ExternalKmsGcpCredentialSchema.describe("GCP Service Account JSON credential to connect"),
+  gcpRegion: z.string().trim().describe("GCP region where the KMS key is located"),
+  keyName: z.string().trim().describe("GCP key name")
+});
+export type TExternalKmsGcpSchema = z.infer<typeof ExternalKmsGcpSchema>;
+
+const ExternalKmsGcpClientSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true }).extend({
+  credential: ExternalKmsGcpCredentialSchema
+});
+export type TExternalKmsGcpClientSchema = z.infer<typeof ExternalKmsGcpClientSchema>;
+
 // The root schema of the JSON
 export const ExternalKmsInputSchema = z.discriminatedUnion("type", [
-  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema })
+  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema }),
+  z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema })
 ]);
 export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>;

 export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [
-  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() })
+  z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() }),
+  z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema.partial() })
 ]);
 export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>;

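The `type` discriminator is what lets a single endpoint accept either provider's inputs and validate only the matching shape. A self-contained miniature of the same zod pattern, using hypothetical stand-in field lists rather than the real AWS/GCP schemas:

```ts
import { z } from "zod";

// Miniature of ExternalKmsInputSchema: the "type" literal picks which "inputs" schema applies.
const AwsInputs = z.object({ awsRegion: z.string() });                        // hypothetical stand-in
const GcpInputs = z.object({ gcpRegion: z.string(), keyName: z.string() });   // hypothetical stand-in

const KmsInput = z.discriminatedUnion("type", [
  z.object({ type: z.literal("aws"), inputs: AwsInputs }),
  z.object({ type: z.literal("gcp"), inputs: GcpInputs })
]);

KmsInput.parse({ type: "gcp", inputs: { gcpRegion: "us-central1", keyName: "projects/x/key" } }); // ok
const bad = KmsInput.safeParse({ type: "azure", inputs: {} }); // bad.success === false: unknown discriminator is rejected
```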
@ -5,6 +5,8 @@ import { TableName, TGroups } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
|
||||
|
||||
import { EFilterReturnedUsers } from "./group-types";
|
||||
|
||||
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
|
||||
|
||||
export const groupDALFactory = (db: TDbClient) => {
|
||||
@ -66,7 +68,8 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
offset = 0,
|
||||
limit,
|
||||
username, // depreciated in favor of search
|
||||
search
|
||||
search,
|
||||
filter
|
||||
}: {
|
||||
orgId: string;
|
||||
groupId: string;
|
||||
@ -74,6 +77,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
limit?: number;
|
||||
username?: string;
|
||||
search?: string;
|
||||
filter?: EFilterReturnedUsers;
|
||||
}) => {
|
||||
try {
|
||||
const query = db
|
||||
@ -90,6 +94,7 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
.select(
|
||||
db.ref("id").withSchema(TableName.OrgMembership),
|
||||
db.ref("groupId").withSchema(TableName.UserGroupMembership),
|
||||
db.ref("createdAt").withSchema(TableName.UserGroupMembership).as("joinedGroupAt"),
|
||||
db.ref("email").withSchema(TableName.Users),
|
||||
db.ref("username").withSchema(TableName.Users),
|
||||
db.ref("firstName").withSchema(TableName.Users),
|
||||
@ -111,17 +116,37 @@ export const groupDALFactory = (db: TDbClient) => {
|
||||
void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
|
||||
}
|
||||
|
||||
switch (filter) {
|
||||
case EFilterReturnedUsers.EXISTING_MEMBERS:
|
||||
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is not", null);
|
||||
break;
|
||||
case EFilterReturnedUsers.NON_MEMBERS:
|
||||
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is", null);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
const members = await query;
|
||||
|
||||
return {
|
||||
members: members.map(
|
||||
({ email, username: memberUsername, firstName, lastName, userId, groupId: memberGroupId }) => ({
|
||||
({
|
||||
email,
|
||||
username: memberUsername,
|
||||
firstName,
|
||||
lastName,
|
||||
userId,
|
||||
groupId: memberGroupId,
|
||||
joinedGroupAt
|
||||
}) => ({
|
||||
id: userId,
|
||||
email,
|
||||
username: memberUsername,
|
||||
firstName,
|
||||
lastName,
|
||||
isPartOfGroup: !!memberGroupId
|
||||
isPartOfGroup: !!memberGroupId,
|
||||
joinedGroupAt
|
||||
})
|
||||
),
|
||||
// @ts-expect-error col select is raw and not strongly typed
|
||||
|
@ -222,7 +222,8 @@ export const groupServiceFactory = ({
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
search
|
||||
search,
|
||||
filter
|
||||
}: TListGroupUsersDTO) => {
|
||||
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
|
||||
|
||||
@ -251,7 +252,8 @@ export const groupServiceFactory = ({
|
||||
offset,
|
||||
limit,
|
||||
username,
|
||||
search
|
||||
search,
|
||||
filter
|
||||
});
|
||||
|
||||
return { users: members, totalCount };
|
||||
@ -283,8 +285,8 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPrivileges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
|
||||
|
||||
const user = await userDAL.findOne({ username });
|
||||
@ -338,8 +340,8 @@ export const groupServiceFactory = ({
|
||||
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
|
||||
|
||||
// check if user has broader or equal to privileges than group
|
||||
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPriviledges)
|
||||
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
|
||||
if (!hasRequiredPrivileges)
|
||||
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
|
||||
|
||||
const user = await userDAL.findOne({ username });
|
||||
|
@@ -39,6 +39,7 @@ export type TListGroupUsersDTO = {
   limit: number;
   username?: string;
   search?: string;
+  filter?: EFilterReturnedUsers;
 } & TGenericPermission;

 export type TAddUserToGroupDTO = {
@@ -101,3 +102,8 @@ export type TConvertPendingGroupAdditionsToGroupMemberships = {
   projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
   tx?: Knex;
 };
+
+export enum EFilterReturnedUsers {
+  EXISTING_MEMBERS = "existingMembers",
+  NON_MEMBERS = "nonMembers"
+}
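In the group DAL changes earlier in this compare, the two `EFilterReturnedUsers` values map onto a null check against the left-joined membership row. A minimal knex sketch of that idea; the snake_case table and column names here are placeholders, not the project's `TableName` constants:

```ts
import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

const listOrgUsersForGroup = (groupId: string, filter?: "existingMembers" | "nonMembers") => {
  const query = db("users")
    .leftJoin("user_group_membership", (qb) => {
      void qb
        .on("user_group_membership.userId", "users.id")
        .andOn("user_group_membership.groupId", db.raw("?", [groupId]));
    })
    .select("users.id", "users.username", "user_group_membership.createdAt as joinedGroupAt");

  // A joined row exists only for current members, so the createdAt null check is the filter.
  if (filter === "existingMembers") void query.andWhere("user_group_membership.createdAt", "is not", null);
  if (filter === "nonMembers") void query.andWhere("user_group_membership.createdAt", "is", null);

  return query;
};
```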
@@ -27,7 +27,7 @@ export const initializeHsmModule = () => {

     logger.info("PKCS#11 module initialized");
   } catch (err) {
-    logger.error("Failed to initialize PKCS#11 module:", err);
+    logger.error(err, "Failed to initialize PKCS#11 module");
     throw err;
   }
 };
@@ -39,7 +39,7 @@ export const initializeHsmModule = () => {
       isInitialized = false;
       logger.info("PKCS#11 module finalized");
     } catch (err) {
-      logger.error("Failed to finalize PKCS#11 module:", err);
+      logger.error(err, "Failed to finalize PKCS#11 module");
       throw err;
     }
   }
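Several hunks in this range flip logger calls from `logger.error("msg:", err)` to `logger.error(err, "msg")`. Assuming the app logger is pino-compatible (an assumption on my part), the error belongs in the first position so it is serialized as structured data rather than trailing the message string:

```ts
import pino from "pino";

const logger = pino();

try {
  throw new Error("boom");
} catch (err) {
  // pino's convention is (mergingObject, message): passing the error first
  // attaches it (stack included) under the `err` key of the emitted log line.
  logger.error(err, "Failed to initialize PKCS#11 module");
}
```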
@ -1,4 +1,4 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { packRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
@ -62,7 +62,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
|
||||
ActorType.IDENTITY,
|
||||
identityId,
|
||||
@ -139,7 +142,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
|
||||
ActorType.IDENTITY,
|
||||
identityProjectMembership.identityId,
|
||||
@ -216,7 +222,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
|
||||
ActorType.IDENTITY,
|
||||
identityProjectMembership.identityId,
|
||||
@ -258,7 +267,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
|
||||
return {
|
||||
...identityPrivilege,
|
||||
@ -289,7 +301,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
|
||||
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -321,7 +336,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
|
||||
);
|
||||
|
||||
const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find(
|
||||
{
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
|
||||
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
|
||||
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
|
||||
import ms from "ms";
|
||||
|
||||
@ -69,7 +69,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
|
||||
ActorType.IDENTITY,
|
||||
identityId,
|
||||
@ -146,7 +150,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
|
||||
ActorType.IDENTITY,
|
||||
@ -241,7 +249,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Edit,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
|
||||
ActorType.IDENTITY,
|
||||
identityProjectMembership.identityId,
|
||||
@ -294,7 +306,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
|
||||
slug,
|
||||
@ -333,7 +348,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
subject(ProjectPermissionSub.Identity, { identityId })
|
||||
);
|
||||
|
||||
const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find({
|
||||
projectMembershipId: identityProjectMembership.id
|
||||
|
@@ -36,8 +36,7 @@ export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean>
     });

     ldapClient.on("error", (err) => {
-      logger.error("LDAP client error:", err);
-      logger.error(err);
+      logger.error(err, "LDAP client error");
       resolve(false);
     });

@@ -161,8 +161,8 @@ export const licenseServiceFactory = ({
       }
     } catch (error) {
       logger.error(
-        `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`,
-        error
+        error,
+        `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`
       );
       await keyStore.setItemWithExpiry(
         FEATURE_CACHE_KEY(orgId),
@ -31,7 +31,6 @@ export enum OrgPermissionSubjects {
|
||||
}
|
||||
|
||||
export type OrgPermissionSet =
|
||||
| [OrgPermissionActions.Read, OrgPermissionSubjects.Workspace]
|
||||
| [OrgPermissionActions.Create, OrgPermissionSubjects.Workspace]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Role]
|
||||
| [OrgPermissionActions, OrgPermissionSubjects.Member]
|
||||
@ -52,7 +51,6 @@ export type OrgPermissionSet =
|
||||
const buildAdminPermission = () => {
|
||||
const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
|
||||
// ws permissions
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
|
||||
// role permission
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
|
||||
@ -135,7 +133,6 @@ export const orgAdminPermissions = buildAdminPermission();
|
||||
const buildMemberPermission = () => {
|
||||
const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
|
||||
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
|
||||
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
|
||||
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
|
||||
|
@ -127,14 +127,15 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
|
||||
const getProjectPermission = async (userId: string, projectId: string) => {
|
||||
try {
|
||||
const subQueryUserGroups = db(TableName.UserGroupMembership).where("userId", userId).select("groupId");
|
||||
const docs = await db
|
||||
.replicaNode()(TableName.Users)
|
||||
.where(`${TableName.Users}.id`, userId)
|
||||
.leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
|
||||
.leftJoin(TableName.GroupProjectMembership, (queryBuilder) => {
|
||||
void queryBuilder
|
||||
.on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId]))
|
||||
.andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`);
|
||||
// @ts-expect-error akhilmhdh: this is valid knexjs query. Its just ts type argument is missing it
|
||||
.andOnIn(`${TableName.GroupProjectMembership}.groupId`, subQueryUserGroups);
|
||||
})
|
||||
.leftJoin(
|
||||
TableName.GroupProjectMembershipRole,
|
||||
@ -268,6 +269,7 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue"),
|
||||
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
|
||||
db.ref("orgId").withSchema(TableName.Project),
|
||||
db.ref("type").withSchema(TableName.Project).as("projectType"),
|
||||
db.ref("id").withSchema(TableName.Project).as("projectId")
|
||||
);
|
||||
|
||||
@ -283,13 +285,15 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
membershipCreatedAt,
|
||||
groupMembershipCreatedAt,
|
||||
groupMembershipUpdatedAt,
|
||||
membershipUpdatedAt
|
||||
membershipUpdatedAt,
|
||||
projectType
|
||||
}) => ({
|
||||
orgId,
|
||||
orgAuthEnforced,
|
||||
userId,
|
||||
projectId,
|
||||
username,
|
||||
projectType,
|
||||
id: membershipId || groupMembershipId,
|
||||
createdAt: membershipCreatedAt || groupMembershipCreatedAt,
|
||||
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt
|
||||
@ -448,6 +452,7 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
db.ref("id").withSchema(TableName.IdentityProjectMembership).as("membershipId"),
|
||||
db.ref("name").withSchema(TableName.Identity).as("identityName"),
|
||||
db.ref("orgId").withSchema(TableName.Project).as("orgId"), // Now you can select orgId from Project
|
||||
db.ref("type").withSchema(TableName.Project).as("projectType"),
|
||||
db.ref("createdAt").withSchema(TableName.IdentityProjectMembership).as("membershipCreatedAt"),
|
||||
db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership).as("membershipUpdatedAt"),
|
||||
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
|
||||
@ -479,7 +484,14 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
const permission = sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "membershipId",
|
||||
parentMapper: ({ membershipId, membershipCreatedAt, membershipUpdatedAt, orgId, identityName }) => ({
|
||||
parentMapper: ({
|
||||
membershipId,
|
||||
membershipCreatedAt,
|
||||
membershipUpdatedAt,
|
||||
orgId,
|
||||
identityName,
|
||||
projectType
|
||||
}) => ({
|
||||
id: membershipId,
|
||||
identityId,
|
||||
username: identityName,
|
||||
@ -487,6 +499,7 @@ export const permissionDALFactory = (db: TDbClient) => {
|
||||
createdAt: membershipCreatedAt,
|
||||
updatedAt: membershipUpdatedAt,
|
||||
orgId,
|
||||
projectType,
|
||||
// just a prefilled value
|
||||
orgAuthEnforced: false
|
||||
}),
|
||||
|
@ -6,6 +6,7 @@ import handlebars from "handlebars";
|
||||
import {
|
||||
OrgMembershipRole,
|
||||
ProjectMembershipRole,
|
||||
ProjectType,
|
||||
ServiceTokenScopes,
|
||||
TIdentityProjectMemberships,
|
||||
TProjectMemberships
|
||||
@ -255,6 +256,13 @@ export const permissionServiceFactory = ({
|
||||
return {
|
||||
permission,
|
||||
membership: userProjectPermission,
|
||||
ForbidOnInvalidProjectType: (productType: ProjectType) => {
|
||||
if (productType !== userProjectPermission.projectType) {
|
||||
throw new BadRequestError({
|
||||
message: `The project is of type ${userProjectPermission.projectType}. Operations of type ${productType} are not allowed.`
|
||||
});
|
||||
}
|
||||
},
|
||||
hasRole: (role: string) =>
|
||||
userProjectPermission.roles.findIndex(
|
||||
({ role: slug, customRoleSlug }) => role === slug || slug === customRoleSlug
|
||||
@ -323,6 +331,13 @@ export const permissionServiceFactory = ({
|
||||
return {
|
||||
permission,
|
||||
membership: identityProjectPermission,
|
||||
ForbidOnInvalidProjectType: (productType: ProjectType) => {
|
||||
if (productType !== identityProjectPermission.projectType) {
|
||||
throw new BadRequestError({
|
||||
message: `The project is of type ${identityProjectPermission.projectType}. Operations of type ${productType} are not allowed.`
|
||||
});
|
||||
}
|
||||
},
|
||||
hasRole: (role: string) =>
|
||||
identityProjectPermission.roles.findIndex(
|
||||
({ role: slug, customRoleSlug }) => role === slug || slug === customRoleSlug
|
||||
@ -361,7 +376,14 @@ export const permissionServiceFactory = ({
|
||||
const scopes = ServiceTokenScopes.parse(serviceToken.scopes || []);
|
||||
return {
|
||||
permission: buildServiceTokenProjectPermission(scopes, serviceToken.permissions),
|
||||
membership: undefined
|
||||
membership: undefined,
|
||||
ForbidOnInvalidProjectType: (productType: ProjectType) => {
|
||||
if (productType !== serviceTokenProject.type) {
|
||||
throw new BadRequestError({
|
||||
message: `The project is of type ${serviceTokenProject.type}. Operations of type ${productType} are not allowed.`
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@ -370,6 +392,7 @@ export const permissionServiceFactory = ({
|
||||
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
|
||||
membership: undefined;
|
||||
hasRole: (arg: string) => boolean;
|
||||
ForbidOnInvalidProjectType: (type: ProjectType) => void;
|
||||
} // service token doesn't have both membership and roles
|
||||
: {
|
||||
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
|
||||
@ -379,6 +402,7 @@ export const permissionServiceFactory = ({
|
||||
roles: Array<{ role: string }>;
|
||||
};
|
||||
hasRole: (role: string) => boolean;
|
||||
ForbidOnInvalidProjectType: (type: ProjectType) => void;
|
||||
};
|
||||
|
||||
const getProjectPermission = async <T extends ActorType>(
|
||||
|
@@ -1,14 +1,7 @@
 import picomatch from "picomatch";
 import { z } from "zod";

-export enum PermissionConditionOperators {
-  $IN = "$in",
-  $ALL = "$all",
-  $REGEX = "$regex",
-  $EQ = "$eq",
-  $NEQ = "$ne",
-  $GLOB = "$glob"
-}
+import { PermissionConditionOperators } from "@app/lib/casl";

 export const PermissionConditionSchema = {
   [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
@ -1,10 +1,10 @@
|
||||
import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
|
||||
import { z } from "zod";
|
||||
|
||||
import { conditionsMatcher } from "@app/lib/casl";
|
||||
import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
|
||||
|
||||
import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";
|
||||
import { PermissionConditionSchema } from "./permission-types";
|
||||
|
||||
export enum ProjectPermissionActions {
|
||||
Read = "read",
|
||||
@ -82,6 +82,10 @@ export type SecretImportSubjectFields = {
|
||||
secretPath: string;
|
||||
};
|
||||
|
||||
export type IdentityManagementSubjectFields = {
|
||||
identityId: string;
|
||||
};
|
||||
|
||||
export type ProjectPermissionSet =
|
||||
| [
|
||||
ProjectPermissionActions,
|
||||
@ -121,7 +125,10 @@ export type ProjectPermissionSet =
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.ServiceTokens]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Identity]
|
||||
| [
|
||||
ProjectPermissionActions,
|
||||
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
|
||||
]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.Certificates]
|
||||
| [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates]
|
||||
@ -213,6 +220,21 @@ const SecretConditionV2Schema = z
|
||||
})
|
||||
.partial();
|
||||
|
||||
const IdentityManagementConditionSchema = z
|
||||
.object({
|
||||
identityId: z.union([
|
||||
z.string(),
|
||||
z
|
||||
.object({
|
||||
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
|
||||
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
|
||||
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
|
||||
})
|
||||
.partial()
|
||||
])
|
||||
})
|
||||
.partial();
|
||||
|
||||
const GeneralPermissionSchema = [
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
|
||||
@ -262,12 +284,6 @@ const GeneralPermissionSchema = [
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.ServiceTokens).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
@ -373,6 +389,12 @@ export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
)
|
||||
}),
|
||||
...GeneralPermissionSchema
|
||||
]);
|
||||
|
||||
@ -417,6 +439,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
|
||||
"When specified, only matching conditions will be allowed to access given resource."
|
||||
).optional()
|
||||
}),
|
||||
z.object({
|
||||
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
|
||||
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
|
||||
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
|
||||
"Describe what action an entity can take."
|
||||
),
|
||||
conditions: IdentityManagementConditionSchema.describe(
|
||||
"When specified, only matching conditions will be allowed to access given resource."
|
||||
).optional()
|
||||
}),
|
||||
...GeneralPermissionSchema
|
||||
]);
|
||||
|
||||
@ -697,26 +729,26 @@ export const buildServiceTokenProjectPermission = (
|
||||
[ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
|
||||
(subject) => {
|
||||
if (canWrite) {
|
||||
// TODO: @Akhi
|
||||
// @ts-expect-error type
|
||||
can(ProjectPermissionActions.Edit, subject, {
|
||||
// TODO: @Akhi
|
||||
// @ts-expect-error type
|
||||
secretPath: { $glob: secretPath },
|
||||
environment
|
||||
});
|
||||
// @ts-expect-error type
|
||||
can(ProjectPermissionActions.Create, subject, {
|
||||
// @ts-expect-error type
|
||||
secretPath: { $glob: secretPath },
|
||||
environment
|
||||
});
|
||||
// @ts-expect-error type
|
||||
can(ProjectPermissionActions.Delete, subject, {
|
||||
// @ts-expect-error type
|
||||
secretPath: { $glob: secretPath },
|
||||
environment
|
||||
});
|
||||
}
|
||||
if (canRead) {
|
||||
// @ts-expect-error type
|
||||
can(ProjectPermissionActions.Read, subject, {
|
||||
// @ts-expect-error type
|
||||
secretPath: { $glob: secretPath },
|
||||
environment
|
||||
});
|
||||
|
@@ -46,7 +46,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
       }
       return rateLimit;
     } catch (err) {
-      logger.error("Error fetching rate limits %o", err);
+      logger.error(err, "Error fetching rate limits");
       return undefined;
     }
   };
@@ -69,12 +69,12 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
         mfaRateLimit: rateLimit.mfaRateLimit
       };

-      logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
+      logger.info(newRateLimitMaxConfiguration, "syncRateLimitConfiguration: rate limit configuration");
       Object.freeze(newRateLimitMaxConfiguration);
       rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
     }
   } catch (error) {
-    logger.error(`Error syncing rate limit configurations: %o`, error);
+    logger.error(error, "Error syncing rate limit configurations");
   }
 };

@ -18,6 +18,7 @@ import { TGroupProjectDALFactory } from "@app/services/group-project/group-proje
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { deleteOrgMembershipFn } from "@app/services/org/org-fns";
|
||||
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
|
||||
import { OrgAuthMethod } from "@app/services/org/org-types";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
@ -71,6 +72,7 @@ type TScimServiceFactoryDep = {
|
||||
| "deleteMembershipById"
|
||||
| "transaction"
|
||||
| "updateMembershipById"
|
||||
| "findOrgById"
|
||||
>;
|
||||
orgMembershipDAL: Pick<
|
||||
TOrgMembershipDALFactory,
|
||||
@ -288,8 +290,7 @@ export const scimServiceFactory = ({
|
||||
const createScimUser = async ({ externalId, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
|
||||
if (!email) throw new ScimRequestError({ detail: "Invalid request. Missing email.", status: 400 });
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
|
||||
const org = await orgDAL.findOrgById(orgId);
|
||||
if (!org)
|
||||
throw new ScimRequestError({
|
||||
detail: "Organization not found",
|
||||
@ -302,13 +303,24 @@ export const scimServiceFactory = ({
|
||||
status: 403
|
||||
});
|
||||
|
||||
if (!org.orgAuthMethod) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Neither SAML or OIDC SSO is configured",
|
||||
status: 400
|
||||
});
|
||||
}
|
||||
|
||||
const appCfg = getConfig();
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
const aliasType = org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML;
|
||||
const trustScimEmails =
|
||||
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;
|
||||
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
aliasType: UserAliasType.SAML
|
||||
aliasType
|
||||
});
|
||||
|
||||
const { user: createdUser, orgMembership: createdOrgMembership } = await userDAL.transaction(async (tx) => {
|
||||
@ -349,7 +361,7 @@ export const scimServiceFactory = ({
|
||||
);
|
||||
}
|
||||
} else {
|
||||
if (serverCfg.trustSamlEmails) {
|
||||
if (trustScimEmails) {
|
||||
user = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
@ -367,9 +379,9 @@ export const scimServiceFactory = ({
|
||||
);
|
||||
user = await userDAL.create(
|
||||
{
|
||||
username: serverCfg.trustSamlEmails ? email : uniqueUsername,
|
||||
username: trustScimEmails ? email : uniqueUsername,
|
||||
email,
|
||||
isEmailVerified: serverCfg.trustSamlEmails,
|
||||
isEmailVerified: trustScimEmails,
|
||||
firstName,
|
||||
lastName,
|
||||
authMethods: [],
|
||||
@ -382,7 +394,7 @@ export const scimServiceFactory = ({
|
||||
await userAliasDAL.create(
|
||||
{
|
||||
userId: user.id,
|
||||
aliasType: UserAliasType.SAML,
|
||||
aliasType,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
orgId
|
||||
@ -437,7 +449,7 @@ export const scimServiceFactory = ({
|
||||
recipients: [email],
|
||||
substitutions: {
|
||||
organizationName: org.name,
|
||||
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}`
|
||||
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/organizations/${org.slug}`
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -456,6 +468,14 @@ export const scimServiceFactory = ({
|
||||
|
||||
// partial
|
||||
const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
|
||||
const org = await orgDAL.findOrgById(orgId);
|
||||
if (!org.orgAuthMethod) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Neither SAML or OIDC SSO is configured",
|
||||
status: 400
|
||||
});
|
||||
}
|
||||
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
|
||||
@ -493,6 +513,9 @@ export const scimServiceFactory = ({
|
||||
scimPatch(scimUser, operations);
|
||||
|
||||
const serverCfg = await getServerCfg();
|
||||
const trustScimEmails =
|
||||
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;
|
||||
|
||||
await userDAL.transaction(async (tx) => {
|
||||
await orgMembershipDAL.updateById(
|
||||
membership.id,
|
||||
@ -508,7 +531,7 @@ export const scimServiceFactory = ({
|
||||
firstName: scimUser.name.givenName,
|
||||
email: scimUser.emails[0].value,
|
||||
lastName: scimUser.name.familyName,
|
||||
isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true
|
||||
isEmailVerified: hasEmailChanged ? trustScimEmails : true
|
||||
},
|
||||
tx
|
||||
);
|
||||
@ -526,6 +549,14 @@ export const scimServiceFactory = ({
|
||||
email,
|
||||
externalId
|
||||
}: TReplaceScimUserDTO) => {
|
||||
const org = await orgDAL.findOrgById(orgId);
|
||||
if (!org.orgAuthMethod) {
|
||||
throw new ScimRequestError({
|
||||
detail: "Neither SAML or OIDC SSO is configured",
|
||||
status: 400
|
||||
});
|
||||
}
|
||||
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
|
||||
@ -555,7 +586,7 @@ export const scimServiceFactory = ({
|
||||
await userAliasDAL.update(
|
||||
{
|
||||
orgId,
|
||||
aliasType: UserAliasType.SAML,
|
||||
aliasType: org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML,
|
||||
userId: membership.userId
|
||||
},
|
||||
{
|
||||
@ -576,7 +607,8 @@ export const scimServiceFactory = ({
|
||||
firstName,
|
||||
email,
|
||||
lastName,
|
||||
isEmailVerified: serverCfg.trustSamlEmails
|
||||
isEmailVerified:
|
||||
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
@@ -177,5 +177,10 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
     }
   };

-  return { ...secretApprovalPolicyOrm, findById, find };
+  const softDeleteById = async (policyId: string, tx?: Knex) => {
+    const softDeletedPolicy = await secretApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
+    return softDeletedPolicy;
+  };
+
+  return { ...secretApprovalPolicyOrm, findById, find, softDeleteById };
 };
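The DAL change above replaces hard deletion with a `deletedAt` timestamp, and later hunks filter reads with `deletedAt: null`. A minimal sketch of that soft-delete pattern with knex; the `deletedAt` column follows the diff, while the table name and connection details are illustrative:

```ts
import knex, { Knex } from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

// Soft delete: mark the row instead of removing it, so open approval requests
// that reference the policy can still be resolved against its historical state.
const softDeletePolicyById = async (policyId: string, tx?: Knex) =>
  (tx ?? db)("secret_approval_policies").where({ id: policyId }).update({ deletedAt: new Date() });

// Reads that should only see live policies filter out the marked rows.
const findLivePolicies = (projectId: string) =>
  db("secret_approval_policies").where({ projectId }).whereNull("deletedAt");
```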
@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import picomatch from "picomatch";

import { ProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@@ -11,6 +12,8 @@ import { TUserDALFactory } from "@app/services/user/user-dal";

import { ApproverType } from "../access-approval-policy/access-approval-policy-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { TSecretApprovalRequestDALFactory } from "../secret-approval-request/secret-approval-request-dal";
import { RequestState } from "../secret-approval-request/secret-approval-request-types";
import { TSecretApprovalPolicyApproverDALFactory } from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
import {
@@ -34,6 +37,7 @@ type TSecretApprovalPolicyServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "find">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "update">;
};

export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@@ -44,7 +48,8 @@ export const secretApprovalPolicyServiceFactory = ({
secretApprovalPolicyApproverDAL,
projectEnvDAL,
userDAL,
licenseService
licenseService,
secretApprovalRequestDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createSecretApprovalPolicy = async ({
name,
@@ -74,13 +79,14 @@ export const secretApprovalPolicyServiceFactory = ({
if (!groupApprovers.length && approvals > approvers.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
@@ -187,13 +193,14 @@ export const secretApprovalPolicyServiceFactory = ({
});
}

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
secretApprovalPolicy.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);

const plan = await licenseService.getPlan(actorOrgId);
@@ -281,13 +288,14 @@ export const secretApprovalPolicyServiceFactory = ({
if (!sapPolicy)
throw new NotFoundError({ message: `Secret approval policy with ID '${secretPolicyId}' not found` });

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
sapPolicy.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.SecretApproval
@@ -301,8 +309,16 @@ export const secretApprovalPolicyServiceFactory = ({
});
}

await secretApprovalPolicyDAL.deleteById(secretPolicyId);
return sapPolicy;
const deletedPolicy = await secretApprovalPolicyDAL.transaction(async (tx) => {
await secretApprovalRequestDAL.update(
{ policyId: secretPolicyId, status: RequestState.Open },
{ status: RequestState.Closed },
tx
);
const updatedPolicy = await secretApprovalPolicyDAL.softDeleteById(secretPolicyId, tx);
return updatedPolicy;
});
return { ...deletedPolicy, projectId: sapPolicy.projectId, environment: sapPolicy.environment };
};

const getSecretApprovalPolicyByProjectId = async ({
@@ -321,7 +337,7 @@ export const secretApprovalPolicyServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);

const sapPolicies = await secretApprovalPolicyDAL.find({ projectId });
const sapPolicies = await secretApprovalPolicyDAL.find({ projectId, deletedAt: null });
return sapPolicies;
};

@@ -334,7 +350,7 @@ export const secretApprovalPolicyServiceFactory = ({
});
}

const policies = await secretApprovalPolicyDAL.find({ envId: env.id });
const policies = await secretApprovalPolicyDAL.find({ envId: env.id, deletedAt: null });
if (!policies.length) return;
// this will filter policies either without scoped to secret path or the one that matches with secret path
const policiesFilteredByPath = policies.filter(

@@ -111,7 +111,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals")
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
);

const findById = async (id: string, tx?: Knex) => {
@@ -147,7 +148,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt
}
}),
childrenMapper: [
@@ -222,6 +224,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.where({ projectId })
.andWhere(
(bd) =>
@@ -229,6 +236,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
)
.andWhere((bd) => void bd.where(`${TableName.SecretApprovalPolicy}.deletedAt`, null))
.select("status", `${TableName.SecretApprovalRequest}.id`)
.groupBy(`${TableName.SecretApprovalRequest}.id`, "status")
.count("status")

@@ -2,6 +2,7 @@ import { ForbiddenError, subject } from "@casl/ability";

import {
ProjectMembershipRole,
ProjectType,
SecretEncryptionAlgo,
SecretKeyEncoding,
SecretType,
@@ -232,10 +233,10 @@ export const secretApprovalRequestServiceFactory = ({
type: KmsDataKey.SecretManager,
projectId
});
const encrypedSecrets = await secretApprovalRequestSecretDAL.findByRequestIdBridgeSecretV2(
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestIdBridgeSecretV2(
secretApprovalRequest.id
);
secrets = encrypedSecrets.map((el) => ({
secrets = encryptedSecrets.map((el) => ({
...el,
secretKey: el.key,
id: el.id,
@@ -274,8 +275,8 @@ export const secretApprovalRequestServiceFactory = ({
}));
} else {
if (!botKey) throw new NotFoundError({ message: `Project bot key not found`, name: "BotKeyNotFound" }); // CLI depends on this error message. TODO(daniel): Make API check for name BotKeyNotFound instead of message
const encrypedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encrypedSecrets.map((el) => ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
...decryptSecretWithBot(el, botKey),
secret: el.secret
@@ -323,6 +324,12 @@ export const secretApprovalRequestServiceFactory = ({
}

const { policy } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
});
}

const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
@@ -383,6 +390,12 @@ export const secretApprovalRequestServiceFactory = ({
}

const { policy } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
});
}

const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
@@ -433,6 +446,12 @@ export const secretApprovalRequestServiceFactory = ({
}

const { policy, folderId, projectId } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
});
}

const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
@@ -857,13 +876,14 @@ export const secretApprovalRequestServiceFactory = ({
}: TGenerateSecretApprovalRequestDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
@@ -1137,14 +1157,14 @@ export const secretApprovalRequestServiceFactory = ({
if (actor === ActorType.SERVICE || actor === ActorType.Machine)
throw new BadRequestError({ message: "Cannot use service token or machine token over protected branches" });

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);

ForbidOnInvalidProjectType(ProjectType.SecretManager);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder)
throw new NotFoundError({

@@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import Ajv from "ajv";

import { ProjectVersion, TableName } from "@app/db/schemas";
import { ProjectType, ProjectVersion, TableName } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
@@ -53,13 +53,14 @@ export const secretRotationServiceFactory = ({
actorAuthMethod,
projectId
}: TProjectPermission) => {
const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation);

return {
@@ -81,13 +82,14 @@ export const secretRotationServiceFactory = ({
secretPath,
environment
}: TCreateSecretRotationDTO) => {
const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretRotation
@@ -234,13 +236,14 @@ export const secretRotationServiceFactory = ({
message: "Failed to add secret rotation due to plan restriction. Upgrade plan to add secret rotation."
});

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
doc.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretRotation);
await secretRotationQueue.removeFromQueue(doc.id, doc.interval);
await secretRotationQueue.addToQueue(doc.id, doc.interval);
@@ -251,13 +254,14 @@ export const secretRotationServiceFactory = ({
const doc = await secretRotationDAL.findById(rotationId);
if (!doc) throw new NotFoundError({ message: `Rotation with ID '${rotationId}' not found` });

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
doc.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.SecretRotation

@@ -238,11 +238,11 @@ export const secretScanningQueueFactory = ({
});

queueService.listen(QueueName.SecretPushEventScan, "failed", (job, err) => {
logger.error("Failed to secret scan on push", job?.data, err);
logger.error(err, "Failed to secret scan on push", job?.data);
});

queueService.listen(QueueName.SecretFullRepoScan, "failed", (job, err) => {
logger.error("Failed to do full repo secret scan", job?.data, err);
logger.error(err, "Failed to do full repo secret scan", job?.data);
});

return { startFullRepoScan, startPushEventScan };

@@ -1,6 +1,6 @@
import { ForbiddenError, subject } from "@casl/ability";

import { TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
import { ProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
@@ -322,13 +322,14 @@ export const secretSnapshotServiceFactory = ({
if (!snapshot) throw new NotFoundError({ message: `Snapshot with ID '${snapshotId}' not found` });
const shouldUseBridge = snapshot.projectVersion === 3;

const { permission } = await permissionService.getProjectPermission(
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
snapshot.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretRollback

@@ -19,7 +19,9 @@ export const GROUPS = {
offset: "The offset to start from. If you enter 10, it will start from the 10th user.",
limit: "The number of users to return.",
username: "The username to search for.",
search: "The text string that user email or name will be filtered by."
search: "The text string that user email or name will be filtered by.",
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
ADD_USER: {
id: "The ID of the group to add the user to.",
@@ -349,6 +351,52 @@ export const OIDC_AUTH = {
}
} as const;

export const JWT_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
configurationType: "The configuration for validating JWTs. Must be one of: 'jwks', 'static'",
jwksUrl:
"The URL of the JWKS endpoint. Required if configurationType is 'jwks'. This endpoint must serve JSON Web Key Sets (JWKS) containing the public keys used to verify JWT signatures.",
jwksCaCert: "The PEM-encoded CA certificate for validating the TLS connection to the JWKS endpoint.",
publicKeys:
"A list of PEM-encoded public keys used to verify JWT signatures. Required if configurationType is 'static'. Each key must be in RSA or ECDSA format and properly PEM-encoded with BEGIN/END markers.",
boundIssuer: "The unique identifier of the JWT provider.",
boundAudiences: "The list of intended recipients.",
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
boundSubject: "The expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an access token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
configurationType: "The new configuration for validating JWTs. Must be one of: 'jwks', 'static'",
jwksUrl:
"The new URL of the JWKS endpoint. This endpoint must serve JSON Web Key Sets (JWKS) containing the public keys used to verify JWT signatures.",
jwksCaCert: "The new PEM-encoded CA certificate for validating the TLS connection to the JWKS endpoint.",
publicKeys:
"A new list of PEM-encoded public keys used to verify JWT signatures. Each key must be in RSA or ECDSA format and properly PEM-encoded with BEGIN/END markers.",
boundIssuer: "The new unique identifier of the JWT provider.",
boundAudiences: "The new list of intended recipients.",
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
boundSubject: "The new expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;

export const ORGANIZATIONS = {
LIST_USER_MEMBERSHIPS: {
organizationId: "The ID of the organization to get memberships from."
@@ -380,7 +428,8 @@ export const ORGANIZATIONS = {
search: "The text string that identity membership names will be filtered by."
},
GET_PROJECTS: {
organizationId: "The ID of the organization to get projects from."
organizationId: "The ID of the organization to get projects from.",
type: "The type of project to filter by."
},
LIST_GROUPS: {
organizationId: "The ID of the organization to list groups for."
@@ -391,6 +440,7 @@ export const PROJECTS = {
CREATE: {
organizationSlug: "The slug of the organization to create the project in.",
projectName: "The name of the project to create.",
projectDescription: "An optional description label for the project.",
slug: "An optional slug for the project.",
template: "The name of the project template, if specified, to apply to this project."
},
@@ -403,6 +453,7 @@ export const PROJECTS = {
UPDATE: {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project."
},
GET_KEY: {
@@ -1030,6 +1081,9 @@ export const INTEGRATION_AUTH = {
DELETE_BY_ID: {
integrationAuthId: "The ID of integration authentication object to delete."
},
UPDATE_BY_ID: {
integrationAuthId: "The ID of integration authentication object to update."
},
CREATE_ACCESS_TOKEN: {
workspaceId: "The ID of the project to create the integration auth for.",
integration: "The slug of integration for the auth object.",
@@ -1080,16 +1134,19 @@ export const INTEGRATION = {
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
shouldEnableDelete: "The flag to enable deletion of secrets."
shouldEnableDelete: "The flag to enable deletion of secrets.",
octopusDeployScopeValues: "Specifies the scope values to set on synced secrets to Octopus Deploy."
}
},
UPDATE: {
integrationId: "The ID of the integration object.",
region: "AWS region to sync secrets to.",
app: "The name of the external integration providers app entity that you want to sync secrets with. Used in Netlify, GitHub, Vercel integrations.",
appId:
"The ID of the external integration providers app entity that you want to sync secrets with. Used in Netlify, GitHub, Vercel integrations.",
isActive: "Whether the integration should be active or disabled.",
secretPath: "The path of the secrets to sync secrets from.",
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault.",
owner: "External integration providers service entity owner. Used in Github.",
targetEnvironment:
"The target environment of the integration provider. Used in cloudflare pages, TeamCity, Gitlab integrations.",

@@ -54,3 +54,12 @@ export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2:

return set1.size >= set2.size;
};

export enum PermissionConditionOperators {
$IN = "$in",
$ALL = "$all",
$REGEX = "$regex",
$EQ = "$eq",
$NEQ = "$ne",
$GLOB = "$glob"
}

@@ -1,7 +1,7 @@
import { Logger } from "pino";
import { z } from "zod";

import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";

export const GITLAB_URL = "https://gitlab.com";
@@ -10,7 +10,7 @@ export const GITLAB_URL = "https://gitlab.com";
export const IS_PACKAGED = (process as any)?.pkg !== undefined;

const zodStrBool = z
.enum(["true", "false"])
.string()
.optional()
.transform((val) => val === "true");

@@ -166,8 +166,7 @@ const envSchema = z
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),

PLAIN_API_KEY: zpStr(z.string().optional()),
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
PYLON_API_KEY: zpStr(z.string().optional()),
DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"),
WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string().optional()),
@@ -178,7 +177,10 @@ const envSchema = z
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0)
HSM_SLOT: z.coerce.number().optional().default(0),

USE_PG_QUEUE: zodStrBool.default("false"),
SHOULD_INIT_PG_QUEUE: zodStrBool.default("false")
})
// To ensure that basic encryption is always possible.
.refine(
@@ -212,7 +214,7 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;

export const getConfig = () => envCfg;
// cannot import singleton logger directly as it needs config to load various transport
export const initEnvConfig = (logger?: Logger) => {
export const initEnvConfig = (logger?: CustomLogger) => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
(logger ?? console).error("Invalid environment variables. Check the error below");

@@ -1,6 +1,8 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
// logger follows a singleton pattern
// easier to use it that's all.
import { requestContext } from "@fastify/request-context";
import pino, { Logger } from "pino";
import { z } from "zod";

@@ -13,14 +15,37 @@ const logLevelToSeverityLookup: Record<string, string> = {
"60": "CRITICAL"
};

// eslint-disable-next-line import/no-mutable-exports
export let logger: Readonly<Logger>;
// akhilmhdh:
// The logger is not placed in the main app config to avoid a circular dependency.
// The config requires the logger to display errors when an invalid environment is supplied.
// On the other hand, the logger needs the config to obtain credentials for AWS or other transports.
// By keeping the logger separate, it becomes an independent package.

// We define our own custom logger interface to enforce structure to the logging methods.

export interface CustomLogger extends Omit<Logger, "info" | "error" | "warn" | "debug"> {
info: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};

error: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};
warn: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};
debug: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(obj: unknown, msg?: string, ...args: any[]): void;
};
}

// eslint-disable-next-line import/no-mutable-exports
export let logger: Readonly<CustomLogger>;

const loggerConfig = z.object({
AWS_CLOUDWATCH_LOG_GROUP_NAME: z.string().default("infisical-log-stream"),
AWS_CLOUDWATCH_LOG_REGION: z.string().default("us-east-1"),
@@ -62,6 +87,17 @@ const redactedKeys = [
"config"
];

const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";

const extractReqId = () => {
try {
return requestContext.get("reqId") || UNKNOWN_REQUEST_ID;
} catch (err) {
console.log("failed to get request context", err);
return UNKNOWN_REQUEST_ID;
}
};

export const initLogger = async () => {
const cfg = loggerConfig.parse(process.env);
const targets: pino.TransportMultiOptions["targets"][number][] = [
@@ -94,6 +130,30 @@ export const initLogger = async () => {
targets
});

const wrapLogger = (originalLogger: Logger): CustomLogger => {
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).info(obj, msg, ...args);
};

// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).error(obj, msg, ...args);
};

// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).warn(obj, msg, ...args);
};

// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).debug(obj, msg, ...args);
};

return originalLogger;
};

logger = pino(
{
mixin(_context, level) {
@@ -113,5 +173,6 @@ export const initLogger = async () => {
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
transport
);
return logger;
return wrapLogger(logger);
};