Mirror of https://github.com/Infisical/infisical.git
Synced 2025-03-24 21:44:53 +00:00

Compare commits (123 commits)
Branch: misc/addre...
Author: vmatsiiako
Commits (SHA1):
5305017ce2 ad8d247cdc 33411335ed 728f023263 229706f57f 6cf2488326 92ce05283b 39d92ce6ff
44a026446e 539e5b1907 44b02d5324 71fb6f1d11 e64100fab1 5bcf07b32b 3b0c48052b df50e3b0f9
bdf2ae40b6 b6c05a2f25 960efb9cf9 aa8d58abad cfb0cc4fea 7712df296c 7c38932121 69ad9845e1
7321c237d7 32430a6a16 f034adba76 463eb0014e 21403f6fe5 2f9e542b31 089d6812fd 71c9c0fa1e
46ad1d47a9 2b977eeb33 a692148597 b762816e66 cf275979ba 64bfa4f334 e3eb14bfd9 24b50651c9
1cd459fda7 38917327d9 d7b494c6f8 93208afb36 1a084d8fcf dd4f133c6c c41d27e1ae 1866ed8d23
7b3b232dde 9d618b4ae9 5330ab2171 662e588c22 90057d80ff 1eda7aaaac 00dcadbc08 7a7289ebd0
e5d4677fd6 bce3f3d676 300372fa98 47a4f8bae9 863719f296 7317dc1cf5 75df898e78 0de6add3f7
0c008b6393 0c3894496c 35fbd5d49d d03b453e3d 96e331b678 d4d468660d 75a4965928 660c09ded4
b5287d91c0 6a17763237 f2bd3daea2 7f70f96936 73e0a54518 0d295a2824 9a62efea4f 506c30bcdb
735ad4ff65 41e36dfcef 421d8578b7 6685f8aa0a d6c37c1065 54f3f94185 907537f7c0 61263b9384
d71c85e052 b6d8be2105 0693f81d0a 61d516ef35 31fc64fb4c 8bf7e4c4d1 2027d4b44e d401c9074e
afe35dbbb5 6ff1602fd5 6603364749 53bea22b85 7c84adc1c2 fa8d6735a1 a6137f267d d521ee7b7e
3be3d807d2 9f7ea3c4e5 e67218f170 269c40c67c 089a7e880b 64ec741f1a c98233ddaf d4cfd0b6ed
ba1fd8a3f7 e8f09d2c7b ada63b9e7d 3f6a0c77f1 9e4b66e215 8a14914bc3 fc3a409164 ffc58b0313
9a7e05369c 33b49f4466 60895537a7
Changed files:

.env.example
.husky/
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
backend/
    Dockerfile
    Dockerfile.dev
    main.ts
    e2e-test/mocks/
    package-lock.json
    package.json
    scripts/
    src/
        @types/
        db/
            migrations/
                20241014084900_identity-multiple-auth-methods.ts
                20241110032223_add-missing-oidc-org-cascade-reference.ts
                20241119143026_add-project-descripton.ts
                20241121131344_make-identity-metadata-not-nullable-again.ts
            schemas/
        ee/services/
            dynamic-secret/providers/
                aws-elasticache.ts, aws-iam.ts, azure-entra-id.ts, cassandra.ts,
                elastic-search.ts, index.ts, ldap.ts, models.ts, mongo-atlas.ts,
                mongo-db.ts, rabbit-mq.ts, redis.ts, sap-ase.ts, sap-hana.ts,
                snowflake.ts, sql-database.ts, totp.ts
            hsm/
            ldap-config/
            license/
            oidc/
            permission/
            rate-limit/
            secret-scanning/secret-scanning-queue/
        lib/
            api-docs/
            casl/
            config/
            errors/
            logger/
            telemetry/
        server/
        services/
            auth/
            identity-aws-auth/
            identity-azure-auth/
            identity-gcp-auth/
            identity-kubernetes-auth/
            identity-oidc-auth/
            identity-project/
            identity-token-auth/
            integration-auth/
                integration-app-list.ts, integration-auth-service.ts,
                integration-auth-types.ts, integration-list.ts, integration-sync-secret.ts
            integration/
            project-membership/
            project/
            secret-v2-bridge/
            smtp/
            webhook/
cli/packages/
company/handbook/
docker-compose.dev.yml
docs/
    api-reference/endpoints/secrets/
    cli/
    documentation/platform/dynamic-secrets/
        aws-elasticache.mdx, aws-iam.mdx, azure-entra-id.mdx, cassandra.mdx,
        elastic-search.mdx, ldap.mdx, mongo-atlas.mdx, mongo-db.mdx, mssql.mdx,
        mysql.mdx, oracle.mdx, postgresql.mdx, rabbit-mq.mdx, redis.mdx,
        sap-ase.mdx, sap-hana.mdx, snowflake.mdx, totp.mdx
    images/
        integrations/octopus-deploy/
            integrations-octopus-deploy-add-role.png, integrations-octopus-deploy-add-to-team.png,
            integrations-octopus-deploy-authorize.png, integrations-octopus-deploy-copy-api-key.png,
            integrations-octopus-deploy-create-api-key.png, integrations-octopus-deploy-create-service-account.png,
            integrations-octopus-deploy-create-team.png, integrations-octopus-deploy-create.png,
            integrations-octopus-deploy-generate-api-key.png, integrations-octopus-deploy-integrations.png,
            integrations-octopus-deploy-save-team.png, integrations-octopus-deploy-sync.png,
            integrations-octopus-deploy-team-settings.png, integrations-octopus-deploy-user-settings.png
        mfa-email.png
        platform/dynamic-secrets/
    integrations/
    mint.json
    self-hosting/configuration/
frontend/
    next.config.js
    package-lock.json
    package.json
    public/
    src/
        components/
            mfa/
            notifications/
            v2/
                CreatableSelect/
                FilterableSelect/
                Select/components/
            projects/
        context/ProjectPermissionContext/
        hooks/api/
        layouts/AppLayout/
        pages/
        reactQuery.tsx
        styles/
        views/
            IntegrationsPage/
            Login/
            Org/
            Project/
                IdentityDetailsPage/components/IdentityRoleDetailsSection/
                MemberDetailsPage/components/MemberRoleDetailsSection/
            SecretMainPage/components/
                ActionBar/CreateDynamicSecretForm/
                CreateSecretForm/
                DynamicSecretListView/
            SecretOverviewPage/
            Settings/
                BillingSettingsPage/components/BillingDetailsTab/
                PersonalSettingsPage/SecuritySection/
                ProjectSettingsPage/components/
npm/
otel-collector-config.yaml
prometheus.dev.yml

.env.example
@@ -74,6 +74,14 @@ CAPTCHA_SECRET=
 NEXT_PUBLIC_CAPTCHA_SITE_KEY=
 
+OTEL_TELEMETRY_COLLECTION_ENABLED=false
+OTEL_EXPORT_TYPE=prometheus
+OTEL_EXPORT_OTLP_ENDPOINT=
+OTEL_OTLP_PUSH_INTERVAL=
+
+OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
+OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
+
 PLAIN_API_KEY=
 PLAIN_WISH_LABEL_IDS=
.husky/pre-commit
@@ -1,6 +1,12 @@
 #!/usr/bin/env sh
 . "$(dirname -- "$0")/_/husky.sh"
 
+# Check if infisical is installed
+if ! command -v infisical >/dev/null 2>&1; then
+    echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before committing.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
+    exit 1
+fi
+
 npx lint-staged
 
 infisical scan git-changes --staged -v
Dockerfile.fips.standalone-infisical
@@ -69,13 +69,21 @@ RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
 
 WORKDIR /app
 
-# Required for pkcs11js
+# Required for pkcs11js and ODBC
 RUN apt-get update && apt-get install -y \
     python3 \
     make \
     g++ \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    freetds-bin \
+    tdsodbc \
     && rm -rf /var/lib/apt/lists/*
 
+# Configure ODBC
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 COPY backend/package*.json ./
 RUN npm ci --only-production
 
@@ -91,13 +99,21 @@ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
 
 WORKDIR /app
 
-# Required for pkcs11js
+# Required for pkcs11js and ODBC
 RUN apt-get update && apt-get install -y \
     python3 \
     make \
     g++ \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    freetds-bin \
+    tdsodbc \
     && rm -rf /var/lib/apt/lists/*
 
+# Configure ODBC
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 COPY backend/package*.json ./
 RUN npm ci --only-production
 
@@ -108,13 +124,24 @@ RUN mkdir frontend-build
 
 # Production stage
 FROM base AS production
 
-# Install necessary packages
+# Install necessary packages including ODBC
 RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     git \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    unixodbc-dev \
+    freetds-dev \
+    freetds-bin \
+    tdsodbc \
     && rm -rf /var/lib/apt/lists/*
 
+# Configure ODBC in production
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 # Install Infisical CLI
 RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
     && apt-get update && apt-get install -y infisical=0.31.1 \
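Expanded for readability, the printf in these layers writes the following /etc/odbcinst.ini (the Alpine-based images later in this compare point Driver and Setup at /usr/lib/libtdsodbc.so instead):

[FreeTDS]
Description = FreeTDS Driver
Driver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so
Setup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so
FileUsage = 1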
Dockerfile.standalone-infisical
@@ -72,8 +72,16 @@ RUN addgroup --system --gid 1001 nodejs \
 
 WORKDIR /app
 
-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
+# Install all required dependencies for build
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
 
 COPY backend/package*.json ./
 RUN npm ci --only-production
 
@@ -88,8 +96,19 @@ FROM base AS backend-runner
 
 WORKDIR /app
 
-# Required for pkcs11js
-RUN apk add --no-cache python3 make g++
+# Install all required dependencies for runtime
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+# Configure ODBC
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
 
 COPY backend/package*.json ./
 RUN npm ci --only-production
 
@@ -100,11 +119,32 @@ RUN mkdir frontend-build
 
 # Production stage
 FROM base AS production
 
 RUN apk add --upgrade --no-cache ca-certificates
 RUN apk add --no-cache bash curl && curl -1sLf \
     'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
     && apk add infisical=0.31.1 && apk add --no-cache git
 
+WORKDIR /
+
+# Install all required runtime dependencies
+RUN apk --update add \
+    python3 \
+    make \
+    g++ \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev \
+    bash \
+    curl \
+    git
+
+# Configure ODBC in production
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
+# Setup user permissions
+RUN addgroup --system --gid 1001 nodejs \
+    && adduser --system --uid 1001 non-root-user
 
@@ -127,7 +167,6 @@ ARG CAPTCHA_SITE_KEY
 ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
     BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
 
-WORKDIR /
 
 COPY --from=backend-runner /app /backend
 
@@ -149,4 +188,4 @@ EXPOSE 443
 
 USER non-root-user
 
-CMD ["./standalone-entrypoint.sh"]
+CMD ["./standalone-entrypoint.sh"]
Makefile (4 changes)
@@ -10,6 +10,9 @@ up-dev:
 up-dev-ldap:
 	docker compose -f docker-compose.dev.yml --profile ldap up --build
 
+up-dev-metrics:
+	docker compose -f docker-compose.dev.yml --profile metrics up --build
+
 up-prod:
 	docker-compose -f docker-compose.prod.yml up --build
 
@@ -27,4 +30,3 @@ reviewable-api:
 	npm run type:check
 
 reviewable: reviewable-ui reviewable-api
-
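Assuming the metrics profile is defined in docker-compose.dev.yml (it presumably wires up the otel-collector-config.yaml and prometheus.dev.yml files added at the repo root), the new target is invoked as:

make up-dev-metrics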
backend/Dockerfile
@@ -9,6 +9,15 @@ RUN apk --update add \
     make \
     g++
 
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+
 COPY package*.json ./
 RUN npm ci --only-production
 
@@ -28,6 +37,17 @@ RUN apk --update add \
     make \
     g++
 
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 RUN npm ci --only-production && npm cache clean --force
 
 COPY --from=build /app .
backend/Dockerfile.dev
@@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0
 
 ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
     SOFTHSM2_SOURCES=/tmp/softhsm2
 
-# install build dependencies including python3
+# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
 RUN apk --update add \
     alpine-sdk \
     autoconf \
@@ -19,7 +19,19 @@ RUN apk --update add \
     make \
     g++
 
+# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
+RUN apk add --no-cache \
+    unixodbc \
+    freetds \
+    unixodbc-dev \
+    libc-dev \
+    freetds-dev
+
+
+RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
+
 # build and install SoftHSM2
 
 RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
 WORKDIR ${SOFTHSM2_SOURCES}
backend/e2e-test/mocks (SMTP service mock)
@@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
   return {
     sendMail: async (data) => {
       storage.push(data);
     },
+    verify: async () => {
+      return true;
+    }
   };
 };
backend/package-lock.json (generated, 1660 changes): file diff suppressed because it is too large.
backend/package.json
@@ -50,6 +50,7 @@
     "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
     "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
     "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
     "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
     "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
@@ -58,6 +59,7 @@
     "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
     "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
     "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
     "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@@ -130,6 +132,7 @@
     "@fastify/multipart": "8.3.0",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
+    "@fastify/request-context": "^5.1.0",
     "@fastify/session": "^10.7.0",
     "@fastify/swagger": "^8.14.0",
     "@fastify/swagger-ui": "^2.1.0",
@@ -138,6 +141,15 @@
     "@octokit/plugin-retry": "^5.0.5",
     "@octokit/rest": "^20.0.2",
     "@octokit/webhooks-types": "^7.3.1",
+    "@octopusdeploy/api-client": "^3.4.1",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.53.0",
+    "@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
+    "@opentelemetry/exporter-prometheus": "^0.55.0",
+    "@opentelemetry/instrumentation": "^0.55.0",
+    "@opentelemetry/resources": "^1.28.0",
+    "@opentelemetry/sdk-metrics": "^1.28.0",
+    "@opentelemetry/semantic-conventions": "^1.27.0",
     "@peculiar/asn1-schema": "^2.3.8",
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
@@ -178,6 +190,7 @@
     "mysql2": "^3.9.8",
     "nanoid": "^3.3.4",
     "nodemailer": "^6.9.9",
+    "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
     "ora": "^7.0.1",
     "oracledb": "^6.4.0",
backend/scripts/migrate-organization.ts
@@ -8,61 +8,80 @@ const prompt = promptSync({
   sigint: true
 });
 
+const sanitizeInputParam = (value: string) => {
+  // Escape double quotes and wrap the entire value in double quotes
+  if (value) {
+    return `"${value.replace(/"/g, '\\"')}"`;
+  }
+  return '""';
+};
+
 const exportDb = () => {
-  const exportHost = prompt("Enter your Postgres Host to migrate from: ");
-  const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
-  const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
-  const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
-  const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
+  const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
+  const exportPort = sanitizeInputParam(
+    prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
+  );
+  const exportUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
+  );
+  const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
+  const exportDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
+  );
 
   // we do not include the audit_log and secret_sharing entries
   execSync(
-    `PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
+    `PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
       __dirname,
-      "../src/db/dump.sql"
+      "../src/db/backup.dump"
     )}`,
     { stdio: "inherit" }
   );
 };
 
 const importDbForOrg = () => {
-  const importHost = prompt("Enter your Postgres Host to migrate to: ");
-  const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
-  const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
-  const importPassword = prompt("Enter your Postgres Password to migrate to: ");
-  const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
-  const orgId = prompt("Enter the organization ID to migrate: ");
+  const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
+  const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
+  const importUser = sanitizeInputParam(
+    prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
+  );
+  const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
+  const importDatabase = sanitizeInputParam(
+    prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
+  );
+  const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
 
-  if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
+  if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
     console.log("File not found, please export the database first.");
     return;
   }
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
       __dirname,
-      "../src/db/dump.sql"
-    )}`
+      "../src/db/backup.dump"
+    )}`,
+    { maxBuffer: 1024 * 1024 * 4096 }
   );
 
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
   );
 
   // delete global/instance-level resources not relevant to the organization to migrate
   // users
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
   );
 
   // identities
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
   );
 
   // reset slack configuration in superAdmin
   execSync(
-    `PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
+    `PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
   );
 
   console.log("Organization migrated successfully.");
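A quick sketch of what the new sanitizeInputParam guards against (hypothetical values, not part of the diff): each prompt result is interpolated into an execSync shell string, so the helper supplies the double quotes itself and escapes any embedded ones, keeping the value a single shell token:

sanitizeInputParam("infisical");   // -> "infisical"
sanitizeInputParam('p@ss"word');   // -> "p@ss\"word"  (embedded quote escaped)
sanitizeInputParam("");            // -> ""            (empty quoted string)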
backend/src/@types/fastify-request-context.d.ts (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
+import "@fastify/request-context";
+
+declare module "@fastify/request-context" {
+  interface RequestContextData {
+    requestId: string;
+  }
+}
backend/src/@types/fastify-zod.d.ts (vendored, 4 changes)
@@ -1,6 +1,6 @@
 import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
-import { Logger } from "pino";
 
+import { CustomLogger } from "@app/lib/logger/logger";
 import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
 
 declare global {
@@ -8,7 +8,7 @@ declare global {
     RawServerDefault,
     RawRequestDefaultExpression<RawServerDefault>,
     RawReplyDefaultExpression<RawServerDefault>,
-    Readonly<Logger>,
+    Readonly<CustomLogger>,
     ZodTypeProvider
   >;
backend/src/db/migrations/20241014084900_identity-multiple-auth-methods.ts
@@ -2,7 +2,7 @@ import { Knex } from "knex";
 
 import { TableName } from "../schemas";
 
-const BATCH_SIZE = 30_000;
+const BATCH_SIZE = 10_000;
 
 export async function up(knex: Knex): Promise<void> {
   const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
@@ -12,7 +12,18 @@ export async function up(knex: Knex): Promise<void> {
       t.string("authMethod").nullable();
     });
 
-    let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+    // first we remove identities without auth method that is unused
+    // ! We delete all access tokens where the identity has no auth method set!
+    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
+    await knex(TableName.IdentityAccessToken)
+      .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
+      .whereNull(`${TableName.Identity}.authMethod`)
+      .delete();
+
+    let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+      .whereNull("authMethod")
+      .limit(BATCH_SIZE)
+      .select("id");
     let totalUpdated = 0;
 
     do {
@@ -33,24 +44,15 @@ export async function up(knex: Knex): Promise<void> {
       });
 
       // eslint-disable-next-line no-await-in-loop
-      nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
+      nullableAccessTokens = await knex(TableName.IdentityAccessToken)
+        .whereNull("authMethod")
+        .limit(BATCH_SIZE)
+        .select("id");
 
       totalUpdated += batchIds.length;
       console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
     } while (nullableAccessTokens.length > 0);
 
-    // ! We delete all access tokens where the identity has no auth method set!
-    // ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
-    await knex(TableName.IdentityAccessToken)
-      .whereNotExists((queryBuilder) => {
-        void queryBuilder
-          .select("id")
-          .from(TableName.Identity)
-          .whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
-          .whereNotNull("authMethod");
-      })
-      .delete();
-
     // Finally we set the authMethod to notNullable after populating the column.
     // This will fail if the data is not populated correctly, so it's safe.
     await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
backend/src/db/migrations/20241110032223_add-missing-oidc-org-cascade-reference.ts (new file)
@@ -0,0 +1,21 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
+    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
+      t.dropForeign("orgId");
+      t.foreign("orgId").references("id").inTable(TableName.Organization);
+    });
+  }
+}
backend/src/db/migrations/20241119143026_add-project-descripton.ts (new file)
@@ -0,0 +1,23 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
+
+  if (!hasProjectDescription) {
+    await knex.schema.alterTable(TableName.Project, (t) => {
+      t.string("description");
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
+
+  if (hasProjectDescription) {
+    await knex.schema.alterTable(TableName.Project, (t) => {
+      t.dropColumn("description");
+    });
+  }
+}
backend/src/db/migrations/20241121131344_make-identity-metadata-not-nullable-again.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
+    await knex(TableName.IdentityMetadata).whereNull("value").delete();
+    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
+      t.string("value", 1020).notNullable().alter();
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
+    await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
+      t.string("value", 1020).alter();
+    });
+  }
+}
backend/src/db/schemas/kms-root-config.ts
@@ -12,7 +12,7 @@ import { TImmutableDBKeys } from "./models";
 export const KmsRootConfigSchema = z.object({
   id: z.string().uuid(),
   encryptedRootKey: zodBuffer,
-  encryptionStrategy: z.string(),
+  encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date()
 });
backend/src/db/schemas/projects.ts
@@ -23,7 +23,8 @@ export const ProjectsSchema = z.object({
   kmsCertificateKeyId: z.string().uuid().nullable().optional(),
   auditLogsRetentionDays: z.number().nullable().optional(),
   kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
-  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
+  kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
+  description: z.string().nullable().optional()
 });
 
 export type TProjects = z.infer<typeof ProjectsSchema>;
backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts
@@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
     }
   };
 
-  const addUserToInfisicalGroup = async (userId: string) => {
+  const $addUserToInfisicalGroup = async (userId: string) => {
     // figure out if the default user is already in the group, if it is, then we shouldn't add it again
 
     const addUserToGroupCommand = new ModifyUserGroupCommand({
@@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
     await ensureInfisicalGroupExists(clusterName);
 
     await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
-    await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
+    await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
 
     return {
       userId: creationInput.UserId,
@@ -212,7 +212,7 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+    // No renewal necessary
     return { entityId };
   };
backend/src/ee/services/dynamic-secret/providers/aws-iam.ts
@@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
     const client = new IAMClient({
       region: providerInputs.region,
       credentials: {
@@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const isConnected = await client.send(new GetUserCommand({})).then(() => true);
     return isConnected;
@@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = generateUsername();
     const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = entityId;
 
@@ -179,9 +179,8 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (_inputs: unknown, entityId: string) => {
-    // do nothing
-    const username = entityId;
-    return { entityId: username };
+    // No renewal necessary
+    return { entityId };
   };
 
   return {
backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts
@@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
     return providerInputs;
   };
 
-  const getToken = async (
+  const $getToken = async (
     tenantId: string,
     applicationId: string,
     clientSecret: string
@@ -51,18 +51,13 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
     return data.success;
   };
 
-  const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
-    return { entityId };
-  };
-
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
+    const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
     if (!data.success) {
       throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
     }
@@ -98,7 +93,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
   };
 
   const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
-    const data = await getToken(tenantId, applicationId, clientSecret);
+    const data = await $getToken(tenantId, applicationId, clientSecret);
     if (!data.success) {
       throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
     }
@@ -127,6 +122,11 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
     return users;
   };
 
+  const renew = async (inputs: unknown, entityId: string) => {
+    // No renewal necessary
+    return { entityId };
+  };
+
   return {
     validateProviderInputs,
     validateConnection,
backend/src/ee/services/dynamic-secret/providers/cassandra.ts
@@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
     const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
     const client = new cassandra.Client({
       sslOptions,
@@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
     await client.shutdown();
@@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = entityId;
     const { keyspace } = providerInputs;
@@ -99,20 +99,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const client = await $getClient(providerInputs);
 
-    const username = entityId;
     const expiration = new Date(expireAt).toISOString();
     const { keyspace } = providerInputs;
 
-    const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
+    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
+      username: entityId,
+      keyspace,
+      expiration
+    });
     const queries = renewStatement.toString().split(";").filter(Boolean);
-    for (const query of queries) {
+    // eslint-disable-next-line
+    for await (const query of queries) {
       await client.execute(query);
     }
     await client.shutdown();
-    return { entityId: username };
+    return { entityId };
   };
 
   return {
backend/src/ee/services/dynamic-secret/providers/elastic-search.ts
@@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
     const connection = new ElasticSearchClient({
       node: {
         url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const infoResponse = await connection
       .info()
@@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     await connection.security.deleteUser({
       username: entityId
@@ -96,7 +96,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+    // No renewal necessary
    return { entityId };
   };
backend/src/ee/services/dynamic-secret/providers/index.ts
@@ -6,15 +6,17 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
 import { CassandraProvider } from "./cassandra";
 import { ElasticSearchProvider } from "./elastic-search";
 import { LdapProvider } from "./ldap";
-import { DynamicSecretProviders } from "./models";
+import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
 import { MongoAtlasProvider } from "./mongo-atlas";
 import { MongoDBProvider } from "./mongo-db";
 import { RabbitMqProvider } from "./rabbit-mq";
 import { RedisDatabaseProvider } from "./redis";
+import { SapAseProvider } from "./sap-ase";
 import { SapHanaProvider } from "./sap-hana";
 import { SqlDatabaseProvider } from "./sql-database";
+import { TotpProvider } from "./totp";
 
-export const buildDynamicSecretProviders = () => ({
+export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
   [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
   [DynamicSecretProviders.Cassandra]: CassandraProvider(),
   [DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@@ -27,5 +29,7 @@ export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
   [DynamicSecretProviders.Ldap]: LdapProvider(),
   [DynamicSecretProviders.SapHana]: SapHanaProvider(),
-  [DynamicSecretProviders.Snowflake]: SnowflakeProvider()
+  [DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
+  [DynamicSecretProviders.Totp]: TotpProvider(),
+  [DynamicSecretProviders.SapAse]: SapAseProvider()
 });
backend/src/ee/services/dynamic-secret/providers/ldap.ts
@@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
+  const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
     return new Promise((resolve, reject) => {
       const client = ldapjs.createClient({
         url: providerInputs.url,
@@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
     return client.connected;
   };
 
@@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     if (providerInputs.credentialType === LdapCredentialType.Static) {
       const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     if (providerInputs.credentialType === LdapCredentialType.Static) {
       const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+    // No renewal necessary
     return { entityId };
   };
backend/src/ee/services/dynamic-secret/providers/models.ts
@@ -4,7 +4,8 @@ export enum SqlProviders {
   Postgres = "postgres",
   MySQL = "mysql2",
   Oracle = "oracledb",
-  MsSQL = "mssql"
+  MsSQL = "mssql",
+  SapAse = "sap-ase"
 }
 
 export enum ElasticSearchAuthTypes {
@@ -17,6 +18,17 @@ export enum LdapCredentialType {
   Static = "static"
 }
 
+export enum TotpConfigType {
+  URL = "url",
+  MANUAL = "manual"
+}
+
+export enum TotpAlgorithm {
+  SHA1 = "sha1",
+  SHA256 = "sha256",
+  SHA512 = "sha512"
+}
+
 export const DynamicSecretRedisDBSchema = z.object({
   host: z.string().trim().toLowerCase(),
   port: z.number(),
@@ -107,6 +119,16 @@ export const DynamicSecretCassandraSchema = z.object({
   ca: z.string().optional()
 });
 
+export const DynamicSecretSapAseSchema = z.object({
+  host: z.string().trim().toLowerCase(),
+  port: z.number(),
+  database: z.string().trim(),
+  username: z.string().trim(),
+  password: z.string().trim(),
+  creationStatement: z.string().trim(),
+  revocationStatement: z.string().trim()
+});
+
 export const DynamicSecretAwsIamSchema = z.object({
   accessKey: z.string().trim().min(1),
   secretAccessKey: z.string().trim().min(1),
@@ -221,6 +243,34 @@ export const LdapSchema = z.union([
   })
 ]);
 
+export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
+  z.object({
+    configType: z.literal(TotpConfigType.URL),
+    url: z
+      .string()
+      .url()
+      .trim()
+      .min(1)
+      .refine((val) => {
+        const urlObj = new URL(val);
+        const secret = urlObj.searchParams.get("secret");
+
+        return Boolean(secret);
+      }, "OTP URL must contain secret field")
+  }),
+  z.object({
+    configType: z.literal(TotpConfigType.MANUAL),
+    secret: z
+      .string()
+      .trim()
+      .min(1)
+      .transform((val) => val.replace(/\s+/g, "")),
+    period: z.number().optional(),
+    algorithm: z.nativeEnum(TotpAlgorithm).optional(),
+    digits: z.number().optional()
+  })
+]);
+
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
   Cassandra = "cassandra",
@@ -234,12 +284,15 @@ export enum DynamicSecretProviders {
   AzureEntraID = "azure-entra-id",
   Ldap = "ldap",
   SapHana = "sap-hana",
-  Snowflake = "snowflake"
+  Snowflake = "snowflake",
+  Totp = "totp",
+  SapAse = "sap-ase"
 }
 
 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
@@ -250,7 +303,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
   z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
 ]);
 
 export type TDynamicProviderFns = {
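To illustrate the URL variant of DynamicSecretTotpSchema above (hypothetical values, not part of the diff): the refine step only checks that the URL's query string carries a secret parameter, so a standard otpauth URI passes:

const input = {
  configType: "url", // TotpConfigType.URL
  url: "otpauth://totp/Example:alice@example.com?secret=JBSWY3DPEHPK3PXP&issuer=Example"
};
// DynamicSecretTotpSchema.parse(input) would succeed here;
// dropping the secret parameter would fail with "OTP URL must contain secret field".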
backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts
@@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
     const client = axios.create({
       baseURL: "https://cloud.mongodb.com/api/atlas",
       headers: {
@@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const isConnected = await client({
       method: "GET",
@@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = entityId;
     const isExisting = await client({
@@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = entityId;
     const expiration = new Date(expireAt).toISOString();
backend/src/ee/services/dynamic-secret/providers/mongo-db.ts
@@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
     const isSrv = !providerInputs.port;
     const uri = isSrv
       ? `mongodb+srv://${providerInputs.host}`
@@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const isConnected = await client
       .db(providerInputs.database)
@@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
 
     const username = entityId;
 
@@ -88,6 +88,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (_inputs: unknown, entityId: string) => {
+    // No renewal necessary
     return { entityId };
   };
backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts
@@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
     const axiosInstance = axios.create({
       baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
       auth: {
@@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const infoResponse = await connection.get("/whoami").then(() => true);
 
@@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -134,7 +134,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
 
@@ -142,7 +142,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
   };
 
   const renew = async (inputs: unknown, entityId: string) => {
-    // Do nothing
+    // No renewal necessary
     return { entityId };
   };
backend/src/ee/services/dynamic-secret/providers/redis.ts
@@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };
 
-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
     let connection: Redis | null = null;
     try {
       connection = new Redis({
@@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const pingResponse = await connection
       .ping()
@@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const username = generateUsername();
     const password = generatePassword();
@@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    const connection = await $getClient(providerInputs);
 
     const username = entityId;
 
@@ -141,7 +141,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
 
   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const connection = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const connection = await $getClient(providerInputs);
 
     const username = entityId;
     const expiration = new Date(expireAt).toISOString();
backend/src/ee/services/dynamic-secret/providers/sap-ase.ts (new file, 145 lines)
@@ -0,0 +1,145 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
  const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  return customAlphabet(charset, 48)(size);
};

const generateUsername = () => {
  return alphaNumericNanoId(25);
};

enum SapCommands {
  CreateLogin = "sp_addlogin",
  DropLogin = "sp_droplogin"
}

export const SapAseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
    const connectionString =
      `DRIVER={FreeTDS};` +
      `SERVER=${providerInputs.host};` +
      `PORT=${providerInputs.port};` +
      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
      `UID=${providerInputs.username};` +
      `PWD=${providerInputs.password};` +
      `TDS_VERSION=5.0`;

    const client = await odbc.connect(connectionString);

    return client;
  };

  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const masterClient = await $getClient(providerInputs, true);
    const client = await $getClient(providerInputs);

    const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
    const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");

    if (!resultFromSelectedDatabase.version) {
      throw new BadRequestError({
        message: "Failed to validate SAP ASE connection, version query failed"
      });
    }

    if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
      throw new BadRequestError({
        message: "Failed to validate SAP ASE connection (master), version mismatch"
      });
    }

    return true;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);

    const username = `inf_${generateUsername()}`;
    const password = `${generatePassword()}`;

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);

    const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
      username,
      password
    });

    const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

    for await (const query of queries) {
      // If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
      // If not done, then the newly created user won't be able to authenticate.
      await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
    }

    await masterClient.close();
    await client.close();

    return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
  };

  const revoke = async (inputs: unknown, username: string) => {
    const providerInputs = await validateProviderInputs(inputs);

    const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
      username
    });

    const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);

    const client = await $getClient(providerInputs);
    const masterClient = await $getClient(providerInputs, true);

    // Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
    const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);

    if (result && result.length > 0) {
      for await (const row of result) {
        if (row.spid) {
          await masterClient.query(`KILL ${row.spid.trim()}`);
        }
      }
    }

    for await (const query of queries) {
      await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
    }

    await masterClient.close();
    await client.close();

    return { entityId: username };
  };

  const renew = async (_: unknown, username: string) => {
    // No need for renewal
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
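Note: the new SAP ASE provider follows the same lifecycle contract as the other dynamic secret providers. A minimal sketch of driving it directly — the input values below are invented placeholders, and the real input shape is whatever DynamicSecretSapAseSchema accepts:

import { SapAseProvider } from "./sap-ase";

const inputs = {
  host: "ase.internal.example.com", // placeholder host
  port: 5000,
  database: "app_db",
  username: "sa",
  password: "<admin-password>",
  creationStatement: "sp_addlogin {{username}}, {{password}};sp_adduser {{username}}",
  revocationStatement: "sp_dropuser {{username}};sp_droplogin {{username}}"
};

const provider = SapAseProvider();
await provider.validateConnection(inputs);
const lease = await provider.create(inputs);
// lease.data -> { DB_USERNAME: "inf_…", DB_PASSWORD: "…" }
await provider.revoke(inputs, lease.entityId);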
@@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
     const client = hdb.createClient({
       host: providerInputs.host,
       port: providerInputs.port,
@@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

-    const testResult: boolean = await new Promise((resolve, reject) => {
+    const testResult = await new Promise<boolean>((resolve, reject) => {
       client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
         if (err) {
           reject();
@@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     const password = generatePassword();
     const expiration = new Date(expireAt).toISOString();

-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
     const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
       username,
       password,
@@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {

   const revoke = async (inputs: unknown, username: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);
     const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
     const queries = revokeStatement.toString().split(";").filter(Boolean);
     for await (const query of queries) {
@@ -135,13 +135,15 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
     return { entityId: username };
   };

-  const renew = async (inputs: unknown, username: string, expireAt: number) => {
+  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const client = await $getClient(providerInputs);
     try {
       const expiration = new Date(expireAt).toISOString();

-      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
+      const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
       const queries = renewStatement.toString().split(";").filter(Boolean);
       for await (const query of queries) {
         await new Promise((resolve, reject) => {
@@ -161,7 +163,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
       client.disconnect();
     }

-    return { entityId: username };
+    return { entityId };
   };

   return {
@@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
     const client = snowflake.createConnection({
       account: `${providerInputs.orgId}-${providerInputs.accountId}`,
       username: providerInputs.username,
@@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     let isValidConnection: boolean;

@@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);

-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     const username = generateUsername();
     const password = generatePassword();
@@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
   const revoke = async (inputs: unknown, username: string) => {
     const providerInputs = await validateProviderInputs(inputs);

-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     try {
       const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
@@ -131,17 +131,16 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
     return { entityId: username };
   };

-  const renew = async (inputs: unknown, username: string, expireAt: number) => {
+  const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
+    if (!providerInputs.renewStatement) return { entityId };

-    if (!providerInputs.renewStatement) return { entityId: username };
-
-    const client = await getClient(providerInputs);
+    const client = await $getClient(providerInputs);

     try {
       const expiration = getDaysToExpiry(new Date(expireAt));
       const renewStatement = handlebars.compile(providerInputs.renewStatement)({
-        username,
+        username: entityId,
         expiration
       });

@@ -161,7 +160,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
       client.destroy(noop);
     }

-    return { entityId: username };
+    return { entityId };
   };

   return {
@@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
     return providerInputs;
   };

-  const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
+  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
     const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
     const db = knex({
       client: providerInputs.client,
@@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

   const validateConnection = async (inputs: unknown) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);
     // oracle needs from keyword
     const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";

@@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

   const create = async (inputs: unknown, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);

     const username = generateUsername(providerInputs.client);
     const password = generatePassword(providerInputs.client);
@@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

   const revoke = async (inputs: unknown, entityId: string) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    const db = await $getClient(providerInputs);

     const username = entityId;
     const { database } = providerInputs;
@@ -110,13 +110,19 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {

   const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
     const providerInputs = await validateProviderInputs(inputs);
-    const db = await getClient(providerInputs);
+    if (!providerInputs.renewStatement) return { entityId };
+
+    const db = await $getClient(providerInputs);

     const username = entityId;
     const expiration = new Date(expireAt).toISOString();
     const { database } = providerInputs;

-    const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
+    const renewStatement = handlebars.compile(providerInputs.renewStatement)({
+      username: entityId,
+      expiration,
+      database
+    });

     if (renewStatement) {
       const queries = renewStatement.toString().split(";").filter(Boolean);
       await db.transaction(async (tx) => {
@@ -128,7 +134,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
     }

     await db.destroy();
-    return { entityId: username };
+    return { entityId };
   };

   return {
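Note: across these providers the renew path now bails out early when no renewStatement is configured, and otherwise expands the statement with handlebars. A sketch of that expansion in isolation — the template and values are illustrative, not shipped defaults:

import handlebars from "handlebars";

const template = `ALTER USER "{{username}}" VALID UNTIL '{{expiration}}';`; // illustrative template
const renewStatement = handlebars.compile(template)({
  username: "inf_x8Kq...", // hypothetical generated username
  expiration: new Date(Date.now() + 3_600_000).toISOString(),
  database: "app_db" // hypothetical
});
const queries = renewStatement.toString().split(";").filter(Boolean);
// queries[0] -> `ALTER USER "inf_x8Kq..." VALID UNTIL '2024-…'`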
backend/src/ee/services/dynamic-secret/providers/totp.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";

import { alphaNumericNanoId } from "@app/lib/nanoid";

import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";

export const TotpProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);

    return providerInputs;
  };

  const validateConnection = async () => {
    return true;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);

    const entityId = alphaNumericNanoId(32);
    const authenticatorInstance = authenticator.clone();

    let secret: string;
    let period: number | null | undefined;
    let digits: number | null | undefined;
    let algorithm: HashAlgorithms | null | undefined;

    if (providerInputs.configType === TotpConfigType.URL) {
      const urlObj = new URL(providerInputs.url);
      secret = urlObj.searchParams.get("secret") as string;
      const periodFromUrl = urlObj.searchParams.get("period");
      const digitsFromUrl = urlObj.searchParams.get("digits");
      const algorithmFromUrl = urlObj.searchParams.get("algorithm");

      if (periodFromUrl) {
        period = +periodFromUrl;
      }

      if (digitsFromUrl) {
        digits = +digitsFromUrl;
      }

      if (algorithmFromUrl) {
        algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
      }
    } else {
      secret = providerInputs.secret;
      period = providerInputs.period;
      digits = providerInputs.digits;
      algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
    }

    if (digits) {
      authenticatorInstance.options = { digits };
    }

    if (algorithm) {
      authenticatorInstance.options = { algorithm };
    }

    if (period) {
      authenticatorInstance.options = { step: period };
    }

    return {
      entityId,
      data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
    };
  };

  const revoke = async (_inputs: unknown, entityId: string) => {
    return { entityId };
  };

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const renew = async (_inputs: unknown, entityId: string) => {
    // No renewal necessary
    return { entityId };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
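Note: a sketch of the URL-based configuration path; the otpauth URL and the configType value are illustrative (the accepted values come from DynamicSecretTotpSchema and TotpConfigType):

import { TotpProvider } from "./totp";

const provider = TotpProvider();
const { entityId, data } = await provider.create({
  configType: "url", // assumed enum value for TotpConfigType.URL
  url: "otpauth://totp/Example:alice@example.com?secret=JBSWY3DPEHPK3PXP&period=30&digits=6&algorithm=SHA1"
});
// data.TOTP is the current one-time code; data.TIME_REMAINING is the seconds left in the step window.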
@@ -27,7 +27,7 @@ export const initializeHsmModule = () => {

     logger.info("PKCS#11 module initialized");
   } catch (err) {
-    logger.error("Failed to initialize PKCS#11 module:", err);
+    logger.error(err, "Failed to initialize PKCS#11 module");
     throw err;
   }
 };
@@ -39,7 +39,7 @@ export const initializeHsmModule = () => {
       isInitialized = false;
       logger.info("PKCS#11 module finalized");
     } catch (err) {
-      logger.error("Failed to finalize PKCS#11 module:", err);
+      logger.error(err, "Failed to finalize PKCS#11 module");
       throw err;
     }
   }
@@ -36,8 +36,7 @@ export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean>
     });

     ldapClient.on("error", (err) => {
-      logger.error("LDAP client error:", err);
-      logger.error(err);
+      logger.error(err, "LDAP client error");
       resolve(false);
     });

@@ -161,8 +161,8 @@ export const licenseServiceFactory = ({
       }
     } catch (error) {
       logger.error(
-        `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`,
-        error
+        error,
+        `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]`
       );
       await keyStore.setItemWithExpiry(
         FEATURE_CACHE_KEY(orgId),
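Note: the logging changes in this and the following files all correct the same mistake. pino treats the first argument as the merge object and the second as the message, so passing the message first leaves the error as a trailing interpolation argument, where an Error mostly stringifies to "{}" and its stack is lost. Sketch:

import pino from "pino";

const logger = pino();
const err = new Error("boom");

logger.error("Failed to fetch plan:", err); // wrong: err is not the merge object, so its stack is not serialized
logger.error(err, "Failed to fetch plan"); // right: pino serializes err (message + stack) under the err key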
@@ -17,7 +17,7 @@ import {
   infisicalSymmetricDecrypt,
   infisicalSymmetricEncypt
 } from "@app/lib/crypto/encryption";
-import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
+import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
 import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
 import { TokenType } from "@app/services/auth-token/auth-token-types";
@@ -56,7 +56,7 @@ type TOidcConfigServiceFactoryDep = {
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
   tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
-  smtpService: Pick<TSmtpService, "sendMail">;
+  smtpService: Pick<TSmtpService, "sendMail" | "verify">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
 };
@@ -223,6 +223,7 @@ export const oidcConfigServiceFactory = ({
       let newUser: TUsers | undefined;

       if (serverCfg.trustOidcEmails) {
         // we prioritize getting the most complete user to create the new alias under
         newUser = await userDAL.findOne(
           {
             email,
@@ -230,6 +231,23 @@ export const oidcConfigServiceFactory = ({
           },
           tx
         );
+
+        if (!newUser) {
+          // this fetches user entries created via invites
+          newUser = await userDAL.findOne(
+            {
+              username: email
+            },
+            tx
+          );
+
+          if (newUser && !newUser.isEmailVerified) {
+            // we automatically mark it as email-verified because we've configured trust for OIDC emails
+            newUser = await userDAL.updateById(newUser.id, {
+              isEmailVerified: true
+            });
+          }
+        }
       }

       if (!newUser) {
@@ -332,14 +350,20 @@ export const oidcConfigServiceFactory = ({
         userId: user.id
       });

-      await smtpService.sendMail({
-        template: SmtpTemplates.EmailVerification,
-        subjectLine: "Infisical confirmation code",
-        recipients: [user.email],
-        substitutions: {
-          code: token
-        }
-      });
+      await smtpService
+        .sendMail({
+          template: SmtpTemplates.EmailVerification,
+          subjectLine: "Infisical confirmation code",
+          recipients: [user.email],
+          substitutions: {
+            code: token
+          }
+        })
+        .catch((err: Error) => {
+          throw new OidcAuthError({
+            message: `Error sending email confirmation code for user registration - contact the Infisical instance admin. ${err.message}`
+          });
+        });
     }

     return { isUserCompleted, providerAuthToken };
@@ -395,6 +419,18 @@ export const oidcConfigServiceFactory = ({
       message: `Organization bot for organization with ID '${org.id}' not found`,
       name: "OrgBotNotFound"
     });
+
+  const serverCfg = await getServerCfg();
+  if (isActive && !serverCfg.trustOidcEmails) {
+    const isSmtpConnected = await smtpService.verify();
+    if (!isSmtpConnected) {
+      throw new BadRequestError({
+        message:
+          "Cannot enable OIDC when there are issues with the instance's SMTP configuration. Bypass this by turning on trust for OIDC emails in the server admin console."
+      });
+    }
+  }

   const key = infisicalSymmetricDecrypt({
     ciphertext: orgBot.encryptedSymmetricKey,
     iv: orgBot.symmetricKeyIV,
@@ -127,14 +127,15 @@ export const permissionDALFactory = (db: TDbClient) => {

   const getProjectPermission = async (userId: string, projectId: string) => {
     try {
+      const subQueryUserGroups = db(TableName.UserGroupMembership).where("userId", userId).select("groupId");
       const docs = await db
         .replicaNode()(TableName.Users)
         .where(`${TableName.Users}.id`, userId)
         .leftJoin(TableName.UserGroupMembership, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
         .leftJoin(TableName.GroupProjectMembership, (queryBuilder) => {
           void queryBuilder
             .on(`${TableName.GroupProjectMembership}.projectId`, db.raw("?", [projectId]))
-            .andOn(`${TableName.GroupProjectMembership}.groupId`, `${TableName.UserGroupMembership}.groupId`);
+            // @ts-expect-error akhilmhdh: this is valid knexjs query. Its just ts type argument is missing it
+            .andOnIn(`${TableName.GroupProjectMembership}.groupId`, subQueryUserGroups);
         })
         .leftJoin(
           TableName.GroupProjectMembershipRole,
@@ -1,14 +1,7 @@
 import picomatch from "picomatch";
 import { z } from "zod";

-export enum PermissionConditionOperators {
-  $IN = "$in",
-  $ALL = "$all",
-  $REGEX = "$regex",
-  $EQ = "$eq",
-  $NEQ = "$ne",
-  $GLOB = "$glob"
-}
+import { PermissionConditionOperators } from "@app/lib/casl";

 export const PermissionConditionSchema = {
   [PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
@@ -1,10 +1,10 @@
 import { AbilityBuilder, createMongoAbility, ForcedSubject, MongoAbility } from "@casl/ability";
 import { z } from "zod";

-import { conditionsMatcher } from "@app/lib/casl";
+import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
 import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";

-import { PermissionConditionOperators, PermissionConditionSchema } from "./permission-types";
+import { PermissionConditionSchema } from "./permission-types";

 export enum ProjectPermissionActions {
   Read = "read",
@@ -46,7 +46,7 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
     }
     return rateLimit;
   } catch (err) {
-    logger.error("Error fetching rate limits %o", err);
+    logger.error(err, "Error fetching rate limits");
     return undefined;
   }
 };
@@ -69,12 +69,12 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
         mfaRateLimit: rateLimit.mfaRateLimit
       };

-      logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
+      logger.info(newRateLimitMaxConfiguration, "syncRateLimitConfiguration: rate limit configuration");
       Object.freeze(newRateLimitMaxConfiguration);
       rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
     }
   } catch (error) {
-    logger.error(`Error syncing rate limit configurations: %o`, error);
+    logger.error(error, "Error syncing rate limit configurations");
   }
 };

@@ -238,11 +238,11 @@ export const secretScanningQueueFactory = ({
   });

   queueService.listen(QueueName.SecretPushEventScan, "failed", (job, err) => {
-    logger.error("Failed to secret scan on push", job?.data, err);
+    logger.error(err, "Failed to secret scan on push", job?.data);
   });

   queueService.listen(QueueName.SecretFullRepoScan, "failed", (job, err) => {
-    logger.error("Failed to do full repo secret scan", job?.data, err);
+    logger.error(err, "Failed to do full repo secret scan", job?.data);
   });

   return { startFullRepoScan, startPushEventScan };
@@ -391,6 +391,7 @@ export const PROJECTS = {
   CREATE: {
     organizationSlug: "The slug of the organization to create the project in.",
     projectName: "The name of the project to create.",
+    projectDescription: "An optional description label for the project.",
     slug: "An optional slug for the project.",
     template: "The name of the project template, if specified, to apply to this project."
   },
@@ -403,6 +404,7 @@ export const PROJECTS = {
   UPDATE: {
     workspaceId: "The ID of the project to update.",
     name: "The new name of the project.",
+    projectDescription: "An optional description label for the project.",
     autoCapitalization: "Disable or enable auto-capitalization for the project."
   },
   GET_KEY: {
@@ -1080,7 +1082,8 @@ export const INTEGRATION = {
       shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
       shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
       shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
-      shouldEnableDelete: "The flag to enable deletion of secrets."
+      shouldEnableDelete: "The flag to enable deletion of secrets.",
+      octopusDeployScopeValues: "Specifies the scope values to set on synced secrets to Octopus Deploy."
     }
   },
   UPDATE: {
|
@ -54,3 +54,12 @@ export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2:
|
||||
|
||||
return set1.size >= set2.size;
|
||||
};
|
||||
|
||||
export enum PermissionConditionOperators {
|
||||
$IN = "$in",
|
||||
$ALL = "$all",
|
||||
$REGEX = "$regex",
|
||||
$EQ = "$eq",
|
||||
$NEQ = "$ne",
|
||||
$GLOB = "$glob"
|
||||
}
|
||||
|
@@ -1,7 +1,7 @@
-import { Logger } from "pino";
 import { z } from "zod";

 import { removeTrailingSlash } from "../fn";
+import { CustomLogger } from "../logger/logger";
 import { zpStr } from "../zod";

 export const GITLAB_URL = "https://gitlab.com";
@@ -10,7 +10,7 @@ export const GITLAB_URL = "https://gitlab.com";
 export const IS_PACKAGED = (process as any)?.pkg !== undefined;

 const zodStrBool = z
-  .enum(["true", "false"])
+  .string()
   .optional()
   .transform((val) => val === "true");

@@ -157,6 +157,15 @@ const envSchema = z
     INFISICAL_CLOUD: zodStrBool.default("false"),
     MAINTENANCE_MODE: zodStrBool.default("false"),
     CAPTCHA_SECRET: zpStr(z.string().optional()),

+    // TELEMETRY
+    OTEL_TELEMETRY_COLLECTION_ENABLED: zodStrBool.default("false"),
+    OTEL_EXPORT_OTLP_ENDPOINT: zpStr(z.string().optional()),
+    OTEL_OTLP_PUSH_INTERVAL: z.coerce.number().default(30000),
+    OTEL_COLLECTOR_BASIC_AUTH_USERNAME: zpStr(z.string().optional()),
+    OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
+    OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),
+
     PLAIN_API_KEY: zpStr(z.string().optional()),
     PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
     DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
@@ -203,11 +212,11 @@ let envCfg: Readonly<z.infer<typeof envSchema>>;

 export const getConfig = () => envCfg;
 // cannot import singleton logger directly as it needs config to load various transport
-export const initEnvConfig = (logger: Logger) => {
+export const initEnvConfig = (logger?: CustomLogger) => {
   const parsedEnv = envSchema.safeParse(process.env);
   if (!parsedEnv.success) {
-    logger.error("Invalid environment variables. Check the error below");
-    logger.error(parsedEnv.error.issues);
+    (logger ?? console).error("Invalid environment variables. Check the error below");
+    (logger ?? console).error(parsedEnv.error.issues);
     process.exit(-1);
   }
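Note: for reference, enabling the new telemetry collection might look like this in a deployment's environment file; the variable names come from the schema above, the values are placeholders.

OTEL_TELEMETRY_COLLECTION_ENABLED=true
OTEL_EXPORT_TYPE=otlp
OTEL_EXPORT_OTLP_ENDPOINT=http://otel-collector:4318
OTEL_OTLP_PUSH_INTERVAL=30000
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=infisical
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=<password>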
@@ -133,3 +133,15 @@ export class ScimRequestError extends Error {
     this.status = status;
   }
 }
+
+export class OidcAuthError extends Error {
+  name: string;
+
+  error: unknown;
+
+  constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
+    super(message || "Something went wrong");
+    this.name = name || "OidcAuthError";
+    this.error = error;
+  }
+}
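Note: usage is straightforward; a sketch with an illustrative message:

import { OidcAuthError } from "@app/lib/errors";

throw new OidcAuthError({
  message: "Error sending email confirmation code for user registration - contact the Infisical instance admin."
});
// With no arguments, name defaults to "OidcAuthError" and message to "Something went wrong".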
@@ -1,6 +1,8 @@
 /* eslint-disable @typescript-eslint/no-unsafe-argument */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 // logger follows a singleton pattern
 // easier to use it that's all.
+import { requestContext } from "@fastify/request-context";
 import pino, { Logger } from "pino";
 import { z } from "zod";

@@ -13,14 +15,37 @@ const logLevelToSeverityLookup: Record<string, string> = {
   "60": "CRITICAL"
 };

-// eslint-disable-next-line import/no-mutable-exports
-export let logger: Readonly<Logger>;
+// akhilmhdh:
+// The logger is not placed in the main app config to avoid a circular dependency.
+// The config requires the logger to display errors when an invalid environment is supplied.
+// On the other hand, the logger needs the config to obtain credentials for AWS or other transports.
+// By keeping the logger separate, it becomes an independent package.
+
+// We define our own custom logger interface to enforce structure to the logging methods.
+
+export interface CustomLogger extends Omit<Logger, "info" | "error" | "warn" | "debug"> {
+  info: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+
+  error: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+  warn: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+  debug: {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    (obj: unknown, msg?: string, ...args: any[]): void;
+  };
+}
+
+// eslint-disable-next-line import/no-mutable-exports
+export let logger: Readonly<CustomLogger>;

 const loggerConfig = z.object({
   AWS_CLOUDWATCH_LOG_GROUP_NAME: z.string().default("infisical-log-stream"),
   AWS_CLOUDWATCH_LOG_REGION: z.string().default("us-east-1"),
@@ -62,6 +87,17 @@ const redactedKeys = [
   "config"
 ];

+const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";
+
+const extractRequestId = () => {
+  try {
+    return requestContext.get("requestId") || UNKNOWN_REQUEST_ID;
+  } catch (err) {
+    console.log("failed to get request context", err);
+    return UNKNOWN_REQUEST_ID;
+  }
+};
+
 export const initLogger = async () => {
   const cfg = loggerConfig.parse(process.env);
   const targets: pino.TransportMultiOptions["targets"][number][] = [
@@ -94,6 +130,30 @@ export const initLogger = async () => {
     targets
   });

+  const wrapLogger = (originalLogger: Logger): CustomLogger => {
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).info(obj, msg, ...args);
+    };
+
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).error(obj, msg, ...args);
+    };
+
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).warn(obj, msg, ...args);
+    };
+
+    // eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
+    originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) => {
+      return originalLogger.child({ requestId: extractRequestId() }).debug(obj, msg, ...args);
+    };
+
+    return originalLogger;
+  };
+
   logger = pino(
     {
       mixin(_context, level) {
@@ -113,5 +173,6 @@ export const initLogger = async () => {
     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
     transport
   );
-  return logger;
+
+  return wrapLogger(logger);
 };
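Note: the practical effect of the wrapper, sketched with invented field values — every call is routed through a child logger carrying the current request's ID:

// Inside a request handler (request-context store active):
logger.info({ projectId: "proj_123" }, "Fetched project"); // hypothetical call site
// emits roughly: {"level":30,"requestId":"req-aB3xK9QwErTy12","projectId":"proj_123","msg":"Fetched project"}

// Outside a request (queue worker, startup code), requestId falls back to "UNKNOWN_REQUEST_ID".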
backend/src/lib/telemetry/instrumentation.ts (new file, 91 lines)
@@ -0,0 +1,91 @@
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { Resource } from "@opentelemetry/resources";
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
import dotenv from "dotenv";

import { initEnvConfig } from "../config/env";

dotenv.config();

const initTelemetryInstrumentation = ({
  exportType,
  otlpURL,
  otlpUser,
  otlpPassword,
  otlpPushInterval
}: {
  exportType?: string;
  otlpURL?: string;
  otlpUser?: string;
  otlpPassword?: string;
  otlpPushInterval?: number;
}) => {
  diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

  const resource = Resource.default().merge(
    new Resource({
      [ATTR_SERVICE_NAME]: "infisical-core",
      [ATTR_SERVICE_VERSION]: "0.1.0"
    })
  );

  const metricReaders = [];
  switch (exportType) {
    case "prometheus": {
      const promExporter = new PrometheusExporter();
      metricReaders.push(promExporter);
      break;
    }
    case "otlp": {
      const otlpExporter = new OTLPMetricExporter({
        url: `${otlpURL}/v1/metrics`,
        headers: {
          Authorization: `Basic ${btoa(`${otlpUser}:${otlpPassword}`)}`
        },
        temporalityPreference: AggregationTemporality.DELTA
      });
      metricReaders.push(
        new PeriodicExportingMetricReader({
          exporter: otlpExporter,
          exportIntervalMillis: otlpPushInterval
        })
      );
      break;
    }
    default:
      throw new Error("Invalid OTEL export type");
  }

  const meterProvider = new MeterProvider({
    resource,
    readers: metricReaders
  });

  opentelemetry.metrics.setGlobalMeterProvider(meterProvider);

  registerInstrumentations({
    instrumentations: [getNodeAutoInstrumentations()]
  });
};

const setupTelemetry = () => {
  const appCfg = initEnvConfig();

  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
    console.log("Initializing telemetry instrumentation");
    initTelemetryInstrumentation({
      otlpURL: appCfg.OTEL_EXPORT_OTLP_ENDPOINT,
      otlpUser: appCfg.OTEL_COLLECTOR_BASIC_AUTH_USERNAME,
      otlpPassword: appCfg.OTEL_COLLECTOR_BASIC_AUTH_PASSWORD,
      otlpPushInterval: appCfg.OTEL_OTLP_PUSH_INTERVAL,
      exportType: appCfg.OTEL_EXPORT_TYPE
    });
  }
};

void setupTelemetry();
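Note: once this module has set the global MeterProvider, any part of the backend can record custom metrics through the OpenTelemetry API. A sketch with invented instrument names:

import opentelemetry from "@opentelemetry/api";

const meter = opentelemetry.metrics.getMeter("secret-operations"); // illustrative meter name
const secretReads = meter.createCounter("secret_reads_total", {
  description: "Number of secret read operations" // hypothetical instrument
});
secretReads.add(1, { project: "example-project" });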
@@ -1,3 +1,5 @@
+import "./lib/telemetry/instrumentation";
+
 import dotenv from "dotenv";
 import path from "path";

@@ -18,6 +20,7 @@ dotenv.config();
 const run = async () => {
   const logger = await initLogger();
   const appCfg = initEnvConfig(logger);

   const db = initDbConnection({
     dbConnectionUri: appCfg.DB_CONNECTION_URI,
     dbRootCert: appCfg.DB_ROOT_CERT,
@@ -10,18 +10,21 @@ import fastifyFormBody from "@fastify/formbody";
 import helmet from "@fastify/helmet";
 import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
 import ratelimiter from "@fastify/rate-limit";
+import { fastifyRequestContext } from "@fastify/request-context";
 import fastify from "fastify";
 import { Knex } from "knex";
-import { Logger } from "pino";

 import { HsmModule } from "@app/ee/services/hsm/hsm-types";
 import { TKeyStoreFactory } from "@app/keystore/keystore";
 import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
+import { CustomLogger } from "@app/lib/logger/logger";
+import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { TQueueServiceFactory } from "@app/queue";
 import { TSmtpService } from "@app/services/smtp/smtp-service";

 import { globalRateLimiterCfg } from "./config/rateLimiter";
 import { addErrorsToResponseSchemas } from "./plugins/add-errors-to-response-schemas";
+import { apiMetrics } from "./plugins/api-metrics";
 import { fastifyErrHandler } from "./plugins/error-handler";
 import { registerExternalNextjs } from "./plugins/external-nextjs";
 import { serializerCompiler, validatorCompiler, ZodTypeProvider } from "./plugins/fastify-zod";
@@ -34,7 +37,7 @@ type TMain = {
   auditLogDb?: Knex;
   db: Knex;
   smtp: TSmtpService;
-  logger?: Logger;
+  logger?: CustomLogger;
   queue: TQueueServiceFactory;
   keyStore: TKeyStoreFactory;
   hsmModule: HsmModule;
@@ -46,7 +49,9 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key

   const server = fastify({
     logger: appCfg.NODE_ENV === "test" ? false : logger,
+    genReqId: () => `req-${alphaNumericNanoId(14)}`,
     trustProxy: true,

     connectionTimeout: appCfg.isHsmConfigured ? 90_000 : 30_000,
     ignoreTrailingSlash: true,
     pluginTimeout: 40_000
@@ -86,6 +91,10 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
   // pull ip based on various proxy headers
   await server.register(fastifyIp);

+  if (appCfg.OTEL_TELEMETRY_COLLECTION_ENABLED) {
+    await server.register(apiMetrics);
+  }
+
   await server.register(fastifySwagger);
   await server.register(fastifyFormBody);
   await server.register(fastifyErrHandler);
@@ -99,6 +108,13 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key

   await server.register(maintenanceMode);

+  await server.register(fastifyRequestContext, {
+    defaultStoreValues: (request) => ({
+      requestId: request.id,
+      log: request.log.child({ requestId: request.id })
+    })
+  });
+
   await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });

   if (appCfg.isProductionMode) {
@@ -46,10 +46,10 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
   await createTransport(smtpCfg)
     .verify()
     .then(async () => {
-      console.info("SMTP successfully connected");
+      console.info(`SMTP - Verified connection to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
     })
-    .catch((err) => {
-      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT}`);
+    .catch((err: Error) => {
+      console.error(`SMTP - Failed to connect to ${appCfg.SMTP_HOST}:${appCfg.SMTP_PORT} - ${err.message}`);
       logger.error(err);
     });

backend/src/server/plugins/api-metrics.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import opentelemetry from "@opentelemetry/api";
import fp from "fastify-plugin";

export const apiMetrics = fp(async (fastify) => {
  const apiMeter = opentelemetry.metrics.getMeter("API");
  const latencyHistogram = apiMeter.createHistogram("API_latency", {
    unit: "ms"
  });

  fastify.addHook("onResponse", async (request, reply) => {
    const { method } = request;
    const route = request.routerPath;
    const { statusCode } = reply;

    latencyHistogram.record(reply.elapsedTime, {
      route,
      method,
      statusCode
    });
  });
});
@@ -1,4 +1,4 @@
-import { ForbiddenError } from "@casl/ability";
+import { ForbiddenError, PureAbility } from "@casl/ability";
 import fastifyPlugin from "fastify-plugin";
 import jwt from "jsonwebtoken";
 import { ZodError } from "zod";
@@ -10,6 +10,7 @@ import {
   GatewayTimeoutError,
   InternalServerError,
   NotFoundError,
+  OidcAuthError,
   RateLimitError,
   ScimRequestError,
   UnauthorizedError
@@ -38,74 +39,102 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
     if (error instanceof BadRequestError) {
       void res
         .status(HttpStatusCodes.BadRequest)
-        .send({ statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
+        .send({ requestId: req.id, statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
     } else if (error instanceof NotFoundError) {
       void res
         .status(HttpStatusCodes.NotFound)
-        .send({ statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
+        .send({ requestId: req.id, statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
     } else if (error instanceof UnauthorizedError) {
-      void res
-        .status(HttpStatusCodes.Unauthorized)
-        .send({ statusCode: HttpStatusCodes.Unauthorized, message: error.message, error: error.name });
+      void res.status(HttpStatusCodes.Unauthorized).send({
+        requestId: req.id,
+        statusCode: HttpStatusCodes.Unauthorized,
+        message: error.message,
+        error: error.name
+      });
     } else if (error instanceof DatabaseError || error instanceof InternalServerError) {
-      void res
-        .status(HttpStatusCodes.InternalServerError)
-        .send({ statusCode: HttpStatusCodes.InternalServerError, message: "Something went wrong", error: error.name });
+      void res.status(HttpStatusCodes.InternalServerError).send({
+        requestId: req.id,
+        statusCode: HttpStatusCodes.InternalServerError,
+        message: "Something went wrong",
+        error: error.name
+      });
     } else if (error instanceof GatewayTimeoutError) {
-      void res
-        .status(HttpStatusCodes.GatewayTimeout)
-        .send({ statusCode: HttpStatusCodes.GatewayTimeout, message: error.message, error: error.name });
+      void res.status(HttpStatusCodes.GatewayTimeout).send({
+        requestId: req.id,
+        statusCode: HttpStatusCodes.GatewayTimeout,
+        message: error.message,
+        error: error.name
+      });
     } else if (error instanceof ZodError) {
-      void res
-        .status(HttpStatusCodes.Unauthorized)
-        .send({ statusCode: HttpStatusCodes.Unauthorized, error: "ValidationFailure", message: error.issues });
+      void res.status(HttpStatusCodes.Unauthorized).send({
+        requestId: req.id,
+        statusCode: HttpStatusCodes.Unauthorized,
+        error: "ValidationFailure",
+        message: error.issues
+      });
     } else if (error instanceof ForbiddenError) {
       void res.status(HttpStatusCodes.Forbidden).send({
+        requestId: req.id,
         statusCode: HttpStatusCodes.Forbidden,
         error: "PermissionDenied",
-        message: `You are not allowed to ${error.action} on ${error.subjectType} - ${JSON.stringify(error.subject)}`
+        message: `You are not allowed to ${error.action} on ${error.subjectType}`,
+        details: (error.ability as PureAbility).rulesFor(error.action as string, error.subjectType).map((el) => ({
+          action: el.action,
+          inverted: el.inverted,
+          subject: el.subject,
+          conditions: el.conditions
+        }))
       });
     } else if (error instanceof ForbiddenRequestError) {
       void res.status(HttpStatusCodes.Forbidden).send({
+        requestId: req.id,
         statusCode: HttpStatusCodes.Forbidden,
         message: error.message,
         error: error.name
       });
     } else if (error instanceof RateLimitError) {
       void res.status(HttpStatusCodes.TooManyRequests).send({
+        requestId: req.id,
         statusCode: HttpStatusCodes.TooManyRequests,
         message: error.message,
         error: error.name
       });
     } else if (error instanceof ScimRequestError) {
       void res.status(error.status).send({
+        requestId: req.id,
         schemas: error.schemas,
         status: error.status,
         detail: error.detail
       });
       // Handle JWT errors and make them more human-readable for the end-user.
+    } else if (error instanceof OidcAuthError) {
+      void res.status(HttpStatusCodes.InternalServerError).send({
+        requestId: req.id,
+        statusCode: HttpStatusCodes.InternalServerError,
+        message: error.message,
+        error: error.name
+      });
     } else if (error instanceof jwt.JsonWebTokenError) {
-      const message = (() => {
-        if (error.message === JWTErrors.JwtExpired) {
-          return "Your token has expired. Please re-authenticate.";
-        }
-        if (error.message === JWTErrors.JwtMalformed) {
-          return "The provided access token is malformed. Please use a valid token or generate a new one and try again.";
-        }
-        if (error.message === JWTErrors.InvalidAlgorithm) {
-          return "The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
-        }
+      let errorMessage = error.message;

-        return error.message;
-      })();
+      if (error.message === JWTErrors.JwtExpired) {
+        errorMessage = "Your token has expired. Please re-authenticate.";
+      } else if (error.message === JWTErrors.JwtMalformed) {
+        errorMessage =
+          "The provided access token is malformed. Please use a valid token or generate a new one and try again.";
+      } else if (error.message === JWTErrors.InvalidAlgorithm) {
+        errorMessage =
+          "The access token is signed with an invalid algorithm. Please provide a valid token and try again.";
+      }

       void res.status(HttpStatusCodes.Forbidden).send({
+        requestId: req.id,
         statusCode: HttpStatusCodes.Forbidden,
         error: "TokenError",
-        message
+        message: errorMessage
       });
     } else {
       void res.status(HttpStatusCodes.InternalServerError).send({
+        requestId: req.id,
         statusCode: HttpStatusCodes.InternalServerError,
         error: "InternalServerError",
         message: "Something went wrong"
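Note: every branch now includes the request's ID, and CASL permission failures additionally return the matching rules. An illustrative 403 body (all values invented):

{
  "requestId": "req-aB3xK9QwErTy12",
  "statusCode": 403,
  "error": "PermissionDenied",
  "message": "You are not allowed to read on Secret",
  "details": [
    { "action": "read", "inverted": true, "subject": "Secret", "conditions": { "environment": "prod" } }
  ]
}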
@@ -19,7 +19,7 @@ export const registerSecretScannerGhApp = async (server: FastifyZodProvider) =>

   app.on("installation", async (context) => {
     const { payload } = context;
-    logger.info("Installed secret scanner to:", { repositories: payload.repositories });
+    logger.info({ repositories: payload.repositories }, "Installed secret scanner to");
   });

   app.on("push", async (context) => {
@@ -30,26 +30,32 @@ export const integrationAuthPubSchema = IntegrationAuthsSchema.pick({

 export const DefaultResponseErrorsSchema = {
   400: z.object({
+    requestId: z.string(),
     statusCode: z.literal(400),
     message: z.string(),
     error: z.string()
   }),
   404: z.object({
+    requestId: z.string(),
     statusCode: z.literal(404),
     message: z.string(),
     error: z.string()
   }),
   401: z.object({
+    requestId: z.string(),
     statusCode: z.literal(401),
     message: z.any(),
     error: z.string()
   }),
   403: z.object({
+    requestId: z.string(),
     statusCode: z.literal(403),
     message: z.string(),
+    details: z.any().optional(),
     error: z.string()
   }),
   500: z.object({
+    requestId: z.string(),
     statusCode: z.literal(500),
     message: z.string(),
     error: z.string()
@@ -206,6 +212,7 @@ export const SanitizedAuditLogStreamSchema = z.object({
 export const SanitizedProjectSchema = ProjectsSchema.pick({
   id: true,
   name: true,
+  description: true,
   slug: true,
   autoCapitalization: true,
   orgId: true,
@@ -5,6 +5,7 @@ import { INTEGRATION_AUTH } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
+import { OctopusDeployScope } from "@app/services/integration-auth/integration-auth-types";

 import { integrationAuthPubSchema } from "../sanitizedSchemas";

@@ -1008,4 +1009,118 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
       return { buildConfigs };
     }
   });
+
+  server.route({
+    method: "GET",
+    url: "/:integrationAuthId/octopus-deploy/scope-values",
+    config: {
+      rateLimit: readLimit
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    schema: {
+      params: z.object({
+        integrationAuthId: z.string().trim()
+      }),
+      querystring: z.object({
+        scope: z.nativeEnum(OctopusDeployScope),
+        spaceId: z.string().trim(),
+        resourceId: z.string().trim()
+      }),
+      response: {
+        200: z.object({
+          Environments: z
+            .object({
+              Name: z.string(),
+              Id: z.string()
+            })
+            .array(),
+          Machines: z
+            .object({
+              Name: z.string(),
+              Id: z.string()
+            })
+            .array(),
+          Actions: z
+            .object({
+              Name: z.string(),
+              Id: z.string()
+            })
+            .array(),
+          Roles: z
+            .object({
+              Name: z.string(),
+              Id: z.string()
+            })
+            .array(),
+          Channels: z
+            .object({
+              Name: z.string(),
+              Id: z.string()
+            })
+            .array(),
+          TenantTags: z
+            .object({
+              Name: z.string(),
+              Id: z.string()
+            })
+            .array(),
+          Processes: z
+            .object({
+              ProcessType: z.string(),
+              Name: z.string(),
+              Id: z.string()
+            })
+            .array()
+        })
+      }
+    },
+    handler: async (req) => {
+      const scopeValues = await server.services.integrationAuth.getOctopusDeployScopeValues({
+        actorId: req.permission.id,
+        actor: req.permission.type,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        id: req.params.integrationAuthId,
+        scope: req.query.scope,
+        spaceId: req.query.spaceId,
+        resourceId: req.query.resourceId
+      });
+      return scopeValues;
+    }
+  });
+
+  server.route({
+    method: "GET",
+    url: "/:integrationAuthId/octopus-deploy/spaces",
+    config: {
+      rateLimit: readLimit
+    },
+    onRequest: verifyAuth([AuthMode.JWT]),
+    schema: {
+      params: z.object({
+        integrationAuthId: z.string().trim()
+      }),
+      response: {
+        200: z.object({
+          spaces: z
+            .object({
+              Name: z.string(),
+              Id: z.string(),
+              IsDefault: z.boolean()
+            })
+            .array()
+        })
+      }
+    },
+    handler: async (req) => {
+      const spaces = await server.services.integrationAuth.getOctopusDeploySpaces({
+        actorId: req.permission.id,
+        actor: req.permission.type,
+        actorAuthMethod: req.permission.authMethod,
+        actorOrgId: req.permission.orgId,
+        id: req.params.integrationAuthId
+      });
+      return { spaces };
+    }
+  });
 };
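Note: a sketch of calling the two new endpoints; the base URL, the /api/v1/integration-auth mount point, and all IDs are assumptions for illustration:

const base = "https://app.infisical.com/api/v1/integration-auth"; // assumed mount point
const headers = { Authorization: `Bearer ${jwt}` };

const spacesRes = await fetch(`${base}/${integrationAuthId}/octopus-deploy/spaces`, { headers });
const { spaces } = await spacesRes.json();

const valuesRes = await fetch(
  `${base}/${integrationAuthId}/octopus-deploy/scope-values?scope=${scope}&spaceId=${spaces[0].Id}&resourceId=${resourceId}`,
  { headers }
);
const scopeValues = await valuesRes.json(); // { Environments, Machines, Actions, Roles, Channels, TenantTags, Processes }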
@@ -9,6 +9,7 @@ import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
 import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
+import { Integrations } from "@app/services/integration-auth/integration-list";
 import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";

 import {} from "../sanitizedSchemas";
@@ -206,6 +207,33 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
         id: req.params.integrationId
       });

+      if (integration.region) {
+        integration.metadata = {
+          ...(integration.metadata || {}),
+          region: integration.region
+        };
+      }
+
+      if (
+        integration.integration === Integrations.AWS_SECRET_MANAGER ||
+        integration.integration === Integrations.AWS_PARAMETER_STORE
+      ) {
+        const awsRoleDetails = await server.services.integration.getIntegrationAWSIamRole({
+          actorId: req.permission.id,
+          actor: req.permission.type,
+          actorAuthMethod: req.permission.authMethod,
+          actorOrgId: req.permission.orgId,
+          id: req.params.integrationId
+        });
+
+        if (awsRoleDetails) {
+          integration.metadata = {
+            ...(integration.metadata || {}),
+            awsIamRole: awsRoleDetails.role
+          };
+        }
+      }
+
       return { integration };
     }
   });
@@ -296,6 +296,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
          .max(64, { message: "Name must be 64 or fewer characters" })
          .optional()
          .describe(PROJECTS.UPDATE.name),
        description: z
          .string()
          .trim()
          .max(256, { message: "Description must be 256 or fewer characters" })
          .optional()
          .describe(PROJECTS.UPDATE.projectDescription),
        autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
      }),
      response: {
@@ -313,6 +319,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        },
        update: {
          name: req.body.name,
          description: req.body.description,
          autoCapitalization: req.body.autoCapitalization
        },
        actorAuthMethod: req.permission.authMethod,
@@ -161,6 +161,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
      ],
      body: z.object({
        projectName: z.string().trim().describe(PROJECTS.CREATE.projectName),
        projectDescription: z.string().trim().optional().describe(PROJECTS.CREATE.projectDescription),
        slug: z
          .string()
          .min(5)
@@ -194,6 +195,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        workspaceName: req.body.projectName,
        workspaceDescription: req.body.projectDescription,
        slug: req.body.slug,
        kmsKeyId: req.body.kmsKeyId,
        template: req.body.template
@@ -312,8 +314,9 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        slug: slugSchema.describe("The slug of the project to update.")
      }),
      body: z.object({
        name: z.string().trim().optional().describe("The new name of the project."),
        autoCapitalization: z.boolean().optional().describe("The new auto-capitalization setting.")
        name: z.string().trim().optional().describe(PROJECTS.UPDATE.name),
        description: z.string().trim().optional().describe(PROJECTS.UPDATE.projectDescription),
        autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
      }),
      response: {
        200: SanitizedProjectSchema
@@ -330,6 +333,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        },
        update: {
          name: req.body.name,
          description: req.body.description,
          autoCapitalization: req.body.autoCapitalization
        },
        actorId: req.permission.id,
@@ -119,13 +119,6 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
      if (!userAgent) throw new Error("user agent header is required");
      const appCfg = getConfig();

      const serverCfg = await getServerCfg();
      if (!serverCfg.allowSignUp) {
        throw new ForbiddenRequestError({
          message: "Signups are disabled"
        });
      }

      const { user, accessToken, refreshToken, organizationId } =
        await server.services.signup.completeEmailAccountSignup({
          ...req.body,
@@ -9,7 +9,7 @@ import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp";
import { NotFoundError } from "@app/lib/errors";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { isDisposableEmail } from "@app/lib/validator";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -23,6 +23,7 @@ import { TOrgServiceFactory } from "../org/org-service";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { getServerCfg } from "../super-admin/super-admin-service";
import { TUserDALFactory } from "../user/user-dal";
import { UserEncryption } from "../user/user-types";
import { TAuthDALFactory } from "./auth-dal";
@@ -151,6 +152,8 @@ export const authSignupServiceFactory = ({
    authorization
  }: TCompleteAccountSignupDTO) => {
    const appCfg = getConfig();
    const serverCfg = await getServerCfg();

    const user = await userDAL.findOne({ username: email });
    if (!user || (user && user.isAccepted)) {
      throw new Error("Failed to complete account for complete user");
@@ -163,6 +166,12 @@ export const authSignupServiceFactory = ({
      authMethod = userAuthMethod;
      organizationId = orgId;
    } else {
      // disallow signup if disabled; we are not doing this for providerAuthToken because we allow signups via SAML or SSO
      if (!serverCfg.allowSignUp) {
        throw new ForbiddenRequestError({
          message: "Signups are disabled"
        });
      }
      validateSignUpAuthorization(authorization, user.id);
    }
@@ -29,7 +29,7 @@ import {
} from "./identity-aws-auth-types";

type TIdentityAwsAuthServiceFactoryDep = {
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  identityAwsAuthDAL: Pick<TIdentityAwsAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -346,6 +346,8 @@ export const identityAwsAuthServiceFactory = ({

    const revokedIdentityAwsAuth = await identityAwsAuthDAL.transaction(async (tx) => {
      const deletedAwsAuth = await identityAwsAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AWS_AUTH }, tx);

      return { ...deletedAwsAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityAwsAuth;
@@ -30,7 +30,7 @@ type TIdentityAzureAuthServiceFactoryDep = {
    "findOne" | "transaction" | "create" | "updateById" | "delete"
  >;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@@ -70,7 +70,9 @@ export const identityAzureAuthServiceFactory = ({
      .map((servicePrincipalId) => servicePrincipalId.trim())
      .some((servicePrincipalId) => servicePrincipalId === azureIdentity.oid);

    if (!isServicePrincipalAllowed) throw new UnauthorizedError({ message: "Service principal not allowed" });
    if (!isServicePrincipalAllowed) {
      throw new UnauthorizedError({ message: `Service principal '${azureIdentity.oid}' not allowed` });
    }
  }

  const identityAccessToken = await identityAzureAuthDAL.transaction(async (tx) => {
@@ -317,6 +319,8 @@ export const identityAzureAuthServiceFactory = ({

    const revokedIdentityAzureAuth = await identityAzureAuthDAL.transaction(async (tx) => {
      const deletedAzureAuth = await identityAzureAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.AZURE_AUTH }, tx);

      return { ...deletedAzureAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityAzureAuth;
@@ -28,7 +28,7 @@ import {
type TIdentityGcpAuthServiceFactoryDep = {
  identityGcpAuthDAL: Pick<TIdentityGcpAuthDALFactory, "findOne" | "transaction" | "create" | "updateById" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
@@ -365,6 +365,8 @@ export const identityGcpAuthServiceFactory = ({

    const revokedIdentityGcpAuth = await identityGcpAuthDAL.transaction(async (tx) => {
      const deletedGcpAuth = await identityGcpAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.GCP_AUTH }, tx);

      return { ...deletedGcpAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityGcpAuth;
@@ -41,7 +41,7 @@ type TIdentityKubernetesAuthServiceFactoryDep = {
    TIdentityKubernetesAuthDALFactory,
    "create" | "findOne" | "transaction" | "updateById" | "delete"
  >;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne" | "findById">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
@@ -622,6 +622,7 @@ export const identityKubernetesAuthServiceFactory = ({

    const revokedIdentityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => {
      const deletedKubernetesAuth = await identityKubernetesAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.KUBERNETES_AUTH }, tx);
      return { ...deletedKubernetesAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
    return revokedIdentityKubernetesAuth;
@@ -39,7 +39,7 @@ import {
type TIdentityOidcAuthServiceFactoryDep = {
  identityOidcAuthDAL: TIdentityOidcAuthDALFactory;
  identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create">;
  identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "transaction" | "create">;
@@ -539,6 +539,8 @@ export const identityOidcAuthServiceFactory = ({

    const revokedIdentityOidcAuth = await identityOidcAuthDAL.transaction(async (tx) => {
      const deletedOidcAuth = await identityOidcAuthDAL.delete({ identityId }, tx);
      await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.OIDC_AUTH }, tx);

      return { ...deletedOidcAuth?.[0], orgId: identityMembershipOrg.orgId };
    });
@@ -182,7 +182,12 @@ export const identityProjectServiceFactory = ({

    // validate custom roles input
    const customInputRoles = roles.filter(
      ({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
      ({ role }) =>
        !Object.values(ProjectMembershipRole)
          // we don't want to include custom in this check;
          // this unintentionally enables setting slug to custom which is reserved
          .filter((r) => r !== ProjectMembershipRole.Custom)
          .includes(role as ProjectMembershipRole)
    );
    const hasCustomRole = Boolean(customInputRoles.length);
    const customRoles = hasCustomRole
@@ -385,8 +385,8 @@ export const identityTokenAuthServiceFactory = ({
    actorOrgId
  }: TUpdateTokenAuthTokenDTO) => {
    const foundToken = await identityAccessTokenDAL.findOne({
      id: tokenId,
      authMethod: IdentityAuthMethod.TOKEN_AUTH
      [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
      [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
    });
    if (!foundToken) throw new NotFoundError({ message: `Token with ID ${tokenId} not found` });

@@ -444,8 +444,8 @@ export const identityTokenAuthServiceFactory = ({
  }: TRevokeTokenAuthTokenDTO) => {
    const identityAccessToken = await identityAccessTokenDAL.findOne({
      [`${TableName.IdentityAccessToken}.id` as "id"]: tokenId,
      isAccessTokenRevoked: false,
      authMethod: IdentityAuthMethod.TOKEN_AUTH
      [`${TableName.IdentityAccessToken}.isAccessTokenRevoked` as "isAccessTokenRevoked"]: false,
      [`${TableName.IdentityAccessToken}.authMethod` as "authMethod"]: IdentityAuthMethod.TOKEN_AUTH
    });
    if (!identityAccessToken)
      throw new NotFoundError({
@@ -1,6 +1,7 @@
/* eslint-disable no-await-in-loop */
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { Client as OctopusDeployClient, ProjectRepository as OctopusDeployRepository } from "@octopusdeploy/api-client";

import { TIntegrationAuths } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
@@ -1087,6 +1088,33 @@ const getAppsAzureDevOps = async ({ accessToken, orgName }: { accessToken: strin
  return apps;
};

const getAppsOctopusDeploy = async ({
  apiKey,
  instanceURL,
  spaceName = "Default"
}: {
  apiKey: string;
  instanceURL: string;
  spaceName?: string;
}) => {
  const client = await OctopusDeployClient.create({
    instanceURL,
    apiKey,
    userAgentApp: "Infisical Integration"
  });

  const repository = new OctopusDeployRepository(client, spaceName);

  const projects = await repository.list({
    take: 1000
  });

  return projects.Items.map((project) => ({
    name: project.Name,
    appId: project.Id
  }));
};

export const getApps = async ({
  integration,
  integrationAuth,
@@ -1260,6 +1288,13 @@
        orgName: azureDevOpsOrgName as string
      });

    case Integrations.OCTOPUS_DEPLOY:
      return getAppsOctopusDeploy({
        apiKey: accessToken,
        instanceURL: url!,
        spaceName: workspaceSlug
      });

    default:
      throw new NotFoundError({ message: `Integration '${integration}' not found` });
  }
@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { Client as OctopusClient, SpaceRepository as OctopusSpaceRepository } from "@octopusdeploy/api-client";
import AWS from "aws-sdk";

import { SecretEncryptionAlgo, SecretKeyEncoding, TIntegrationAuths, TIntegrationAuthsInsert } from "@app/db/schemas";
@@ -9,7 +10,7 @@ import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { TGenericPermission, TProjectPermission } from "@app/lib/types";

import { TIntegrationDALFactory } from "../integration/integration-dal";
@@ -20,6 +21,7 @@ import { getApps } from "./integration-app-list";
import { TIntegrationAuthDALFactory } from "./integration-auth-dal";
import { IntegrationAuthMetadataSchema, TIntegrationAuthMetadata } from "./integration-auth-schema";
import {
  OctopusDeployScope,
  TBitbucketEnvironment,
  TBitbucketWorkspace,
  TChecklyGroups,
@@ -38,6 +40,8 @@ import {
  TIntegrationAuthGithubOrgsDTO,
  TIntegrationAuthHerokuPipelinesDTO,
  TIntegrationAuthNorthflankSecretGroupDTO,
  TIntegrationAuthOctopusDeployProjectScopeValuesDTO,
  TIntegrationAuthOctopusDeploySpacesDTO,
  TIntegrationAuthQoveryEnvironmentsDTO,
  TIntegrationAuthQoveryOrgsDTO,
  TIntegrationAuthQoveryProjectDTO,
@@ -48,6 +52,7 @@ import {
  TIntegrationAuthVercelBranchesDTO,
  TNorthflankSecretGroup,
  TOauthExchangeDTO,
  TOctopusDeployVariableSet,
  TSaveIntegrationAccessTokenDTO,
  TTeamCityBuildConfig,
  TVercelBranches
@@ -1521,6 +1526,88 @@ export const integrationAuthServiceFactory = ({
    return integrationAuthDAL.create(newIntegrationAuth);
  };

  const getOctopusDeploySpaces = async ({
    actorId,
    actor,
    actorOrgId,
    actorAuthMethod,
    id
  }: TIntegrationAuthOctopusDeploySpacesDTO) => {
    const integrationAuth = await integrationAuthDAL.findById(id);
    if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` });

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      integrationAuth.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
    const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId);
    const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);

    const client = await OctopusClient.create({
      apiKey: accessToken,
      instanceURL: integrationAuth.url!,
      userAgentApp: "Infisical Integration"
    });

    const spaceRepository = new OctopusSpaceRepository(client);

    const spaces = await spaceRepository.list({
      partialName: "", // throws error if no string is present...
      take: 1000
    });

    return spaces.Items;
  };

  const getOctopusDeployScopeValues = async ({
    actorId,
    actor,
    actorOrgId,
    actorAuthMethod,
    id,
    scope,
    spaceId,
    resourceId
  }: TIntegrationAuthOctopusDeployProjectScopeValuesDTO) => {
    const integrationAuth = await integrationAuthDAL.findById(id);
    if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` });

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      integrationAuth.projectId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
    const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId);
    const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);

    let url: string;
    switch (scope) {
      case OctopusDeployScope.Project:
        url = `${integrationAuth.url}/api/${spaceId}/projects/${resourceId}/variables`;
        break;
      // future support tenant, variable set etc.
      default:
        throw new InternalServerError({ message: `Unhandled Octopus Deploy scope` });
    }

    // SDK doesn't support variable set...
    const { data: variableSet } = await request.get<TOctopusDeployVariableSet>(url, {
      headers: {
        "X-NuGet-ApiKey": accessToken,
        Accept: "application/json"
      }
    });

    return variableSet.ScopeValues;
  };

  return {
    listIntegrationAuthByProjectId,
    listOrgIntegrationAuth,
@@ -1552,6 +1639,8 @@ export const integrationAuthServiceFactory = ({
    getBitbucketWorkspaces,
    getBitbucketEnvironments,
    getIntegrationAccessToken,
    duplicateIntegrationAuth
    duplicateIntegrationAuth,
    getOctopusDeploySpaces,
    getOctopusDeployScopeValues
  };
};
@@ -193,3 +193,72 @@ export type TIntegrationsWithEnvironment = TIntegrations & {
    | null
    | undefined;
};

export type TIntegrationAuthOctopusDeploySpacesDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;

export type TIntegrationAuthOctopusDeployProjectScopeValuesDTO = {
  id: string;
  spaceId: string;
  resourceId: string;
  scope: OctopusDeployScope;
} & Omit<TProjectPermission, "projectId">;

export enum OctopusDeployScope {
  Project = "project"
  // add tenant, variable set, etc.
}

export type TOctopusDeployVariableSet = {
  Id: string;
  OwnerId: string;
  Version: number;
  Variables: {
    Id: string;
    Name: string;
    Value: string;
    Description: string;
    Scope: {
      Environment?: string[];
      Machine?: string[];
      Role?: string[];
      TargetRole?: string[];
      Action?: string[];
      User?: string[];
      Trigger?: string[];
      ParentDeployment?: string[];
      Private?: string[];
      Channel?: string[];
      TenantTag?: string[];
      Tenant?: string[];
      ProcessOwner?: string[];
    };
    IsEditable: boolean;
    Prompt: {
      Description: string;
      DisplaySettings: Record<string, string>;
      Label: string;
      Required: boolean;
    } | null;
    Type: "String";
    IsSensitive: boolean;
  }[];
  ScopeValues: {
    Environments: { Id: string; Name: string }[];
    Machines: { Id: string; Name: string }[];
    Actions: { Id: string; Name: string }[];
    Roles: { Id: string; Name: string }[];
    Channels: { Id: string; Name: string }[];
    TenantTags: { Id: string; Name: string }[];
    Processes: {
      ProcessType: string;
      Id: string;
      Name: string;
    }[];
  };
  SpaceId: string;
  Links: {
    Self: string;
  };
};
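To make the new type concrete, a single entry of `TOctopusDeployVariableSet["Variables"]` could look like the sketch below; the field values are illustrative only.

```typescript
import { TOctopusDeployVariableSet } from "./integration-auth-types";

// Illustrative variable entry; the values are made up for the example.
const variable: TOctopusDeployVariableSet["Variables"][number] = {
  Id: "Variables-1",
  Name: "DATABASE_URL",
  Value: "postgres://...",
  Description: "synced from Infisical",
  Scope: { Environment: ["Environments-1"] }, // Octopus Deploy scope value format
  IsEditable: false,
  Prompt: null,
  Type: "String",
  IsSensitive: true
};
```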
@@ -34,7 +34,8 @@ export enum Integrations {
  HASURA_CLOUD = "hasura-cloud",
  RUNDECK = "rundeck",
  AZURE_DEVOPS = "azure-devops",
  AZURE_APP_CONFIGURATION = "azure-app-configuration"
  AZURE_APP_CONFIGURATION = "azure-app-configuration",
  OCTOPUS_DEPLOY = "octopus-deploy"
}

export enum IntegrationType {
@@ -413,6 +414,15 @@ export const getIntegrationOptions = async () => {
      type: "pat",
      clientId: "",
      docsLink: ""
    },
    {
      name: "Octopus Deploy",
      slug: "octopus-deploy",
      image: "Octopus Deploy.png",
      isAvailable: true,
      type: "sat",
      clientId: "",
      docsLink: ""
    }
  ];
@@ -32,14 +32,14 @@ import { z } from "zod";
import { SecretType, TIntegrationAuths, TIntegrations } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types";

import { TIntegrationDALFactory } from "../integration/integration-dal";
import { IntegrationMetadataSchema } from "../integration/integration-schema";
import { IntegrationAuthMetadataSchema } from "./integration-auth-schema";
import { TIntegrationsWithEnvironment } from "./integration-auth-types";
import { OctopusDeployScope, TIntegrationsWithEnvironment, TOctopusDeployVariableSet } from "./integration-auth-types";
import {
  IntegrationInitialSyncBehavior,
  IntegrationMappingBehavior,
@@ -473,7 +473,7 @@ const syncSecretsAzureKeyVault = async ({
    id: string; // secret URI
    value: string;
    attributes: {
      enabled: true;
      enabled: boolean;
      created: number;
      updated: number;
      recoveryLevel: string;
@@ -509,10 +509,19 @@ const syncSecretsAzureKeyVault = async ({

  const getAzureKeyVaultSecrets = await paginateAzureKeyVaultSecrets(`${integration.app}/secrets?api-version=7.3`);

  const enabledAzureKeyVaultSecrets = getAzureKeyVaultSecrets.filter((secret) => secret.attributes.enabled);

  // disabled keys to skip sending updates to
  const disabledAzureKeyVaultSecretKeys = getAzureKeyVaultSecrets
    .filter(({ attributes }) => !attributes.enabled)
    .map((getAzureKeyVaultSecret) => {
      return getAzureKeyVaultSecret.id.substring(getAzureKeyVaultSecret.id.lastIndexOf("/") + 1);
    });

  let lastSlashIndex: number;
  const res = (
    await Promise.all(
      getAzureKeyVaultSecrets.map(async (getAzureKeyVaultSecret) => {
      enabledAzureKeyVaultSecrets.map(async (getAzureKeyVaultSecret) => {
        if (!lastSlashIndex) {
          lastSlashIndex = getAzureKeyVaultSecret.id.lastIndexOf("/");
        }
@@ -658,6 +667,7 @@ const syncSecretsAzureKeyVault = async ({
  }) => {
    let isSecretSet = false;
    let maxTries = 6;
    if (disabledAzureKeyVaultSecretKeys.includes(key)) return;

    while (!isSecretSet && maxTries > 0) {
      // try to set secret
@@ -3075,7 +3085,7 @@ const syncSecretsTerraformCloud = async ({
}) => {
  // get secrets from Terraform Cloud
  const terraformSecrets = (
    await request.get<{ data: { attributes: { key: string; value: string }; id: string }[] }>(
    await request.get<{ data: { attributes: { key: string; value: string; sensitive: boolean }; id: string }[] }>(
      `${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
      {
        headers: {
@@ -3089,7 +3099,7 @@ const syncSecretsTerraformCloud = async ({
      ...obj,
      [secret.attributes.key]: secret
    }),
    {} as Record<string, { attributes: { key: string; value: string }; id: string }>
    {} as Record<string, { attributes: { key: string; value: string; sensitive: boolean }; id: string }>
  );

  const secretsToAdd: { [key: string]: string } = {};
@@ -3170,7 +3180,8 @@ const syncSecretsTerraformCloud = async ({
            attributes: {
              key,
              value: secrets[key]?.value,
              category: integration.targetService
              category: integration.targetService,
              sensitive: true
            }
          }
        },
@@ -3183,7 +3194,11 @@ const syncSecretsTerraformCloud = async ({
        }
      );
      // case: secret exists in Terraform Cloud
    } else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
    } else if (
      // we now set secrets to sensitive in Terraform Cloud, this checks if existing secrets are not sensitive and updates them accordingly
      !terraformSecrets[key].attributes.sensitive ||
      secrets[key]?.value !== terraformSecrets[key].attributes.value
    ) {
      // -> update secret
      await request.patch(
        `${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
@@ -3193,7 +3208,8 @@ const syncSecretsTerraformCloud = async ({
            id: terraformSecrets[key].id,
            attributes: {
              ...terraformSecrets[key],
              value: secrets[key]?.value
              value: secrets[key]?.value,
              sensitive: true
            }
          }
        },
@@ -4195,6 +4211,61 @@ const syncSecretsRundeck = async ({
  }
};

const syncSecretsOctopusDeploy = async ({
  integration,
  integrationAuth,
  secrets,
  accessToken
}: {
  integration: TIntegrations;
  integrationAuth: TIntegrationAuths;
  secrets: Record<string, { value: string; comment?: string }>;
  accessToken: string;
}) => {
  let url: string;
  switch (integration.scope) {
    case OctopusDeployScope.Project:
      url = `${integrationAuth.url}/api/${integration.targetEnvironmentId}/projects/${integration.appId}/variables`;
      break;
    // future support tenant, variable set, etc.
    default:
      throw new InternalServerError({ message: `Unhandled Octopus Deploy scope: ${integration.scope}` });
  }

  // SDK doesn't support variable set...
  const { data: variableSet } = await request.get<TOctopusDeployVariableSet>(url, {
    headers: {
      "X-NuGet-ApiKey": accessToken,
      Accept: "application/json"
    }
  });

  await request.put(
    url,
    {
      ...variableSet,
      Variables: Object.entries(secrets).map(([key, value]) => ({
        Name: key,
        Value: value.value,
        Description: value.comment ?? "",
        Scope:
          (integration.metadata as { octopusDeployScopeValues: TOctopusDeployVariableSet["ScopeValues"] })
            ?.octopusDeployScopeValues ?? {},
        IsEditable: false,
        Prompt: null,
        Type: "String",
        IsSensitive: true
      }))
    } as unknown as TOctopusDeployVariableSet,
    {
      headers: {
        "X-NuGet-ApiKey": accessToken,
        Accept: "application/json"
      }
    }
  );
};

/**
 * Sync/push [secrets] to [app] in integration named [integration]
 *
@@ -4507,6 +4578,14 @@ export const syncIntegrationSecrets = async ({
        accessToken
      });
      break;
    case Integrations.OCTOPUS_DEPLOY:
      await syncSecretsOctopusDeploy({
        integration,
        integrationAuth,
        secrets,
        accessToken
      });
      break;
    default:
      throw new BadRequestError({ message: "Invalid integration" });
  }
@@ -46,5 +46,18 @@ export const IntegrationMetadataSchema = z.object({
  shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete),
  shouldEnableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldEnableDelete),
  shouldMaskSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldMaskSecrets),
  shouldProtectSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldProtectSecrets)
  shouldProtectSecrets: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldProtectSecrets),

  octopusDeployScopeValues: z
    .object({
      // in Octopus Deploy Scope Value Format
      Environment: z.string().array().optional(),
      Action: z.string().array().optional(),
      Channel: z.string().array().optional(),
      Machine: z.string().array().optional(),
      ProcessOwner: z.string().array().optional(),
      Role: z.string().array().optional()
    })
    .optional()
    .describe(INTEGRATION.CREATE.metadata.octopusDeployScopeValues)
});
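A sketch of what the new `octopusDeployScopeValues` field might look like on an integration metadata payload; only the field shape comes from the schema above, the surrounding object and values are assumptions.

```typescript
// Hypothetical metadata object that would validate against IntegrationMetadataSchema.
const metadata = {
  octopusDeployScopeValues: {
    // arrays of Octopus Deploy resource IDs, per the scope value format
    Environment: ["Environments-1", "Environments-2"],
    Role: ["web-server"]
  }
};
```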
@@ -9,6 +9,7 @@ import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth
import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service";
import { deleteIntegrationSecrets } from "../integration-auth/integration-delete-secret";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectBotServiceFactory } from "../project-bot/project-bot-service";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretQueueFactory } from "../secret/secret-queue";
@@ -237,6 +238,46 @@ export const integrationServiceFactory = ({
    return { ...integration, envId: integration.environment.id };
  };

  const getIntegrationAWSIamRole = async ({ id, actor, actorAuthMethod, actorId, actorOrgId }: TGetIntegrationDTO) => {
    const integration = await integrationDAL.findById(id);

    if (!integration) {
      throw new NotFoundError({
        message: `Integration with ID '${id}' not found`
      });
    }

    const { permission } = await permissionService.getProjectPermission(
      actor,
      actorId,
      integration?.projectId || "",
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);

    const integrationAuth = await integrationAuthDAL.findById(integration.integrationAuthId);

    const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId: integration.projectId
    });
    let awsIamRole: string | null = null;
    if (integrationAuth.encryptedAwsAssumeIamRoleArn) {
      const awsAssumeRoleArn = secretManagerDecryptor({
        cipherTextBlob: Buffer.from(integrationAuth.encryptedAwsAssumeIamRoleArn)
      }).toString();
      if (awsAssumeRoleArn) {
        const [, role] = awsAssumeRoleArn.split(":role/");
        awsIamRole = role;
      }
    }

    return {
      role: awsIamRole
    };
  };

  const deleteIntegration = async ({
    actorId,
    id,
@@ -329,6 +370,7 @@ export const integrationServiceFactory = ({
    deleteIntegration,
    listIntegrationByProject,
    getIntegration,
    getIntegrationAWSIamRole,
    syncIntegration
  };
};
@@ -280,7 +280,12 @@ export const projectMembershipServiceFactory = ({

    // validate custom roles input
    const customInputRoles = roles.filter(
      ({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
      ({ role }) =>
        !Object.values(ProjectMembershipRole)
          // we don't want to include custom in this check;
          // this unintentionally enables setting slug to custom which is reserved
          .filter((r) => r !== ProjectMembershipRole.Custom)
          .includes(role as ProjectMembershipRole)
    );
    const hasCustomRole = Boolean(customInputRoles.length);
    if (hasCustomRole) {
@@ -191,6 +191,10 @@ export const projectDALFactory = (db: TDbClient) => {

      return project;
    } catch (error) {
      if (error instanceof NotFoundError) {
        throw error;
      }

      throw new DatabaseError({ error, name: "Find all projects" });
    }
  };
@@ -240,6 +244,10 @@ export const projectDALFactory = (db: TDbClient) => {

      return project;
    } catch (error) {
      if (error instanceof NotFoundError || error instanceof UnauthorizedError) {
        throw error;
      }

      throw new DatabaseError({ error, name: "Find project by slug" });
    }
  };
@@ -260,7 +268,7 @@ export const projectDALFactory = (db: TDbClient) => {
      }
      throw new BadRequestError({ message: "Invalid filter type" });
    } catch (error) {
      if (error instanceof BadRequestError) {
      if (error instanceof BadRequestError || error instanceof NotFoundError || error instanceof UnauthorizedError) {
        throw error;
      }
      throw new DatabaseError({ error, name: `Failed to find project by ${filter.type}` });
@@ -285,11 +285,14 @@ export const projectQueueFactory = ({

        if (!orgMembership) {
          // This can happen. Since we don't remove project memberships and project keys when a user is removed from an org, this is a valid case.
          logger.info("User is not in organization", {
            userId: key.receiverId,
            orgId: project.orgId,
            projectId: project.id
          });
          logger.info(
            {
              userId: key.receiverId,
              orgId: project.orgId,
              projectId: project.id
            },
            "User is not in organization"
          );
          // eslint-disable-next-line no-continue
          continue;
        }
@@ -551,10 +554,10 @@ export const projectQueueFactory = ({
      .catch(() => [null]);

    if (!project) {
      logger.error("Failed to upgrade project, because no project was found", data);
      logger.error(data, "Failed to upgrade project, because no project was found");
    } else {
      await projectDAL.setProjectUpgradeStatus(data.projectId, ProjectUpgradeStatus.Failed);
      logger.error("Failed to upgrade project", err, {
      logger.error(err, "Failed to upgrade project", {
        extra: {
          project,
          jobData: data
@@ -149,6 +149,7 @@ export const projectServiceFactory = ({
    actorOrgId,
    actorAuthMethod,
    workspaceName,
    workspaceDescription,
    slug: projectSlug,
    kmsKeyId,
    tx: trx,
@@ -206,6 +207,7 @@ export const projectServiceFactory = ({
      const project = await projectDAL.create(
        {
          name: workspaceName,
          description: workspaceDescription,
          orgId: organization.id,
          slug: projectSlug || slugify(`${workspaceName}-${alphaNumericNanoId(4)}`),
          kmsSecretManagerKeyId: kmsKeyId,
@@ -496,6 +498,7 @@ export const projectServiceFactory = ({

    const updatedProject = await projectDAL.updateById(project.id, {
      name: update.name,
      description: update.description,
      autoCapitalization: update.autoCapitalization
    });
    return updatedProject;
@@ -29,6 +29,7 @@ export type TCreateProjectDTO = {
  actorId: string;
  actorOrgId?: string;
  workspaceName: string;
  workspaceDescription?: string;
  slug?: string;
  kmsKeyId?: string;
  createDefaultEnvs?: boolean;
@@ -69,6 +70,7 @@ export type TUpdateProjectDTO = {
  filter: Filter;
  update: {
    name?: string;
    description?: string;
    autoCapitalization?: boolean;
  };
} & Omit<TProjectPermission, "projectId">;
@@ -414,12 +414,13 @@ export const secretV2BridgeServiceFactory = ({
      type: KmsDataKey.SecretManager,
      projectId
    });
    const encryptedValue = secretValue
      ? {
          encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
          references: getAllSecretReferences(secretValue).nestedReferences
        }
      : {};
    const encryptedValue =
      typeof secretValue === "string"
        ? {
            encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(secretValue) }).cipherTextBlob,
            references: getAllSecretReferences(secretValue).nestedReferences
          }
        : {};

    if (secretValue) {
      const { nestedReferences, localReferences } = getAllSecretReferences(secretValue);
@@ -1165,7 +1166,7 @@ export const secretV2BridgeServiceFactory = ({
    const newSecrets = await secretDAL.transaction(async (tx) =>
      fnSecretBulkInsert({
        inputSecrets: inputSecrets.map((el) => {
          const references = secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences;
          const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;

          return {
            version: 1,
@@ -1372,7 +1373,7 @@ export const secretV2BridgeServiceFactory = ({
        typeof el.secretValue !== "undefined"
          ? {
              encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(el.secretValue) }).cipherTextBlob,
              references: secretReferencesGroupByInputSecretKey[el.secretKey].nestedReferences
              references: secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences
            }
          : {};
@@ -77,5 +77,21 @@ export const smtpServiceFactory = (cfg: TSmtpConfig) => {
    }
  };

  return { sendMail };
  const verify = async () => {
    const isConnected = smtp
      .verify()
      .then(async () => {
        logger.info("SMTP connected");
        return true;
      })
      .catch((err: Error) => {
        logger.error("SMTP error");
        logger.error(err);
        return false;
      });

    return isConnected;
  };

  return { sendMail, verify };
};
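A plausible boot-time use of the new `verify` method; the call site and the `smtpCfg` variable below are assumptions, not part of the diff.

```typescript
// Hypothetical wiring: smtpCfg is an assumed TSmtpConfig instance.
const smtpService = smtpServiceFactory(smtpCfg);

const smtpConnected = await smtpService.verify(); // logs "SMTP connected" or the error
if (!smtpConnected) {
  // verify() already logged the cause; mail sending will fail until the SMTP settings are fixed
}
```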
@@ -142,7 +142,7 @@ export const fnTriggerWebhook = async ({
      !isDisabled && picomatch.isMatch(secretPath, hookSecretPath, { strictSlashes: false })
  );
  if (!toBeTriggeredHooks.length) return;
  logger.info("Secret webhook job started", { environment, secretPath, projectId });
  logger.info({ environment, secretPath, projectId }, "Secret webhook job started");
  const project = await projectDAL.findById(projectId);
  const webhooksTriggered = await Promise.allSettled(
    toBeTriggeredHooks.map((hook) =>
@@ -195,5 +195,5 @@ export const fnTriggerWebhook = async ({
      );
    }
  });
  logger.info("Secret webhook job ended", { environment, secretPath, projectId });
  logger.info({ environment, secretPath, projectId }, "Secret webhook job ended");
};
@@ -111,7 +111,7 @@ var exportCmd = &cobra.Command{
			accessToken = token.Token
		} else {
			log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
			loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
			loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
			if err != nil {
				util.HandleError(err)
			}
@@ -41,7 +41,7 @@ var initCmd = &cobra.Command{
			}
		}

		userCreds, err := util.GetCurrentLoggedInUserDetails()
		userCreds, err := util.GetCurrentLoggedInUserDetails(true)
		if err != nil {
			util.HandleError(err, "Unable to get your login details")
		}
@@ -154,6 +154,8 @@ var loginCmd = &cobra.Command{
	DisableFlagsInUseLine: true,
	Run: func(cmd *cobra.Command, args []string) {

		presetDomain := config.INFISICAL_URL

		clearSelfHostedDomains, err := cmd.Flags().GetBool("clear-domains")
		if err != nil {
			util.HandleError(err)
@@ -198,7 +200,7 @@ var loginCmd = &cobra.Command{

		// standalone user auth
		if loginMethod == "user" {
			currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
			currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
			// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
			if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
				log.Debug().Err(err)
@@ -216,11 +218,19 @@ var loginCmd = &cobra.Command{
					return
				}
			}

			usePresetDomain, err := usePresetDomain(presetDomain)

			if err != nil {
				util.HandleError(err)
			}

			//override domain
			domainQuery := true
			if config.INFISICAL_URL_MANUAL_OVERRIDE != "" &&
				config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_EU_URL) &&
				config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL) {
				config.INFISICAL_URL_MANUAL_OVERRIDE != fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL) &&
				!usePresetDomain {
				overrideDomain, err := DomainOverridePrompt()
				if err != nil {
					util.HandleError(err)
@@ -228,7 +238,7 @@ var loginCmd = &cobra.Command{

				//if not override set INFISICAL_URL to exported var
				//set domainQuery to false
				if !overrideDomain {
				if !overrideDomain && !usePresetDomain {
					domainQuery = false
					config.INFISICAL_URL = util.AppendAPIEndpoint(config.INFISICAL_URL_MANUAL_OVERRIDE)
					config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", strings.TrimSuffix(config.INFISICAL_URL, "/api"))
@@ -237,7 +247,7 @@ var loginCmd = &cobra.Command{
			}

			//prompt user to select domain between Infisical cloud and self-hosting
			if domainQuery {
			if domainQuery && !usePresetDomain {
				err = askForDomain()
				if err != nil {
					util.HandleError(err, "Unable to parse domain url")
@@ -526,6 +536,45 @@ func DomainOverridePrompt() (bool, error) {
	return selectedOption == OVERRIDE, err
}

func usePresetDomain(presetDomain string) (bool, error) {
	infisicalConfig, err := util.GetConfigFile()
	if err != nil {
		return false, fmt.Errorf("askForDomain: unable to get config file because [err=%s]", err)
	}

	preconfiguredUrl := strings.TrimSuffix(presetDomain, "/api")

	if preconfiguredUrl != "" && preconfiguredUrl != util.INFISICAL_DEFAULT_US_URL && preconfiguredUrl != util.INFISICAL_DEFAULT_EU_URL {
		parsedDomain := strings.TrimSuffix(strings.Trim(preconfiguredUrl, "/"), "/api")

		_, err := url.ParseRequestURI(parsedDomain)
		if err != nil {
			return false, errors.New(fmt.Sprintf("Invalid domain URL: '%s'", parsedDomain))
		}

		config.INFISICAL_URL = fmt.Sprintf("%s/api", parsedDomain)
		config.INFISICAL_LOGIN_URL = fmt.Sprintf("%s/login", parsedDomain)

		if !slices.Contains(infisicalConfig.Domains, parsedDomain) {
			infisicalConfig.Domains = append(infisicalConfig.Domains, parsedDomain)
			err = util.WriteConfigFile(&infisicalConfig)

			if err != nil {
				return false, fmt.Errorf("askForDomain: unable to write domains to config file because [err=%s]", err)
			}
		}

		white := color.New(color.FgGreen)
		boldWhite := white.Add(color.Bold)
		time.Sleep(time.Second * 1)
		boldWhite.Printf("[INFO] Using domain '%s' from domain flag or INFISICAL_API_URL environment variable\n", parsedDomain)

		return true, nil
	}

	return false, nil
}

func askForDomain() error {

	// query user to choose between Infisical cloud or self-hosting
@@ -54,7 +54,7 @@ func init() {
			util.CheckForUpdate()
		}

		loggedInDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInDetails, err := util.GetCurrentLoggedInUserDetails(false)

		if !silent && err == nil && loggedInDetails.IsUserLoggedIn && !loggedInDetails.LoginExpired {
			token, err := util.GetInfisicalToken(cmd)
@@ -194,7 +194,7 @@ var secretsSetCmd = &cobra.Command{
			projectId = workspaceFile.WorkspaceId
		}

		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
		if err != nil {
			util.HandleError(err, "unable to authenticate [err=%v]")
		}
@@ -278,7 +278,7 @@ var secretsDeleteCmd = &cobra.Command{
		util.RequireLogin()
		util.RequireLocalWorkspaceFile()

		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
		if err != nil {
			util.HandleError(err, "Unable to authenticate")
		}
@@ -41,7 +41,7 @@ var tokensCreateCmd = &cobra.Command{
	},
	Run: func(cmd *cobra.Command, args []string) {
		// get plain text workspace key
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)

		if err != nil {
			util.HandleError(err, "Unable to retrieve your logged in details. Please log in then try again")
@@ -55,7 +55,7 @@ func GetUserCredsFromKeyRing(userEmail string) (credentials models.UserCredentia
	return userCredentials, err
}

func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
func GetCurrentLoggedInUserDetails(setConfigVariables bool) (LoggedInUserDetails, error) {
	if ConfigFileExists() {
		configFile, err := GetConfigFile()
		if err != nil {
@@ -75,18 +75,20 @@ func GetCurrentLoggedInUserDetails() (LoggedInUserDetails, error) {
			}
		}

		if setConfigVariables {
			config.INFISICAL_URL_MANUAL_OVERRIDE = config.INFISICAL_URL
			//configFile.LoggedInUserDomain
			//if not empty set as infisical url
			if configFile.LoggedInUserDomain != "" {
				config.INFISICAL_URL = AppendAPIEndpoint(configFile.LoggedInUserDomain)
			}
		}

		// check to see if the JWT is still valid
		httpClient := resty.New().
			SetAuthToken(userCreds.JTWToken).
			SetHeader("Accept", "application/json")

		config.INFISICAL_URL_MANUAL_OVERRIDE = config.INFISICAL_URL
		//configFile.LoggedInUserDomain
		//if not empty set as infisical url
		if configFile.LoggedInUserDomain != "" {
			config.INFISICAL_URL = AppendAPIEndpoint(configFile.LoggedInUserDomain)
		}

		isAuthenticated := api.CallIsAuthenticated(httpClient)
		// TODO: add refresh token
		// if !isAuthenticated {
@@ -20,7 +20,7 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder

		log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")

		loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)
		if err != nil {
			return nil, err
		}
@@ -177,7 +177,7 @@ func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, er
	if params.InfisicalToken == "" {
		RequireLogin()
		RequireLocalWorkspaceFile()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)

		if err != nil {
			return models.SingleFolder{}, err
@@ -224,7 +224,7 @@ func DeleteFolder(params models.DeleteFolderParameters) ([]models.SingleFolder,
		RequireLogin()
		RequireLocalWorkspaceFile()

		loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
		loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)

		if err != nil {
			return nil, err
@@ -246,7 +246,7 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo

		log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")

		loggedInUserDetails, err := GetCurrentLoggedInUserDetails(true)
		isConnected := ValidateInfisicalAPIConnection()

		if isConnected {
@@ -12,6 +12,18 @@ To request time off, just submit a request in Rippling and let Maidul know at le

Since Infisical's team is globally distributed, it is hard for us to keep track of all the various national holidays across many different countries. Whether you'd like to celebrate Christmas or National Brisket Day (which, by the way, is on May 28th), you are welcome to take PTO on those days – just let Maidul know at least a week ahead so that we can adjust our planning.

## Winter Break
## Winter break

Every year, the Infisical team goes on a company-wide vacation during winter holidays. This year, the winter break period starts on December 21st, 2024 and ends on January 5th, 2025. You should expect to do no scheduled work during this period, but we will have a rotation process for [high and urgent service disruptions](https://infisical.com/sla).

## Parental leave

At Infisical, we recognize that parental leave is a special and important time, significantly different from a typical vacation. We’re proud to offer parental leave to everyone, regardless of gender, and whether you’ve become a parent through childbirth or adoption.

For team members who have been with Infisical for over a year by the time of your child’s birth or adoption, you are eligible for up to 12 weeks of paid parental leave. This leave will be provided in one continuous block to allow you uninterrupted time with your family. If you have been with Infisical for less than a year, we will follow the parental leave provisions required by your local jurisdiction.

While we trust your judgment, parental leave is intended to be a distinct benefit and is not designed to be combined with our unlimited PTO policy. To ensure fairness and balance, we generally discourage combining parental leave with an extended vacation.

When you’re ready, please notify Maidul about your plans for parental leave, ideally at least four months in advance. This allows us to support you fully and arrange any necessary logistics, including salary adjustments and statutory paperwork.

We’re here to support you as you embark on this exciting new chapter in your life!
@@ -86,6 +86,7 @@ services:
      - .env
    ports:
      - 4000:4000
      - 9464:9464 # for OTEL collection of Prometheus metrics
    environment:
      - NODE_ENV=development
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
@@ -95,6 +96,42 @@ services:
    extra_hosts:
      - "host.docker.internal:host-gateway"

  prometheus:
    image: prom/prometheus
    volumes:
      - ./prometheus.dev.yml:/etc/prometheus/prometheus.yml
    ports:
      - "9090:9090"
    command:
      - "--config.file=/etc/prometheus/prometheus.yml"
    profiles: [metrics]

  otel-collector:
    image: otel/opentelemetry-collector-contrib
    volumes:
      - ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml
    ports:
      - 1888:1888 # pprof extension
      - 8888:8888 # Prometheus metrics exposed by the Collector
      - 8889:8889 # Prometheus exporter metrics
      - 13133:13133 # health_check extension
      - 4317:4317 # OTLP gRPC receiver
      - 4318:4318 # OTLP http receiver
      - 55679:55679 # zpages extension
    profiles: [metrics-otel]

  grafana:
    image: grafana/grafana
    container_name: grafana
    restart: unless-stopped
    environment:
      - GF_LOG_LEVEL=debug
    ports:
      - "3005:3000"
    volumes:
      - "grafana_storage:/var/lib/grafana"
    profiles: [metrics]

  frontend:
    container_name: infisical-dev-frontend
    restart: unless-stopped
@@ -166,3 +203,4 @@ volumes:
    driver: local
  ldap_data:
  ldap_config:
  grafana_storage:
@@ -3,6 +3,3 @@ title: "Bulk Create"
openapi: "POST /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,6 +3,3 @@ title: "Create"
openapi: "POST /api/v3/secrets/raw/{secretName}"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
@@ -3,6 +3,3 @@ title: "Bulk Delete"
openapi: "DELETE /api/v3/secrets/batch/raw"
---

<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>