Mirror of https://github.com/Infisical/infisical.git (synced 2025-04-02 14:38:48 +00:00)

Compare commits: feat/addSe... → feat/allow... (39 commits)
Commits (SHA1):
10a3658328, e8ece6be3f, 75ca093b24, 6c0889f117, 5b11232325, d55ddcd577, 37cbb4c55b, 506b56b657, 351304fda6, b6d67df966, 3897f0ece5, 7719ebb112, f03f02786d, c60840e979, 6fe7a5f069, 14b7d763ad, bc1b7ddcc5, dff729ffc1, 9a5633fda4, f8a96576c9, 88d3d62894, ac40dcc2c6, 6482e88dfc, a01249e903, 7b3e1f12bd, 031c8d67b1, 778b0d4368, 95b57e144d, 1d26269993, ffee1701fc, 5fe3c9868f, c936aa7157, 05005f4258, c179d7e5ae, c8553fba2b, 26a9d68823, af5b3aa171, d4728e31c1, 9924ef3a71
Changed paths:
.github/workflows
backend/src
  ee
    routes/v1
    services
      dynamic-secret
        dynamic-secret-fns.ts
        providers
      identity-project-additional-privilege-v2
      identity-project-additional-privilege
      license
      project-user-additional-privilege
      secret-rotation/secret-rotation-queue
  lib
  server/routes
  services
cli
docs
frontend/src
  helpers
  hooks
  layouts/OrganizationLayout
  pages
    organization/BillingPage/components
    secret-manager/OverviewPage
helm-charts/secrets-operator
  templates
    deployment.yaml
    infisicaldynamicsecret-crd.yaml
    infisicalpushsecret-crd.yaml
    infisicalsecret-crd.yaml
    leader-election-rbac.yaml
    manager-rbac.yaml
    metrics-reader-rbac.yaml
    metrics-service.yaml
    proxy-rbac.yaml
    serviceaccount.yaml
  values.yaml
k8-operator
.github/workflows/release-k8-operator-helm.yml (vendored, new file, 27 lines)

@@ -0,0 +1,27 @@
name: Release K8 Operator Helm Chart
on:
  workflow_dispatch:

jobs:
  release-helm:
    name: Release Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      - name: Install python
        uses: actions/setup-python@v4

      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli

      - name: Build and push helm package to CloudSmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
.github/workflows/release_docker_k8_operator.yaml (vendored, 139 lines)

@@ -1,52 +1,103 @@
name: Release image + Helm chart K8s Operator
name: Release K8 Operator Docker Image
on:
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"
  push:
    tags:
      - "infisical-k8-operator/v*.*.*"

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
      - uses: actions/checkout@v2
  release-image:
    name: Generate Helm Chart PR
    runs-on: ubuntu-latest
    outputs:
      pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
    steps:
      - name: Extract version from tag
        id: extract_version
        run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"

      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Checkout code
        uses: actions/checkout@v2

      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      # Dependency for helm generation
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      # Dependency for helm generation
      - name: Install Go
        uses: actions/setup-go@v4
        with:
          go-version: 1.21

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
      # Install binaries for helm generation
      - name: Install dependencies
        working-directory: k8-operator
        run: |
          make helmify
          make kustomize
          make controller-gen

      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to Cloudsmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
      - name: Generate Helm Chart
        working-directory: k8-operator
        run: make helm

      - name: Update Helm Chart Version
        run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}

      - name: Debug - Check file changes
        run: |
          echo "Current git status:"
          git status
          echo ""
          echo "Modified files:"
          git diff --name-only

          # If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
          if [ -z "$(git diff --name-only)" ]; then
            echo "No helm changes or version changes. Invalid release detected, Exiting."
            exit 1
          fi

      - name: Create Helm Chart PR
        id: create-pr
        uses: peter-evans/create-pull-request@v5
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
          committer: GitHub <noreply@github.com>
          author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
          branch: helm-update-${{ steps.extract_version.outputs.version }}
          delete-branch: true
          title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
          body: |
            This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
            Additionally the helm chart has been updated to match the latest operator code changes.

            Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}

            Once you have approved this PR, you can trigger the helm release workflow manually.
          base: main

      - name: 🔧 Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: k8-operator
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            infisical/kubernetes-operator:latest
            infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
@@ -5,9 +5,11 @@ import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-type
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

export const registerSshCertRouter = async (server: FastifyZodProvider) => {
  server.route({
@@ -73,6 +75,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
        }
      });

      await server.services.telemetry.sendPostHogEvents({
        event: PostHogEventTypes.SignSshKey,
        distinctId: getTelemetryDistinctId(req),
        properties: {
          certificateTemplateId: req.body.certificateTemplateId,
          principals: req.body.principals,
          ...req.auditLogInfo
        }
      });

      return {
        serialNumber,
        signedKey: signedPublicKey
@@ -152,6 +164,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
        }
      });

      await server.services.telemetry.sendPostHogEvents({
        event: PostHogEventTypes.IssueSshCreds,
        distinctId: getTelemetryDistinctId(req),
        properties: {
          certificateTemplateId: req.body.certificateTemplateId,
          principals: req.body.principals,
          ...req.auditLogInfo
        }
      });

      return {
        serialNumber,
        signedKey: signedPublicKey,
@@ -1,31 +1,51 @@
import crypto from "node:crypto";
import dns from "node:dns/promises";
import net from "node:net";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
  const appCfg = getConfig();
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
  // no need for validation when it's dev
  if (appCfg.NODE_ENV === "development") return;
  // if (appCfg.NODE_ENV === "development") return; // incase you want to remove this check in dev

  if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
  const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
    (appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
    getDbConnectionHost(appCfg.REDIS_URL)
  );

  if (
    appCfg.isCloud &&
    !isGateway &&
    // localhost
    // internal ips
    (host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new BadRequestError({ message: "Invalid db host" });

  if (
    host === "localhost" ||
    host === "127.0.0.1" ||
    (dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
  ) {
    throw new BadRequestError({ message: "Invalid db host" });
  // get host db ip
  const exclusiveIps: string[] = [];
  for await (const el of reservedHosts) {
    if (el) {
      if (net.isIPv4(el)) {
        exclusiveIps.push(el);
      } else {
        const resolvedIps = await dns.resolve4(el);
        exclusiveIps.push(...resolvedIps);
      }
    }
  }

  const normalizedHost = host.split(":")[0];
  const inputHostIps: string[] = [];
  if (net.isIPv4(host)) {
    inputHostIps.push(host);
  } else {
    if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
      throw new BadRequestError({ message: "Invalid db host" });
    }
    const resolvedIps = await dns.resolve4(host);
    inputHostIps.push(...resolvedIps);
  }

  if (!isGateway) {
    const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
    if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
  }

  const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
  if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
  return inputHostIps;
};
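The refactor above replaces string comparisons against the raw host with DNS resolution followed by an IP-range check, and now returns the resolved addresses. A minimal standalone sketch of that pattern, using only Node's built-in dns/promises and net modules; checkHost and blockedIps are illustrative names, not part of the Infisical codebase:

```ts
import dns from "node:dns/promises";
import net from "node:net";

// Hypothetical helper mirroring the pattern above: resolve the host to IPv4
// addresses, then reject anything that lands on a blocked address.
const checkHost = async (host: string, blockedIps: string[]): Promise<string[]> => {
  const name = host.split(":")[0]; // drop an optional ":port" suffix
  const ips = net.isIPv4(name) ? [name] : await dns.resolve4(name);
  if (ips.some((ip) => blockedIps.includes(ip))) {
    throw new Error(`Host ${host} resolves to a blocked address`);
  }
  return ips;
};

// Usage sketch: connect to a resolved IP rather than the user-supplied name,
// so a DNS record cannot be re-pointed between validation and connection.
checkHost("db.example.com", ["127.0.0.1"]).then((ips) => console.log(ips[0]));
```

Returning the vetted addresses is why the provider hunks below switch their clients from `providerInputs.host` to `providerInputs.hostIp`.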
@@ -13,6 +13,7 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";

@@ -144,6 +145,14 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
    // We can't return the parsed statements here because we need to use the handlebars template to generate the username and password, before we can use the parsed statements.
    CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
    DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
    validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.revocationStatement) {
      validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }

    return providerInputs;
  };
@@ -3,9 +3,10 @@ import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";

const generatePassword = (size = 48) => {
@@ -20,14 +21,28 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
    if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
      throw new BadRequestError({ message: "Invalid db host" });
    const hostIps = await Promise.all(
      providerInputs.host
        .split(",")
        .filter(Boolean)
        .map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
    );
    validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
      });
    }
    validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return providerInputs;
    return { ...providerInputs, hostIps };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
    const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
    const client = new cassandra.Client({
      sslOptions,
@@ -40,7 +55,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
      },
      keyspace: providerInputs.keyspace,
      localDataCenter: providerInputs?.localDataCenter,
      contactPoints: providerInputs.host.split(",").filter(Boolean)
      contactPoints: providerInputs.hostIps
    });
    return client;
  };
@@ -19,15 +19,14 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
    const connection = new ElasticSearchClient({
      node: {
        url: new URL(`${providerInputs.host}:${providerInputs.port}`),
        url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
        ...(providerInputs.ca && {
          ssl: {
            rejectUnauthorized: false,
@@ -19,15 +19,15 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
    const isSrv = !providerInputs.port;
    const uri = isSrv
      ? `mongodb+srv://${providerInputs.host}`
      : `mongodb://${providerInputs.host}:${providerInputs.port}`;
      ? `mongodb+srv://${providerInputs.hostIp}`
      : `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;

    const client = new MongoClient(uri, {
      auth: {
@@ -3,7 +3,6 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";

import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

@@ -79,14 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);

    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
    const axiosInstance = axios.create({
      baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
      baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
      auth: {
        username: providerInputs.username,
        password: providerInputs.password
@@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
@@ -51,16 +52,28 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
    let connection: Redis | null = null;
    try {
      connection = new Redis({
        username: providerInputs.username,
        host: providerInputs.host,
        host: providerInputs.hostIp,
        port: providerInputs.port,
        password: providerInputs.password,
        ...(providerInputs.ca && {
@@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
@@ -27,14 +28,25 @@ export const SapAseProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password"].includes(val)
    });
    if (providerInputs.revocationStatement) {
      validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
        allowedExpressions: (val) => ["username"].includes(val)
      });
    }
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
  const $getClient = async (
    providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
    useMaster?: boolean
  ) => {
    const connectionString =
      `DRIVER={FreeTDS};` +
      `SERVER=${providerInputs.host};` +
      `SERVER=${providerInputs.hostIp};` +
      `PORT=${providerInputs.port};` +
      `DATABASE=${useMaster ? "master" : providerInputs.database};` +
      `UID=${providerInputs.username};` +
@@ -11,6 +11,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
@@ -28,13 +29,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);

    verifyHostInputValidity(providerInputs.host);
    return providerInputs;
    const [hostIp] = await verifyHostInputValidity(providerInputs.host);
    validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });
    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
    const client = hdb.createClient({
      host: providerInputs.host,
      host: providerInputs.hostIp,
      port: providerInputs.port,
      user: providerInputs.username,
      password: providerInputs.password,
@@ -5,6 +5,7 @@ import { z } from "zod";

import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";

@@ -31,6 +32,18 @@ const getDaysToExpiry = (expiryDate: Date) => {
export const SnowflakeProvider = (): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
    validateHandlebarTemplate("Snowflake creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("Snowflake renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration"].includes(val)
      });
    }
    validateHandlebarTemplate("Snowflake revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username"].includes(val)
    });

    return providerInputs;
  };

@@ -5,6 +5,7 @@ import { z } from "zod";

import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
@@ -117,8 +118,21 @@ type TSqlDatabaseProviderDTO = {
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
  const validateProviderInputs = async (inputs: unknown) => {
    const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
    verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
    return providerInputs;

    const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
    validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
      allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
    });
    if (providerInputs.renewStatement) {
      validateHandlebarTemplate("SQL renew", providerInputs.renewStatement, {
        allowedExpressions: (val) => ["username", "expiration", "database"].includes(val)
      });
    }
    validateHandlebarTemplate("SQL revoke", providerInputs.revocationStatement, {
      allowedExpressions: (val) => ["username", "database"].includes(val)
    });

    return { ...providerInputs, hostIp };
  };

  const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
@@ -144,7 +158,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
          }
        : undefined
      },
      acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
      acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
      pool: { min: 0, max: 7 }
    });
    return db;
  };
@@ -178,7 +193,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
  const validateConnection = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    let isConnected = false;
    const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
    const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
      const db = await $getClient({ ...providerInputs, port, host });
      // oracle needs from keyword
      const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
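Besides swapping in the validated hostIp, the SQL provider hunk above caps the knex connection pool. A minimal sketch of that configuration, assuming the pg client is installed; all connection values are placeholders:

```ts
import knex from "knex";

// A bounded pool keeps idle connections at zero and caps concurrency, so
// short-lived dynamic-secret operations cannot exhaust the target database.
// Connection details below are placeholders, not real credentials.
const db = knex({
  client: "pg",
  connection: {
    host: "203.0.113.10", // the already-validated IP, not the raw hostname
    port: 5432,
    user: "app",
    password: "secret",
    database: "postgres"
  },
  acquireConnectionTimeout: 10_000, // fail fast instead of queueing forever
  pool: { min: 0, max: 7 }
});

// Usage sketch: run a lightweight probe, then tear the pool down.
db.raw("SELECT 1").finally(() => db.destroy());
```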
@@ -5,6 +5,7 @@ import { ActionProjectType, TableName } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@@ -86,6 +87,9 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
        message: "Failed to update more privileged identity",
        details: { missingPermissions: permissionBoundary.missingPermissions }
      });
    validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });

    const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
      slug,
@@ -173,6 +177,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
        details: { missingPermissions: permissionBoundary.missingPermissions }
      });

    validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });

    if (data?.slug) {
      const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
        slug: data.slug,
@@ -5,6 +5,7 @@ import { ActionProjectType } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@@ -102,6 +103,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
    });
    if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });

    validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });

    const packedPermission = JSON.stringify(packRules(customPermission));
    if (!dto.isTemporary) {
      const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
@@ -203,6 +208,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
    }

    const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
    validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });

    const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
    if (isTemporary) {
backend/src/ee/services/license/licence-enums.ts (new file, 24 lines)

@@ -0,0 +1,24 @@
export const BillingPlanRows = {
  MemberLimit: { name: "Organization member limit", field: "memberLimit" },
  IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
  WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
  EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },
  SecretVersioning: { name: "Secret versioning", field: "secretVersioning" },
  PitRecovery: { name: "Point in time recovery", field: "pitRecovery" },
  Rbac: { name: "RBAC", field: "rbac" },
  CustomRateLimits: { name: "Custom rate limits", field: "customRateLimits" },
  CustomAlerts: { name: "Custom alerts", field: "customAlerts" },
  AuditLogs: { name: "Audit logs", field: "auditLogs" },
  SamlSSO: { name: "SAML SSO", field: "samlSSO" },
  Hsm: { name: "Hardware Security Module (HSM)", field: "hsm" },
  OidcSSO: { name: "OIDC SSO", field: "oidcSSO" },
  SecretApproval: { name: "Secret approvals", field: "secretApproval" },
  SecretRotation: { name: "Secret rotation", field: "secretRotation" },
  InstanceUserManagement: { name: "Instance User Management", field: "instanceUserManagement" },
  ExternalKms: { name: "External KMS", field: "externalKms" }
} as const;

export const BillingPlanTableHead = {
  Allowed: { name: "Allowed" },
  Used: { name: "Used" }
} as const;
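A minimal sketch of how row definitions like these can drive a usage table; buildBillingTable, FeatureSet, and the sample numbers are hypothetical (the real wiring appears in the license service hunk that follows):

```ts
// Hypothetical, trimmed-down stand-ins for BillingPlanRows and the feature set.
const BillingPlanRows = {
  MemberLimit: { name: "Organization member limit", field: "memberLimit" },
  WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" }
} as const;

type FeatureSet = Record<string, number | boolean>;

// Build the { name, allowed, used } rows a billing page would render.
const buildBillingTable = (features: FeatureSet, usage: Record<string, number>) =>
  Object.values(BillingPlanRows).map(({ name, field }) => ({
    name,
    allowed: features[field],
    used: field in usage ? usage[field].toString() : "-"
  }));

// Usage sketch with made-up numbers.
console.log(buildBillingTable({ memberLimit: 10, workspaceLimit: 3 }, { memberLimit: 4 }));
```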
@@ -12,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";

import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
@@ -28,6 +31,7 @@ import {
  TFeatureSet,
  TGetOrgBillInfoDTO,
  TGetOrgTaxIdDTO,
  TOfflineLicense,
  TOfflineLicenseContents,
  TOrgInvoiceDTO,
  TOrgLicensesDTO,
@@ -39,10 +43,12 @@
} from "./license-types";

type TLicenseServiceFactoryDep = {
  orgDAL: Pick<TOrgDALFactory, "findOrgById">;
  orgDAL: Pick<TOrgDALFactory, "findOrgById" | "countAllOrgMembers">;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseDAL: TLicenseDALFactory;
  keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
  identityOrgMembershipDAL: TIdentityOrgDALFactory;
  projectDAL: TProjectDALFactory;
};

export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
@@ -57,11 +63,14 @@ export const licenseServiceFactory = ({
  orgDAL,
  permissionService,
  licenseDAL,
  keyStore
  keyStore,
  identityOrgMembershipDAL,
  projectDAL
}: TLicenseServiceFactoryDep) => {
  let isValidLicense = false;
  let instanceType = InstanceType.OnPrem;
  let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
  let selfHostedLicense: TOfflineLicense | null = null;

  const appCfg = getConfig();
  const licenseServerCloudApi = setupLicenseRequestWithStore(
@@ -125,6 +134,7 @@
        instanceType = InstanceType.EnterpriseOnPremOffline;
        logger.info(`Instance type: ${InstanceType.EnterpriseOnPremOffline}`);
        isValidLicense = true;
        selfHostedLicense = contents.license;
        return;
      }
    }
@@ -348,10 +358,21 @@
        message: `Organization with ID '${orgId}' not found`
      });
    }
    const { data } = await licenseServerCloudApi.request.get(
      `/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
    );
    return data;
    if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
      const { data } = await licenseServerCloudApi.request.get(
        `/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
      );
      return data;
    }

    return {
      currentPeriodStart: selfHostedLicense?.issuedAt ? Date.parse(selfHostedLicense?.issuedAt) / 1000 : undefined,
      currentPeriodEnd: selfHostedLicense?.expiresAt ? Date.parse(selfHostedLicense?.expiresAt) / 1000 : undefined,
      interval: "month",
      intervalCount: 1,
      amount: 0,
      quantity: 1
    };
  };

  // returns org current plan feature table
@@ -365,10 +386,41 @@
        message: `Organization with ID '${orgId}' not found`
      });
    }
    const { data } = await licenseServerCloudApi.request.get(
      `/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
    if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
      const { data } = await licenseServerCloudApi.request.get(
        `/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
      );
      return data;
    }

    const mappedRows = await Promise.all(
      Object.values(BillingPlanRows).map(async ({ name, field }: { name: string; field: string }) => {
        const allowed = onPremFeatures[field as keyof TFeatureSet];
        let used = "-";

        if (field === BillingPlanRows.MemberLimit.field) {
          const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
          used = orgMemberships.toString();
        } else if (field === BillingPlanRows.WorkspaceLimit.field) {
          const projects = await projectDAL.find({ orgId });
          used = projects.length.toString();
        } else if (field === BillingPlanRows.IdentityLimit.field) {
          const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
          used = identities.toString();
        }

        return {
          name,
          allowed,
          used
        };
      })
    );
    return data;

    return {
      head: Object.values(BillingPlanTableHead),
      rows: mappedRows
    };
  };

  const getOrgBillingDetails = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {
@@ -5,6 +5,7 @@ import { ActionProjectType, TableName } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -92,6 +93,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
    if (existingSlug)
      throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });

    validateHandlebarTemplate("User Additional Privilege Create", JSON.stringify(customPermission || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });

    const packedPermission = JSON.stringify(packRules(customPermission));
    if (!dto.isTemporary) {
      const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
@@ -185,6 +190,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
      throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
    }

    validateHandlebarTemplate("User Additional Privilege Update", JSON.stringify(dto.permissions || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });

    const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;

    const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));
@@ -8,10 +8,9 @@ import axios from "axios";
import jmespath from "jmespath";
import knex from "knex";

import { getConfig } from "@app/lib/config/env";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TAssignOp, TDbProviderClients, TDirectAssignOp, THttpProviderFunction } from "../templates/types";
import { TSecretRotationData, TSecretRotationDbFn } from "./secret-rotation-queue-types";

@@ -88,32 +87,14 @@ export const secretRotationDbFn = async ({
  variables,
  options
}: TSecretRotationDbFn) => {
  const appCfg = getConfig();

  const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
  const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
  const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);

  if (
    isCloud &&
    // internal ips
    (host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
  )
    throw new Error("Invalid db host");
  if (
    host === "localhost" ||
    host === "127.0.0.1" ||
    // database infisical uses
    dbHost === host
  )
    throw new Error("Invalid db host");

  const [hostIp] = await verifyHostInputValidity(host);
  const db = knex({
    client,
    connection: {
      database,
      port,
      host,
      host: hostIp,
      user: username,
      password,
      connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
backend/src/lib/ip/ipRange.ts (new file, 61 lines)

@@ -0,0 +1,61 @@
import { BlockList } from "node:net";

import { BadRequestError } from "../errors";

// Define BlockList instances for each range type
const ipv4RangeLists: Record<string, BlockList> = {
  unspecified: new BlockList(),
  broadcast: new BlockList(),
  multicast: new BlockList(),
  linkLocal: new BlockList(),
  loopback: new BlockList(),
  carrierGradeNat: new BlockList(),
  private: new BlockList(),
  reserved: new BlockList()
};

// Add IPv4 CIDR ranges to each BlockList
ipv4RangeLists.unspecified.addSubnet("0.0.0.0", 8);
ipv4RangeLists.broadcast.addAddress("255.255.255.255");
ipv4RangeLists.multicast.addSubnet("224.0.0.0", 4);
ipv4RangeLists.linkLocal.addSubnet("169.254.0.0", 16);
ipv4RangeLists.loopback.addSubnet("127.0.0.0", 8);
ipv4RangeLists.carrierGradeNat.addSubnet("100.64.0.0", 10);

// IPv4 Private ranges
ipv4RangeLists.private.addSubnet("10.0.0.0", 8);
ipv4RangeLists.private.addSubnet("172.16.0.0", 12);
ipv4RangeLists.private.addSubnet("192.168.0.0", 16);

// IPv4 Reserved ranges
ipv4RangeLists.reserved.addSubnet("192.0.0.0", 24);
ipv4RangeLists.reserved.addSubnet("192.0.2.0", 24);
ipv4RangeLists.reserved.addSubnet("192.88.99.0", 24);
ipv4RangeLists.reserved.addSubnet("198.18.0.0", 15);
ipv4RangeLists.reserved.addSubnet("198.51.100.0", 24);
ipv4RangeLists.reserved.addSubnet("203.0.113.0", 24);
ipv4RangeLists.reserved.addSubnet("240.0.0.0", 4);

/**
 * Checks if an IP address (IPv4) is private or public
 * inspired by: https://github.com/whitequark/ipaddr.js/blob/main/lib/ipaddr.js
 */
export const getIpRange = (ip: string): string => {
  try {
    const rangeLists = ipv4RangeLists;
    // Check each range type
    for (const rangeName in rangeLists) {
      if (Object.hasOwn(rangeLists, rangeName)) {
        if (rangeLists[rangeName].check(ip)) {
          return rangeName;
        }
      }
    }

    // If no range matched, it's a public address
    return "unicast";
  } catch (error) {
    throw new BadRequestError({ message: "Invalid IP address", error });
  }
};

export const isPrivateIp = (ip: string) => getIpRange(ip) !== "unicast";
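A minimal sketch exercising the same BlockList idea as ipRange.ts above, trimmed to two ranges; the sample addresses are illustrative:

```ts
import { BlockList } from "node:net";

// Hypothetical, cut-down classifier: one BlockList per range, checked in order,
// falling back to "unicast" for public addresses.
const privateList = new BlockList();
privateList.addSubnet("10.0.0.0", 8);
privateList.addSubnet("172.16.0.0", 12);
privateList.addSubnet("192.168.0.0", 16);

const loopbackList = new BlockList();
loopbackList.addSubnet("127.0.0.0", 8);

const getIpRange = (ip: string): string => {
  if (loopbackList.check(ip)) return "loopback";
  if (privateList.check(ip)) return "private";
  return "unicast";
};

const isPrivateIp = (ip: string) => getIpRange(ip) !== "unicast";

// Usage sketch: only "unicast" results are safe targets for outbound connections.
console.log(getIpRange("192.168.1.5")); // "private"
console.log(isPrivateIp("8.8.8.8"));    // false
```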
backend/src/lib/template/validate-handlebars.ts (new file, 21 lines)

@@ -0,0 +1,21 @@
import handlebars from "handlebars";

import { BadRequestError } from "../errors";
import { logger } from "../logger";

type SanitizationArg = {
  allowedExpressions?: (arg: string) => boolean;
};

export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
  const parsedAst = handlebars.parse(template);
  parsedAst.body.forEach((el) => {
    if (el.type === "ContentStatement") return;
    if (el.type === "MustacheStatement" && "path" in el) {
      const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
      if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
    }
    logger.error(el, "Template sanitization failed");
    throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
  });
};
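A minimal sketch of how an allow-list template check like the one above is typically used before rendering; assertTemplateIsSafe, the allowed names, and the SQL statement are examples only:

```ts
import handlebars from "handlebars";

// Hypothetical caller: reject templates that reference anything beyond an
// explicit allow-list of expressions, then render the vetted template.
const allowed = new Set(["username", "password", "expiration"]);

const assertTemplateIsSafe = (template: string) => {
  for (const node of handlebars.parse(template).body) {
    if (node.type === "ContentStatement") continue;
    if (node.type === "MustacheStatement" && "path" in node) {
      const { path } = node as { path: { type: string; original: string } };
      if (path.type === "PathExpression" && allowed.has(path.original)) continue;
    }
    throw new Error("Template sanitization failed");
  }
};

const creationStatement = `CREATE USER "{{username}}" WITH PASSWORD '{{password}}';`;
assertTemplateIsSafe(creationStatement); // passes: only allow-listed expressions
// assertTemplateIsSafe("{{lookup this 'constructor'}}") would throw.

const rendered = handlebars.compile(creationStatement)({ username: "u1", password: "p1" });
console.log(rendered);
```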
@@ -413,7 +413,14 @@ export const registerRoutes = async (
    serviceTokenDAL,
    projectDAL
  });
  const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
  const licenseService = licenseServiceFactory({
    permissionService,
    orgDAL,
    licenseDAL,
    keyStore,
    identityOrgMembershipDAL,
    projectDAL
  });

  const hsmService = hsmServiceFactory({
    hsmModule,
@@ -64,9 +64,11 @@ export const identityUaServiceFactory = ({
      ipAddress: ip,
      trustedIps: identityUa.clientSecretTrustedIps as TIp[]
    });
    const clientSecretPrefix = clientSecret.slice(0, 4);
    const clientSecrtInfo = await identityUaClientSecretDAL.find({
      identityUAId: identityUa.id,
      isClientSecretRevoked: false
      isClientSecretRevoked: false,
      clientSecretPrefix
    });

    let validClientSecretInfo: (typeof clientSecrtInfo)[0] | null = null;
@@ -251,14 +253,17 @@ export const identityUaServiceFactory = ({
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });

    const uaIdentityAuth = await identityUaDAL.findOne({ identityId });
    if (!uaIdentityAuth) {
      throw new NotFoundError({ message: `Failed to find universal auth for identity with ID ${identityId}` });
    }

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have universal auth"
      });
    }

    const uaIdentityAuth = await identityUaDAL.findOne({ identityId });

    if (
      (accessTokenMaxTTL || uaIdentityAuth.accessTokenMaxTTL) > 0 &&
      (accessTokenTTL || uaIdentityAuth.accessTokenMaxTTL) > (accessTokenMaxTTL || uaIdentityAuth.accessTokenMaxTTL)
@@ -327,14 +332,17 @@ export const identityUaServiceFactory = ({
    const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
    if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });

    const uaIdentityAuth = await identityUaDAL.findOne({ identityId });
    if (!uaIdentityAuth) {
      throw new NotFoundError({ message: `Failed to find universal auth for identity with ID ${identityId}` });
    }

    if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) {
      throw new BadRequestError({
        message: "The identity does not have universal auth"
      });
    }

    const uaIdentityAuth = await identityUaDAL.findOne({ identityId });

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
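The clientSecretPrefix filter added above narrows the rows fetched before any hash comparison. A minimal sketch of that idea, assuming bcrypt-style hashes via the bcryptjs package; the data shapes and helper are illustrative, not the service's actual code:

```ts
import bcrypt from "bcryptjs";

type StoredSecret = { clientSecretPrefix: string; clientSecretHash: string };

// Hypothetical lookup: instead of bcrypt-comparing the presented secret against
// every stored hash, first filter by the stored 4-character prefix so only a
// handful of candidates need the expensive comparison.
const findMatchingSecret = async (presented: string, rows: StoredSecret[]) => {
  const prefix = presented.slice(0, 4);
  const candidates = rows.filter((row) => row.clientSecretPrefix === prefix);
  for (const row of candidates) {
    // Sequential awaits are intentional: stop at the first match.
    if (await bcrypt.compare(presented, row.clientSecretHash)) return row;
  }
  return null;
};
```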
@@ -9,6 +9,7 @@ import {
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";

import { ActorAuthMethod } from "../auth/auth-type";
@@ -72,6 +73,9 @@ export const projectRoleServiceFactory = ({
      throw new BadRequestError({ name: "Create Role", message: "Project role with same slug already exists" });
    }

    validateHandlebarTemplate("Project Role Create", JSON.stringify(data.permissions || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });
    const role = await projectRoleDAL.create({
      ...data,
      projectId
@@ -134,7 +138,9 @@ export const projectRoleServiceFactory = ({
      if (existingRole && existingRole.id !== roleId)
        throw new BadRequestError({ name: "Update Role", message: "Project role with the same slug already exists" });
    }

    validateHandlebarTemplate("Project Role Update", JSON.stringify(data.permissions || []), {
      allowedExpressions: (val) => val.includes("identity.")
    });
    const updatedRole = await projectRoleDAL.updateById(projectRole.id, {
      ...data,
      permissions: data.permissions ? data.permissions : undefined
@@ -15,7 +15,9 @@ export enum PostHogEventTypes {
  UserOrgInvitation = "User Org Invitation",
  TelemetryInstanceStats = "Self Hosted Instance Stats",
  SecretRequestCreated = "Secret Request Created",
  SecretRequestDeleted = "Secret Request Deleted"
  SecretRequestDeleted = "Secret Request Deleted",
  SignSshKey = "Sign SSH Key",
  IssueSshCreds = "Issue SSH Credentials"
}

export type TSecretModifiedEvent = {
@@ -139,6 +141,24 @@ export type TSecretRequestDeletedEvent = {
  };
};

export type TSignSshKeyEvent = {
  event: PostHogEventTypes.SignSshKey;
  properties: {
    certificateTemplateId: string;
    principals: string[];
    userAgent?: string;
  };
};

export type TIssueSshCredsEvent = {
  event: PostHogEventTypes.IssueSshCreds;
  properties: {
    certificateTemplateId: string;
    principals: string[];
    userAgent?: string;
  };
};

export type TPostHogEvent = { distinctId: string } & (
  | TSecretModifiedEvent
  | TAdminInitEvent
@@ -151,4 +171,6 @@ export type TPostHogEvent = { distinctId: string } & (
  | TTelemetryInstanceStatsEvent
  | TSecretRequestCreatedEvent
  | TSecretRequestDeletedEvent
  | TSignSshKeyEvent
  | TIssueSshCredsEvent
);
@@ -12,7 +12,7 @@ require (
	github.com/fatih/semgroup v1.2.0
	github.com/gitleaks/go-gitdiff v0.8.0
	github.com/h2non/filetype v1.1.3
	github.com/infisical/go-sdk v0.4.8
	github.com/infisical/go-sdk v0.5.1
	github.com/infisical/infisical-kmip v0.3.5
	github.com/mattn/go-isatty v0.0.20
	github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a
@@ -34,6 +34,7 @@ require (
	golang.org/x/sys v0.31.0
	golang.org/x/term v0.30.0
	gopkg.in/yaml.v2 v2.4.0
	gopkg.in/yaml.v3 v3.0.1
)

require (
@@ -125,7 +126,6 @@ require (
	google.golang.org/grpc v1.64.1 // indirect
	google.golang.org/protobuf v1.36.1 // indirect
	gopkg.in/ini.v1 v1.62.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)

require (
@@ -277,8 +277,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/infisical/go-sdk v0.4.8 h1:aphRnaauC5//PkP1ZbY9RSK2RiT1LjPS5o4CbX0x5OQ=
github.com/infisical/go-sdk v0.4.8/go.mod h1:bMO9xSaBeXkDBhTIM4FkkREAfw2V8mv5Bm7lvo4+uDk=
github.com/infisical/go-sdk v0.5.1 h1:bl0D4A6CmvfL8RwEQTcZh39nsxC6q3HSs76/4J8grWY=
github.com/infisical/go-sdk v0.5.1/go.mod h1:ExjqFLRz7LSpZpGluqDLvFl6dFBLq5LKyLW7GBaMAIs=
github.com/infisical/infisical-kmip v0.3.5 h1:QM3s0e18B+mYv3a9HQNjNAlbwZJBzXq5BAJM2scIeiE=
github.com/infisical/infisical-kmip v0.3.5/go.mod h1:bO1M4YtKyutNg1bREPmlyZspC5duSR7hyQ3lPmLzrIs=
github.com/jedib0t/go-pretty v4.3.0+incompatible h1:CGs8AVhEKg/n9YbUenWmNStRW2PHJzaeDodcfvRAbIo=
@@ -858,4 +858,4 @@ honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
@@ -29,7 +29,6 @@ import (
	"github.com/Infisical/infisical-merge/packages/config"
	"github.com/Infisical/infisical-merge/packages/models"
	"github.com/Infisical/infisical-merge/packages/util"
	"github.com/go-resty/resty/v2"
	"github.com/spf13/cobra"
)

@@ -514,7 +513,10 @@ type NewAgentMangerOptions struct {
}

func NewAgentManager(options NewAgentMangerOptions) *AgentManager {

	customHeaders, err := util.GetInfisicalCustomHeadersMap()
	if err != nil {
		util.HandleError(err, "Unable to get custom headers")
	}
	return &AgentManager{
		filePaths: options.FileDeposits,
		templates: options.Templates,
@@ -529,6 +531,7 @@ func NewAgentManager(options NewAgentMangerOptions) *AgentManager {
			SiteUrl:          config.INFISICAL_URL,
			UserAgent:        api.USER_AGENT, // ? Should we perhaps use a different user agent for the Agent for better analytics?
			AutoTokenRefresh: false,
			CustomHeaders:    customHeaders,
		}),
	}

@@ -716,7 +719,11 @@ func (tm *AgentManager) FetchNewAccessToken() error {

// Refreshes the existing access token
func (tm *AgentManager) RefreshAccessToken() error {
	httpClient := resty.New()
	httpClient, err := util.GetRestyClientWithCustomHeaders()
	if err != nil {
		return err
	}

	httpClient.SetRetryCount(10000).
		SetRetryMaxWaitTime(20 * time.Second).
		SetRetryWaitTime(5 * time.Second)
@ -10,7 +10,6 @@ import (
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
@ -70,8 +69,12 @@ var bootstrapCmd = &cobra.Command{
|
||||
return
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetHeader("Accept", "application/json")
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
log.Error().Msgf("Failed to get resty client with custom headers: %v", err)
|
||||
return
|
||||
}
|
||||
httpClient.SetHeader("Accept", "application/json")
|
||||
|
||||
bootstrapResponse, err := api.CallBootstrapInstance(httpClient, api.BootstrapInstanceRequest{
|
||||
Domain: util.AppendAPIEndpoint(domain),
|
||||
|
@ -14,7 +14,6 @@ import (
|
||||
// "github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
// "github.com/Infisical/infisical-merge/packages/visualize"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/posthog/posthog-go"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
@ -56,7 +55,10 @@ func getDynamicSecretList(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
|
||||
var infisicalToken string
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
@ -85,10 +87,16 @@ func getDynamicSecretList(cmd *cobra.Command, args []string) {
|
||||
|
||||
httpClient.SetAuthToken(infisicalToken)
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
infisicalClient.Auth().SetAccessToken(infisicalToken)
|
||||
|
||||
@ -164,7 +172,10 @@ func createDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
|
||||
var infisicalToken string
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
@ -193,10 +204,16 @@ func createDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
|
||||
httpClient.SetAuthToken(infisicalToken)
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
infisicalClient.Auth().SetAccessToken(infisicalToken)
|
||||
|
||||
@ -286,7 +303,10 @@ func renewDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
|
||||
var infisicalToken string
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
@ -315,10 +335,16 @@ func renewDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
|
||||
httpClient.SetAuthToken(infisicalToken)
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
infisicalClient.Auth().SetAccessToken(infisicalToken)
|
||||
|
||||
@ -384,7 +410,10 @@ func revokeDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
|
||||
var infisicalToken string
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
@ -413,10 +442,16 @@ func revokeDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
|
||||
httpClient.SetAuthToken(infisicalToken)
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
infisicalClient.Auth().SetAccessToken(infisicalToken)
|
||||
|
||||
@ -481,7 +516,10 @@ func listDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
|
||||
var infisicalToken string
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
@ -510,10 +548,16 @@ func listDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
|
||||
|
||||
httpClient.SetAuthToken(infisicalToken)
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
infisicalClient.Auth().SetAccessToken(infisicalToken)
|
||||
|
||||
|
@ -10,7 +10,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/manifoldco/promptui"
|
||||
"github.com/posthog/posthog-go"
|
||||
"github.com/rs/zerolog/log"
|
||||
@ -50,7 +49,10 @@ var initCmd = &cobra.Command{
|
||||
util.PrintErrorMessageAndExit("Your login session has expired, please run [infisical login] and try again")
|
||||
}
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
httpClient.SetAuthToken(userCreds.UserCredentials.JTWToken)
|
||||
|
||||
organizationResponse, err := api.CallGetAllOrganizations(httpClient)
|
||||
@ -81,7 +83,10 @@ var initCmd = &cobra.Command{
|
||||
for i < 6 {
|
||||
mfaVerifyCode := askForMFACode(tokenResponse.MfaMethod)
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
httpClient.SetAuthToken(tokenResponse.Token)
|
||||
verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
|
||||
Email: userCreds.UserCredentials.Email,
|
||||
|
@ -27,7 +27,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/srp"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/fatih/color"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/manifoldco/promptui"
|
||||
"github.com/posthog/posthog-go"
|
||||
"github.com/rs/cors"
|
||||
@ -178,10 +177,16 @@ var loginCmd = &cobra.Command{
|
||||
return
|
||||
}
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
|
||||
loginMethod, err := cmd.Flags().GetString("method")
|
||||
@ -359,7 +364,10 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
|
||||
for i < 6 {
|
||||
mfaVerifyCode := askForMFACode("email")
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
httpClient.SetAuthToken(loginTwoResponse.Token)
|
||||
verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
|
||||
Email: email,
|
||||
@ -726,7 +734,10 @@ func askForLoginCredentials() (email string, password string, err error) {
|
||||
|
||||
func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2Response, *api.GetLoginTwoV2Response, error) {
|
||||
log.Debug().Msg(fmt.Sprint("getFreshUserCredentials: ", "email", email, "password: ", password))
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
httpClient.SetRetryCount(5)
|
||||
|
||||
params := srp.GetParams(4096)
|
||||
@ -776,7 +787,10 @@ func getFreshUserCredentials(email string, password string) (*api.GetLoginOneV2R
|
||||
func GetJwtTokenWithOrganizationId(oldJwtToken string, email string) string {
|
||||
log.Debug().Msg(fmt.Sprint("GetJwtTokenWithOrganizationId: ", "oldJwtToken", oldJwtToken))
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
httpClient.SetAuthToken(oldJwtToken)
|
||||
|
||||
organizationResponse, err := api.CallGetAllOrganizations(httpClient)
|
||||
@ -811,7 +825,10 @@ func GetJwtTokenWithOrganizationId(oldJwtToken string, email string) string {
|
||||
for i < 6 {
|
||||
mfaVerifyCode := askForMFACode(selectedOrgRes.MfaMethod)
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
httpClient.SetAuthToken(selectedOrgRes.Token)
|
||||
verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
|
||||
Email: email,
|
||||
@ -913,7 +930,14 @@ func askToPasteJwtToken(success chan models.UserCredentials, failure chan error)
|
||||
}
|
||||
|
||||
// verify JTW
|
||||
httpClient := resty.New().
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
failure <- err
|
||||
fmt.Println("Error getting resty client with custom headers", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
httpClient.
|
||||
SetAuthToken(userCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
|
@ -13,7 +13,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/Infisical/infisical-merge/packages/visualize"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/posthog/posthog-go"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
@ -274,8 +273,12 @@ var secretsDeleteCmd = &cobra.Command{
|
||||
util.HandleError(err, "Unable to parse flag")
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetHeader("Accept", "application/json")
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
|
||||
httpClient.SetHeader("Accept", "application/json")
|
||||
|
||||
if projectId == "" {
|
||||
workspaceFile, err := util.GetWorkSpaceFromFile()
|
||||
|
@ -315,10 +315,16 @@ func issueCredentials(cmd *cobra.Command, args []string) {
|
||||
}
|
||||
}
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
infisicalClient.Auth().SetAccessToken(infisicalToken)
|
||||
|
||||
@ -555,10 +561,16 @@ func signKey(cmd *cobra.Command, args []string) {
|
||||
signedKeyPath = outFilePath
|
||||
}
|
||||
|
||||
customHeaders, err := util.GetInfisicalCustomHeadersMap()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get custom headers")
|
||||
}
|
||||
|
||||
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
|
||||
SiteUrl: config.INFISICAL_URL,
|
||||
UserAgent: api.USER_AGENT,
|
||||
AutoTokenRefresh: false,
|
||||
CustomHeaders: customHeaders,
|
||||
})
|
||||
infisicalClient.Auth().SetAccessToken(infisicalToken)
|
||||
|
||||
|
@ -13,7 +13,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@ -136,7 +135,11 @@ var tokensCreateCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
// make a call to the api to save the encrypted symmetric key details
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
util.HandleError(err, "Unable to get resty client with custom headers")
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
|
@ -13,6 +13,7 @@ import (
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/systemd"
|
||||
"github.com/Infisical/infisical-merge/packages/util"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/pion/dtls/v3"
|
||||
"github.com/pion/logging"
|
||||
@ -40,7 +41,11 @@ type Gateway struct {
|
||||
}
|
||||
|
||||
func NewGateway(identityToken string) (Gateway, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient, err := util.GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return Gateway{}, fmt.Errorf("unable to get client with custom headers [err=%v]", err)
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(identityToken)
|
||||
|
||||
return Gateway{
|
||||
|
@@ -4,8 +4,11 @@ import (
	"fmt"
	"net/http"
	"os"
	"strings"
	"unicode"

	"github.com/Infisical/infisical-merge/packages/config"
	"github.com/go-resty/resty/v2"
)

func GetHomeDir() (string, error) {
@@ -27,3 +30,88 @@ func ValidateInfisicalAPIConnection() (ok bool) {
	_, err := http.Get(fmt.Sprintf("%v/status", config.INFISICAL_URL))
	return err == nil
}

func GetRestyClientWithCustomHeaders() (*resty.Client, error) {
	httpClient := resty.New()
	customHeaders := os.Getenv("INFISICAL_CUSTOM_HEADERS")
	if customHeaders != "" {
		headers, err := GetInfisicalCustomHeadersMap()
		if err != nil {
			return nil, err
		}

		httpClient.SetHeaders(headers)
	}
	return httpClient, nil
}

func GetInfisicalCustomHeadersMap() (map[string]string, error) {
	customHeaders := os.Getenv("INFISICAL_CUSTOM_HEADERS")
	if customHeaders == "" {
		return nil, nil
	}

	headers := map[string]string{}

	pos := 0
	for pos < len(customHeaders) {
		for pos < len(customHeaders) && unicode.IsSpace(rune(customHeaders[pos])) {
			pos++
		}

		if pos >= len(customHeaders) {
			break
		}

		keyStart := pos
		for pos < len(customHeaders) && customHeaders[pos] != '=' && !unicode.IsSpace(rune(customHeaders[pos])) {
			pos++
		}

		if pos >= len(customHeaders) || customHeaders[pos] != '=' {
			return nil, fmt.Errorf("invalid custom header format. Expected \"headerKey1=value1 headerKey2=value2 ....\" but got %v", customHeaders)
		}

		key := customHeaders[keyStart:pos]
		pos++

		for pos < len(customHeaders) && unicode.IsSpace(rune(customHeaders[pos])) {
			pos++
		}

		var value string

		if pos < len(customHeaders) {
			if customHeaders[pos] == '"' || customHeaders[pos] == '\'' {
				quoteChar := customHeaders[pos]
				pos++
				valueStart := pos

				for pos < len(customHeaders) &&
					(customHeaders[pos] != quoteChar ||
						(pos > 0 && customHeaders[pos-1] == '\\')) {
					pos++
				}

				if pos < len(customHeaders) {
					value = customHeaders[valueStart:pos]
					pos++
				} else {
					value = customHeaders[valueStart:]
				}
			} else {
				valueStart := pos
				for pos < len(customHeaders) && !unicode.IsSpace(rune(customHeaders[pos])) {
					pos++
				}
				value = customHeaders[valueStart:pos]
			}
		}

		if key != "" && !strings.EqualFold(key, "User-Agent") && !strings.EqualFold(key, "Accept") {
			headers[key] = value
		}
	}

	return headers, nil
}
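As an aside, here is a minimal sketch (not part of the diff) of how these helpers could be exercised from inside the CLI module; the header names and values are placeholders, and the expected output shown in the comment follows from the parser above:

```go
package main

import (
	"fmt"
	"os"

	"github.com/Infisical/infisical-merge/packages/util"
)

func main() {
	// Space-separated key=value pairs; quoted values may contain spaces.
	os.Setenv("INFISICAL_CUSTOM_HEADERS", `Access-Client-Id=abc123 Access-Client-Secret="s3cr3t value"`)

	headers, err := util.GetInfisicalCustomHeadersMap()
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println(headers) // map[Access-Client-Id:abc123 Access-Client-Secret:s3cr3t value]

	// Resty client pre-populated with the same headers.
	httpClient, err := util.GetRestyClientWithCustomHeaders()
	if err != nil {
		fmt.Println("client error:", err)
		return
	}
	httpClient.SetHeader("Accept", "application/json")
}
```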
@@ -56,6 +56,7 @@ func WriteInitalConfig(userCredentials *models.UserCredentials) error {
		LoggedInUsers:          existingConfigFile.LoggedInUsers,
		VaultBackendType:       existingConfigFile.VaultBackendType,
		VaultBackendPassphrase: existingConfigFile.VaultBackendPassphrase,
		Domains:                existingConfigFile.Domains,
	}

	configFileMarshalled, err := json.Marshal(configFile)
@ -9,7 +9,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/config"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/zalando/go-keyring"
|
||||
)
|
||||
|
||||
@ -85,7 +84,12 @@ func GetCurrentLoggedInUserDetails(setConfigVariables bool) (LoggedInUserDetails
|
||||
}
|
||||
|
||||
// check to to see if the JWT is still valid
|
||||
httpClient := resty.New().
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return LoggedInUserDetails{}, fmt.Errorf("getCurrentLoggedInUserDetails: unable to get client with custom headers [err=%s]", err)
|
||||
}
|
||||
|
||||
httpClient.
|
||||
SetAuthToken(userCreds.JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
|
@ -6,7 +6,6 @@ import (
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
@ -65,7 +64,11 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
|
||||
|
||||
func GetFoldersViaJTW(JTWToken string, workspaceId string, environmentName string, foldersPath string) ([]models.SingleFolder, error) {
|
||||
// set up resty client
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(JTWToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
@ -100,7 +103,10 @@ func GetFoldersViaServiceToken(fullServiceToken string, workspaceId string, envi
|
||||
|
||||
serviceToken := fmt.Sprintf("%v.%v.%v", serviceTokenParts[0], serviceTokenParts[1], serviceTokenParts[2])
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get client with custom headers [err=%v]", err)
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(serviceToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
@ -143,7 +149,11 @@ func GetFoldersViaServiceToken(fullServiceToken string, workspaceId string, envi
|
||||
}
|
||||
|
||||
func GetFoldersViaMachineIdentity(accessToken string, workspaceId string, envSlug string, foldersPath string) ([]models.SingleFolder, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(accessToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
@ -191,9 +201,12 @@ func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, er
|
||||
}
|
||||
|
||||
// set up resty client
|
||||
httpClient := resty.New()
|
||||
httpClient.
|
||||
SetAuthToken(params.InfisicalToken).
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return models.SingleFolder{}, err
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(params.InfisicalToken).
|
||||
SetHeader("Accept", "application/json").
|
||||
SetHeader("Content-Type", "application/json")
|
||||
|
||||
@ -238,9 +251,12 @@ func DeleteFolder(params models.DeleteFolderParameters) ([]models.SingleFolder,
|
||||
}
|
||||
|
||||
// set up resty client
|
||||
httpClient := resty.New()
|
||||
httpClient.
|
||||
SetAuthToken(params.InfisicalToken).
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(params.InfisicalToken).
|
||||
SetHeader("Accept", "application/json").
|
||||
SetHeader("Content-Type", "application/json")
|
||||
|
||||
|
@ -16,7 +16,6 @@ import (
|
||||
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@ -120,7 +119,11 @@ func GetInfisicalToken(cmd *cobra.Command) (token *models.TokenDetails, err erro
|
||||
}
|
||||
|
||||
func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuthLoginResponse, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return api.UniversalAuthLoginResponse{}, err
|
||||
}
|
||||
|
||||
httpClient.SetRetryCount(10000).
|
||||
SetRetryMaxWaitTime(20 * time.Second).
|
||||
SetRetryWaitTime(5 * time.Second)
|
||||
@ -135,7 +138,11 @@ func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuth
|
||||
|
||||
func RenewMachineIdentityAccessToken(accessToken string) (string, error) {
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
httpClient.SetRetryCount(10000).
|
||||
SetRetryMaxWaitTime(20 * time.Second).
|
||||
SetRetryWaitTime(5 * time.Second)
|
||||
|
@ -14,7 +14,6 @@ import (
|
||||
"github.com/Infisical/infisical-merge/packages/api"
|
||||
"github.com/Infisical/infisical-merge/packages/crypto"
|
||||
"github.com/Infisical/infisical-merge/packages/models"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/zalando/go-keyring"
|
||||
"gopkg.in/yaml.v3"
|
||||
@ -28,7 +27,10 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
|
||||
|
||||
serviceToken := fmt.Sprintf("%v.%v.%v", serviceTokenParts[0], serviceTokenParts[1], serviceTokenParts[2])
|
||||
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get client with custom headers [err=%v]", err)
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(serviceToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
@ -79,7 +81,11 @@ func GetPlainTextSecretsViaServiceToken(fullServiceToken string, environment str
|
||||
}
|
||||
|
||||
func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentName string, secretsPath string, includeImports bool, recursive bool, tagSlugs string, expandSecretReferences bool) (models.PlaintextSecretResult, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return models.PlaintextSecretResult{}, err
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(accessToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
@ -122,7 +128,11 @@ func GetPlainTextSecretsV3(accessToken string, workspaceId string, environmentNa
|
||||
}
|
||||
|
||||
func GetSinglePlainTextSecretByNameV3(accessToken string, workspaceId string, environmentName string, secretsPath string, secretName string) (models.SingleEnvironmentVariable, string, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return models.SingleEnvironmentVariable{}, "", err
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(accessToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
@ -153,7 +163,11 @@ func GetSinglePlainTextSecretByNameV3(accessToken string, workspaceId string, en
|
||||
}
|
||||
|
||||
func CreateDynamicSecretLease(accessToken string, projectSlug string, environmentName string, secretsPath string, slug string, ttl string) (models.DynamicSecretLease, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return models.DynamicSecretLease{}, err
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(accessToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
@ -525,7 +539,11 @@ func GetEnvelopmentBasedOnGitBranch(workspaceFile models.WorkspaceConfigFile) st
|
||||
}
|
||||
|
||||
func GetPlainTextWorkspaceKey(authenticationToken string, receiverPrivateKey string, workspaceId string) ([]byte, error) {
|
||||
httpClient := resty.New()
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("GetPlainTextWorkspaceKey: unable to get client with custom headers [err=%v]", err)
|
||||
}
|
||||
|
||||
httpClient.SetAuthToken(authenticationToken).
|
||||
SetHeader("Accept", "application/json")
|
||||
|
||||
@ -672,9 +690,12 @@ func SetRawSecrets(secretArgs []string, secretType string, environmentName strin
|
||||
getAllEnvironmentVariablesRequest.InfisicalToken = tokenDetails.Token
|
||||
}
|
||||
|
||||
httpClient := resty.New().
|
||||
SetAuthToken(tokenDetails.Token).
|
||||
SetHeader("Accept", "application/json")
|
||||
httpClient, err := GetRestyClientWithCustomHeaders()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get client with custom headers [err=%v]", err)
|
||||
}
|
||||
|
||||
httpClient.SetHeader("Accept", "application/json")
|
||||
|
||||
// pull current secrets
|
||||
secrets, err := GetAllEnvironmentVariables(getAllEnvironmentVariablesRequest, "")
|
||||
|
@@ -33,3 +33,28 @@ Yes. This is simply a configuration file and contains no sensitive data.
https://app.infisical.com/project/<your_project_id>/settings
```
</Accordion>

<Accordion title="How do I use custom headers with the Infisical CLI?">

The Infisical CLI supports custom HTTP headers for requests to servers that require additional authentication. Set these headers using the `INFISICAL_CUSTOM_HEADERS` environment variable:

```bash
export INFISICAL_CUSTOM_HEADERS="Access-Client-Id=your-client-id Access-Client-Secret=your-client-secret"
```

After setting this environment variable, run your Infisical commands as usual.
</Accordion>

<Accordion title="Why would I need to use custom headers?">

Custom headers are necessary when your Infisical server is protected by services like Cloudflare Access or other reverse proxies that require specific authentication headers. Without this feature, you would need to implement security workarounds that might compromise your security posture.
</Accordion>

<Accordion title="What format should I use for the custom headers?">

Custom headers should be specified in the format `headername1=headervalue1 headername2=headervalue2`, with spaces separating each header-value pair. For example:

```bash
export INFISICAL_CUSTOM_HEADERS="Header1=value1 Header2=value2 Header3=value3"
```
</Accordion>
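Based on the value parser added to the CLI in this change set, header values that contain spaces can likely be wrapped in single or double quotes; an illustrative example (the header names are placeholders):

```bash
export INFISICAL_CUSTOM_HEADERS='Access-Client-Id=your-client-id Access-Client-Secret="a value with spaces"'
```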
@@ -120,6 +120,22 @@ The CLI is designed for a variety of secret management applications ranging from
  </Tab>
</Tabs>

<Tip>
## Custom Request Headers

The Infisical CLI supports custom HTTP headers for requests to servers protected by authentication services such as Cloudflare Access. Configure these headers using the `INFISICAL_CUSTOM_HEADERS` environment variable:

```bash
# Syntax: headername1=headervalue1 headername2=headervalue2
export INFISICAL_CUSTOM_HEADERS="Access-Client-Id=your-client-id Access-Client-Secret=your-client-secret"

# Execute Infisical commands after setting the environment variable
infisical secrets ls
```

This functionality enables secure interaction with Infisical instances that require specific authentication headers.
</Tip>

## History

Your terminal keeps a history with the commands you run. When you create Infisical secrets directly from your terminal, they'll stay there for a while.
@@ -1,6 +1,6 @@
---
title: "Overview"
description: "Track evert event action performed within Infisical projects."
description: "Track all actions performed within Infisical"
---

<Info>
@@ -99,6 +99,10 @@ client := infisical.NewInfisicalClient(context.Background(), infisical.Config{
  <ParamField query="CacheExpiryInSeconds" type="number" default={0} optional>
    Defines how long certain responses should be cached in memory, in seconds. When set to a positive value, responses from specific methods (like secret fetching) will be cached for this duration. Set to 0 to disable caching.
  </ParamField>

  <ParamField query="CustomHeaders" type="map[string]string" optional>
    Allows you to pass custom headers to the HTTP requests made by the SDK. Expected format is a map of `Header1: Value1, Header2: Value 2`.
  </ParamField>
</Expandable>

</ParamField>
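For illustration, a hedged sketch of constructing the Go SDK client with `CustomHeaders` set, mirroring the config shape shown in the hunk header above; the site URL and header values are placeholders:

```go
package main

import (
	"context"

	infisical "github.com/infisical/go-sdk"
)

func main() {
	client := infisical.NewInfisicalClient(context.Background(), infisical.Config{
		SiteUrl: "https://app.infisical.com", // placeholder
		CustomHeaders: map[string]string{
			"Access-Client-Id":     "your-client-id",
			"Access-Client-Secret": "your-client-secret",
		},
	})
	_ = client // then authenticate and use the client as in the rest of the docs, e.g. client.Auth()
}
```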
@@ -1,4 +1,5 @@
export const isInfisicalCloud = () =>
  window.location.origin.includes("https://app.infisical.com") ||
  window.location.origin.includes("https://us.infisical.com") ||
  window.location.origin.includes("https://eu.infisical.com");
  window.location.origin.includes("https://eu.infisical.com") ||
  window.location.origin.includes("https://gamma.infisical.com");
@@ -182,7 +182,8 @@ export const useGetImportedSecretsAllEnvs = ({
              comment: encSecret.secretComment,
              createdAt: encSecret.createdAt,
              updatedAt: encSecret.updatedAt,
              version: encSecret.version
              version: encSecret.version,
              sourceEnv: env
            };
          })
        })),
@@ -86,13 +86,5 @@ export const useSecretOverview = (secrets: DashboardProjectSecretsOverview["secr
    [secrets]
  );

  const getSecretByKey = useCallback(
    (env: string, key: string) => {
      const sec = secrets?.find((s) => s.env === env && s.key === key);
      return sec;
    },
    [secrets]
  );

  return { secKeys, getSecretByKey, getEnvSecretKeyCount };
  return { secKeys, getEnvSecretKeyCount };
};
@ -12,17 +12,13 @@ export const DefaultSideBar = () => (
|
||||
</MenuItem>
|
||||
)}
|
||||
</Link>
|
||||
{(window.location.origin.includes("https://app.infisical.com") ||
|
||||
window.location.origin.includes("https://eu.infisical.com") ||
|
||||
window.location.origin.includes("https://gamma.infisical.com")) && (
|
||||
<Link to="/organization/billing">
|
||||
{({ isActive }) => (
|
||||
<MenuItem isSelected={isActive} icon="spinning-coin">
|
||||
Usage & Billing
|
||||
</MenuItem>
|
||||
)}
|
||||
</Link>
|
||||
)}
|
||||
<Link to="/organization/billing">
|
||||
{({ isActive }) => (
|
||||
<MenuItem isSelected={isActive} icon="spinning-coin">
|
||||
Usage & Billing
|
||||
</MenuItem>
|
||||
)}
|
||||
</Link>
|
||||
</MenuGroup>
|
||||
<MenuGroup title="Other">
|
||||
<Link to="/organization/access-management">
|
||||
|
@ -370,17 +370,11 @@ export const MinimizedOrgSidebar = () => {
|
||||
Gateways
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
{(window.location.origin.includes("https://app.infisical.com") ||
|
||||
window.location.origin.includes("https://eu.infisical.com") ||
|
||||
window.location.origin.includes("https://gamma.infisical.com")) && (
|
||||
<Link to="/organization/billing">
|
||||
<DropdownMenuItem
|
||||
icon={<FontAwesomeIcon className="w-3" icon={faMoneyBill} />}
|
||||
>
|
||||
Usage & Billing
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
)}
|
||||
<Link to="/organization/billing">
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon className="w-3" icon={faMoneyBill} />}>
|
||||
Usage & Billing
|
||||
</DropdownMenuItem>
|
||||
</Link>
|
||||
<Link to="/organization/audit-logs">
|
||||
<DropdownMenuItem icon={<FontAwesomeIcon className="w-3" icon={faBook} />}>
|
||||
Audit Logs
|
||||
|
@ -9,6 +9,7 @@ import {
|
||||
useOrganization,
|
||||
useSubscription
|
||||
} from "@app/context";
|
||||
import { isInfisicalCloud } from "@app/helpers/platform";
|
||||
import {
|
||||
useCreateCustomerPortalSession,
|
||||
useGetOrgPlanBillingInfo,
|
||||
@ -47,6 +48,9 @@ export const PreviewSection = () => {
|
||||
};
|
||||
|
||||
function formatPlanSlug(slug: string) {
|
||||
if (!slug) {
|
||||
return "-";
|
||||
}
|
||||
return slug.replace(/(\b[a-z])/g, (match) => match.toUpperCase()).replace(/-/g, " ");
|
||||
}
|
||||
|
||||
@ -54,6 +58,11 @@ export const PreviewSection = () => {
|
||||
try {
|
||||
if (!subscription || !currentOrg) return;
|
||||
|
||||
if (!isInfisicalCloud()) {
|
||||
window.open("https://infisical.com/pricing", "_blank");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!subscription.has_used_trial) {
|
||||
// direct user to start pro trial
|
||||
const url = await getOrgTrialUrl.mutateAsync({
|
||||
@ -71,6 +80,19 @@ export const PreviewSection = () => {
|
||||
}
|
||||
};
|
||||
|
||||
const getUpgradePlanLabel = () => {
|
||||
if (!isInfisicalCloud()) {
|
||||
return (
|
||||
<div>
|
||||
Go to Pricing
|
||||
<FontAwesomeIcon icon={faArrowUpRightFromSquare} className="mb-[0.06rem] ml-1 text-xs" />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return !subscription.has_used_trial ? "Start Pro Free Trial" : "Upgrade Plan";
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
{subscription &&
|
||||
@ -97,7 +119,7 @@ export const PreviewSection = () => {
|
||||
color="mineshaft"
|
||||
isDisabled={!isAllowed}
|
||||
>
|
||||
{!subscription.has_used_trial ? "Start Pro Free Trial" : "Upgrade Plan"}
|
||||
{getUpgradePlanLabel()}
|
||||
</Button>
|
||||
)}
|
||||
</OrgPermissionCan>
|
||||
@ -133,22 +155,24 @@ export const PreviewSection = () => {
|
||||
subscription.status === "trialing" ? "(Trial)" : ""
|
||||
}`}
|
||||
</p>
|
||||
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Billing}>
|
||||
{(isAllowed) => (
|
||||
<button
|
||||
type="button"
|
||||
onClick={async () => {
|
||||
if (!currentOrg?.id) return;
|
||||
const { url } = await createCustomerPortalSession.mutateAsync(currentOrg.id);
|
||||
window.location.href = url;
|
||||
}}
|
||||
disabled={!isAllowed}
|
||||
className="text-primary"
|
||||
>
|
||||
Manage plan →
|
||||
</button>
|
||||
)}
|
||||
</OrgPermissionCan>
|
||||
{isInfisicalCloud() && (
|
||||
<OrgPermissionCan I={OrgPermissionActions.Edit} a={OrgPermissionSubjects.Billing}>
|
||||
{(isAllowed) => (
|
||||
<button
|
||||
type="button"
|
||||
onClick={async () => {
|
||||
if (!currentOrg?.id) return;
|
||||
const { url } = await createCustomerPortalSession.mutateAsync(currentOrg.id);
|
||||
window.location.href = url;
|
||||
}}
|
||||
disabled={!isAllowed}
|
||||
className="text-primary"
|
||||
>
|
||||
Manage plan →
|
||||
</button>
|
||||
)}
|
||||
</OrgPermissionCan>
|
||||
)}
|
||||
</div>
|
||||
<div className="mr-4 flex-1 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-4">
|
||||
<p className="mb-2 text-gray-400">Price</p>
|
||||
@ -161,7 +185,7 @@ export const PreviewSection = () => {
|
||||
<div className="flex-1 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-4">
|
||||
<p className="mb-2 text-gray-400">Subscription renews on</p>
|
||||
<p className="mb-8 text-2xl font-semibold text-mineshaft-50">
|
||||
{formatDate(data.currentPeriodEnd)}
|
||||
{data.currentPeriodEnd ? formatDate(data.currentPeriodEnd) : "-"}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -1,5 +1,6 @@
|
||||
import { Tab, TabList, TabPanel, Tabs } from "@app/components/v2";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/context";
|
||||
import { isInfisicalCloud } from "@app/helpers/platform";
|
||||
import { withPermission } from "@app/hoc";
|
||||
|
||||
import { BillingCloudTab } from "../BillingCloudTab";
|
||||
@ -16,25 +17,33 @@ const tabs = [
|
||||
|
||||
export const BillingTabGroup = withPermission(
|
||||
() => {
|
||||
const tabsFiltered = isInfisicalCloud()
|
||||
? tabs
|
||||
: [{ name: "Infisical Self-Hosted", key: "tab-infisical-cloud" }];
|
||||
|
||||
return (
|
||||
<Tabs defaultValue={tabs[0].key}>
|
||||
<TabList>
|
||||
{tabs.map((tab) => (
|
||||
{tabsFiltered.map((tab) => (
|
||||
<Tab value={tab.key}>{tab.name}</Tab>
|
||||
))}
|
||||
</TabList>
|
||||
<TabPanel value={tabs[0].key}>
|
||||
<BillingCloudTab />
|
||||
</TabPanel>
|
||||
<TabPanel value={tabs[1].key}>
|
||||
<BillingSelfHostedTab />
|
||||
</TabPanel>
|
||||
<TabPanel value={tabs[2].key}>
|
||||
<BillingReceiptsTab />
|
||||
</TabPanel>
|
||||
<TabPanel value={tabs[3].key}>
|
||||
<BillingDetailsTab />
|
||||
</TabPanel>
|
||||
{isInfisicalCloud() && (
|
||||
<>
|
||||
<TabPanel value={tabs[1].key}>
|
||||
<BillingSelfHostedTab />
|
||||
</TabPanel>
|
||||
<TabPanel value={tabs[2].key}>
|
||||
<BillingReceiptsTab />
|
||||
</TabPanel>
|
||||
<TabPanel value={tabs[3].key}>
|
||||
<BillingDetailsTab />
|
||||
</TabPanel>
|
||||
</>
|
||||
)}
|
||||
</Tabs>
|
||||
);
|
||||
},
|
||||
|
@ -81,7 +81,6 @@ import { CreateSecretForm } from "./components/CreateSecretForm";
|
||||
import { FolderBreadCrumbs } from "./components/FolderBreadCrumbs";
|
||||
import { SecretOverviewDynamicSecretRow } from "./components/SecretOverviewDynamicSecretRow";
|
||||
import { SecretOverviewFolderRow } from "./components/SecretOverviewFolderRow";
|
||||
import { SecretOverviewImportListView } from "./components/SecretOverviewImportListView";
|
||||
import {
|
||||
SecretNoAccessOverviewTableRow,
|
||||
SecretOverviewTableRow
|
||||
@ -203,12 +202,16 @@ export const OverviewPage = () => {
|
||||
setVisibleEnvs(userAvailableEnvs);
|
||||
}, [userAvailableEnvs]);
|
||||
|
||||
const { isImportedSecretPresentInEnv, getImportedSecretByKey, getEnvImportedSecretKeyCount } =
|
||||
useGetImportedSecretsAllEnvs({
|
||||
projectId: workspaceId,
|
||||
path: secretPath,
|
||||
environments: (userAvailableEnvs || []).map(({ slug }) => slug)
|
||||
});
|
||||
const {
|
||||
secretImports,
|
||||
isImportedSecretPresentInEnv,
|
||||
getImportedSecretByKey,
|
||||
getEnvImportedSecretKeyCount
|
||||
} = useGetImportedSecretsAllEnvs({
|
||||
projectId: workspaceId,
|
||||
path: secretPath,
|
||||
environments: (userAvailableEnvs || []).map(({ slug }) => slug)
|
||||
});
|
||||
|
||||
const { isPending: isOverviewLoading, data: overview } = useGetProjectSecretsOverview(
|
||||
{
|
||||
@ -232,7 +235,6 @@ export const OverviewPage = () => {
|
||||
secrets,
|
||||
folders,
|
||||
dynamicSecrets,
|
||||
imports,
|
||||
totalFolderCount,
|
||||
totalSecretCount,
|
||||
totalDynamicSecretCount,
|
||||
@ -244,16 +246,20 @@ export const OverviewPage = () => {
|
||||
totalUniqueDynamicSecretsInPage
|
||||
} = overview ?? {};
|
||||
|
||||
const importsShaped = imports
|
||||
?.filter((el) => !el.isReserved)
|
||||
?.map(({ importPath, importEnv }) => ({ importPath, importEnv }))
|
||||
.filter(
|
||||
(el, index, self) =>
|
||||
index ===
|
||||
self.findIndex(
|
||||
(item) => item.importPath === el.importPath && item.importEnv.slug === el.importEnv.slug
|
||||
)
|
||||
);
|
||||
const secretImportsShaped = secretImports
|
||||
?.flatMap(({ data }) => data)
|
||||
.filter(Boolean)
|
||||
.flatMap((item) => item?.secrets || []);
|
||||
|
||||
const handleIsImportedSecretPresentInEnv = (envSlug: string, secretName: string) => {
|
||||
if (secrets?.some((s) => s.key === secretName && s.env === envSlug)) {
|
||||
return false;
|
||||
}
|
||||
if (secretImportsShaped.some((s) => s.key === secretName && s.sourceEnv === envSlug)) {
|
||||
return true;
|
||||
}
|
||||
return isImportedSecretPresentInEnv(envSlug, secretName);
|
||||
};
|
||||
|
||||
useResetPageHelper({
|
||||
totalCount,
|
||||
@ -267,7 +273,18 @@ export const OverviewPage = () => {
|
||||
const { dynamicSecretNames, isDynamicSecretPresentInEnv } =
|
||||
useDynamicSecretOverview(dynamicSecrets);
|
||||
|
||||
const { secKeys, getSecretByKey, getEnvSecretKeyCount } = useSecretOverview(secrets);
|
||||
const { secKeys, getEnvSecretKeyCount } = useSecretOverview(
|
||||
secrets?.concat(secretImportsShaped) || []
|
||||
);
|
||||
|
||||
const getSecretByKey = useCallback(
|
||||
(env: string, key: string) => {
|
||||
const sec = secrets?.find((s) => s.env === env && s.key === key);
|
||||
return sec;
|
||||
},
|
||||
[secrets]
|
||||
);
|
||||
|
||||
const { data: tags } = useGetWsTags(
|
||||
permission.can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags) ? workspaceId : ""
|
||||
);
|
||||
@ -1124,24 +1141,13 @@ export const OverviewPage = () => {
|
||||
key={`overview-${dynamicSecretName}-${index + 1}`}
|
||||
/>
|
||||
))}
|
||||
{filter.import &&
|
||||
importsShaped &&
|
||||
importsShaped?.length > 0 &&
|
||||
importsShaped?.map((item, index) => (
|
||||
<SecretOverviewImportListView
|
||||
secretImport={item}
|
||||
environments={visibleEnvs}
|
||||
key={`overview-secret-input-${index + 1}`}
|
||||
allSecretImports={imports}
|
||||
/>
|
||||
))}
|
||||
{secKeys.map((key, index) => (
|
||||
<SecretOverviewTableRow
|
||||
isSelected={Boolean(selectedEntries.secret[key])}
|
||||
onToggleSecretSelect={() => toggleSelectedEntry(EntryType.SECRET, key)}
|
||||
secretPath={secretPath}
|
||||
getImportedSecretByKey={getImportedSecretByKey}
|
||||
isImportedSecretPresentInEnv={isImportedSecretPresentInEnv}
|
||||
isImportedSecretPresentInEnv={handleIsImportedSecretPresentInEnv}
|
||||
onSecretCreate={handleSecretCreate}
|
||||
onSecretDelete={handleSecretDelete}
|
||||
onSecretUpdate={handleSecretUpdate}
|
||||
|
@ -1,85 +0,0 @@
|
||||
import { faCheck, faFileImport, faXmark } from "@fortawesome/free-solid-svg-icons";
|
||||
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
|
||||
import { twMerge } from "tailwind-merge";
|
||||
|
||||
import { Td, Tr } from "@app/components/v2";
|
||||
import { TSecretImport, WorkspaceEnv } from "@app/hooks/api/types";
|
||||
import { EnvFolderIcon } from "@app/pages/secret-manager/SecretDashboardPage/components/SecretImportListView/SecretImportItem";
|
||||
|
||||
type Props = {
|
||||
secretImport: { importPath: string; importEnv: WorkspaceEnv };
|
||||
environments: { name: string; slug: string }[];
|
||||
allSecretImports?: TSecretImport[];
|
||||
};
|
||||
|
||||
export const SecretOverviewImportListView = ({
|
||||
secretImport,
|
||||
environments = [],
|
||||
allSecretImports = []
|
||||
}: Props) => {
|
||||
const isSecretPresentInEnv = (envSlug: string) => {
|
||||
return allSecretImports.some((item) => {
|
||||
if (item.isReplication) {
|
||||
if (
|
||||
item.importPath === secretImport.importPath &&
|
||||
item.importEnv.slug === secretImport.importEnv.slug
|
||||
) {
|
||||
const reservedItem = allSecretImports.find((element) =>
|
||||
element.importPath.includes(`__reserve_replication_${item.id}`)
|
||||
);
|
||||
// If the reserved item exists, check if the envSlug matches
|
||||
if (reservedItem) {
|
||||
return reservedItem.environment === envSlug;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If the item is not replication, check if the envSlug matches directly
|
||||
return (
|
||||
item.environment === envSlug &&
|
||||
item.importPath === secretImport.importPath &&
|
||||
item.importEnv.slug === secretImport.importEnv.slug
|
||||
);
|
||||
}
|
||||
return false;
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<Tr className="group">
|
||||
<Td className="sticky left-0 z-10 border-r border-mineshaft-600 bg-mineshaft-800 bg-clip-padding px-0 py-0 group-hover:bg-mineshaft-700">
|
||||
<div className="group flex cursor-pointer">
|
||||
<div className="flex w-11 items-center py-2 pl-5 text-green-700">
|
||||
<FontAwesomeIcon icon={faFileImport} />
|
||||
</div>
|
||||
<div className="flex flex-grow items-center py-2 pl-4 pr-2">
|
||||
<EnvFolderIcon
|
||||
env={secretImport.importEnv.slug || ""}
|
||||
secretPath={secretImport.importPath || ""}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</Td>
|
||||
{environments.map(({ slug }, i) => {
|
||||
const isPresent = isSecretPresentInEnv(slug);
|
||||
return (
|
||||
<Td
|
||||
key={`sec-overview-${slug}-${i + 1}-value`}
|
||||
className={twMerge(
|
||||
"px-0 py-0 group-hover:bg-mineshaft-700",
|
||||
isPresent ? "text-green-600" : "text-red-600"
|
||||
)}
|
||||
>
|
||||
<div className="h-full w-full border-r border-mineshaft-600 px-5 py-[0.85rem]">
|
||||
<div className="flex justify-center">
|
||||
<FontAwesomeIcon
|
||||
// eslint-disable-next-line no-nested-ternary
|
||||
icon={isSecretPresentInEnv(slug) ? faCheck : faXmark}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</Td>
|
||||
);
|
||||
})}
|
||||
</Tr>
|
||||
);
|
||||
};
|
@ -1 +0,0 @@
|
||||
export { SecretOverviewImportListView } from "./SecretOverviewImportListView";
|
2  frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretRenameRow.tsx
@@ -162,7 +162,7 @@ function SecretRenameRow({ environments, getSecretByKey, secretKey, secretPath }
        render={({ field, fieldState: { error } }) => (
          <Input
            autoComplete="off"
            isReadOnly={isReadOnly}
            isReadOnly={isReadOnly || secrets.filter(Boolean).length === 0}
            autoCapitalization={currentWorkspace?.autoCapitalization}
            variant="plain"
            isDisabled={isOverriden}
@ -88,4 +88,4 @@ spec:
|
||||
serviceAccountName: {{ include "secrets-operator.fullname" . }}-controller-manager
|
||||
terminationGracePeriodSeconds: 10
|
||||
nodeSelector: {{ toYaml .Values.controllerManager.nodeSelector | nindent 8 }}
|
||||
tolerations: {{ toYaml .Values.controllerManager.tolerations | nindent 8 }}
|
||||
tolerations: {{ toYaml .Values.controllerManager.tolerations | nindent 8 }}
|
||||
|
@ -309,4 +309,4 @@ status:
|
||||
plural: ""
|
||||
conditions: []
|
||||
storedVersions: []
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
@ -266,4 +266,4 @@ status:
|
||||
plural: ""
|
||||
conditions: []
|
||||
storedVersions: []
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
@ -504,5 +504,4 @@ status:
|
||||
plural: ""
|
||||
conditions: []
|
||||
storedVersions: []
|
||||
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
@ -56,4 +56,4 @@ roleRef:
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: '{{ include "secrets-operator.fullname" . }}-controller-manager'
|
||||
namespace: '{{ .Release.Namespace }}'
|
||||
namespace: '{{ .Release.Namespace }}'
|
||||
|
@@ -53,6 +53,15 @@ rules:
  - list
  - update
  - watch
- apiGroups:
  - apps
  resources:
  - deployments
  verbs:
  - get
  - list
  - update
  - watch
- apiGroups:
  - secrets.infisical.com
  resources:
@@ -159,4 +168,4 @@ roleRef:
subjects:
- kind: ServiceAccount
  name: '{{ include "secrets-operator.fullname" . }}-controller-manager'
  namespace: '{{ .Release.Namespace }}'
  namespace: '{{ .Release.Namespace }}'
@ -13,4 +13,5 @@ rules:
|
||||
- /metrics
|
||||
verbs:
|
||||
- get
|
||||
{{- end }}
|
||||
|
||||
{{- end }}
|
||||
|
@ -14,4 +14,4 @@ spec:
|
||||
control-plane: controller-manager
|
||||
{{- include "secrets-operator.selectorLabels" . | nindent 4 }}
|
||||
ports:
|
||||
{{- .Values.metricsService.ports | toYaml | nindent 2 }}
|
||||
{{- .Values.metricsService.ports | toYaml | nindent 2 }}
|
||||
|
@ -39,4 +39,5 @@ subjects:
|
||||
- kind: ServiceAccount
|
||||
name: '{{ include "secrets-operator.fullname" . }}-controller-manager'
|
||||
namespace: '{{ .Release.Namespace }}'
|
||||
{{- end }}
|
||||
|
||||
{{- end }}
|
||||
|
@ -8,4 +8,4 @@ metadata:
|
||||
app.kubernetes.io/part-of: k8-operator
|
||||
{{- include "secrets-operator.labels" . | nindent 4 }}
|
||||
annotations:
|
||||
{{- toYaml .Values.controllerManager.serviceAccount.annotations | nindent 4 }}
|
||||
{{- toYaml .Values.controllerManager.serviceAccount.annotations | nindent 4 }}
|
||||
|
@ -1,15 +1,15 @@
|
||||
controllerManager:
|
||||
kubeRbacProxy:
|
||||
args:
|
||||
- --secure-listen-address=0.0.0.0:8443
|
||||
- --upstream=http://127.0.0.1:8080/
|
||||
- --logtostderr=true
|
||||
- --v=0
|
||||
- --secure-listen-address=0.0.0.0:8443
|
||||
- --upstream=http://127.0.0.1:8080/
|
||||
- --logtostderr=true
|
||||
- --v=0
|
||||
containerSecurityContext:
|
||||
allowPrivilegeEscalation: false
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
- ALL
|
||||
image:
|
||||
repository: gcr.io/kubebuilder/kube-rbac-proxy
|
||||
tag: v0.15.0
|
||||
@ -22,17 +22,17 @@ controllerManager:
|
||||
memory: 64Mi
|
||||
manager:
|
||||
args:
|
||||
- --health-probe-bind-address=:8081
|
||||
- --metrics-bind-address=127.0.0.1:8080
|
||||
- --leader-elect
|
||||
- --health-probe-bind-address=:8081
|
||||
- --metrics-bind-address=127.0.0.1:8080
|
||||
- --leader-elect
|
||||
containerSecurityContext:
|
||||
allowPrivilegeEscalation: false
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
- ALL
|
||||
image:
|
||||
repository: infisical/kubernetes-operator
|
||||
tag: v0.8.15
|
||||
tag: <helm-pr-will-update-this-automatically>
|
||||
resources:
|
||||
limits:
|
||||
cpu: 500m
|
||||
@ -45,14 +45,14 @@ controllerManager:
|
||||
annotations: {}
|
||||
nodeSelector: {}
|
||||
tolerations: []
|
||||
metricsService:
|
||||
ports:
|
||||
- name: https
|
||||
port: 8443
|
||||
protocol: TCP
|
||||
targetPort: https
|
||||
type: ClusterIP
|
||||
kubernetesClusterDomain: cluster.local
|
||||
scopedNamespace: ""
|
||||
scopedRBAC: false
|
||||
installCRDs: true
|
||||
metricsService:
|
||||
ports:
|
||||
- name: https
|
||||
port: 8443
|
||||
protocol: TCP
|
||||
targetPort: https
|
||||
type: ClusterIP
|
||||
|
@@ -48,9 +48,12 @@ helmify: $(HELMIFY) ## Download helmify locally if necessary.
$(HELMIFY): $(LOCALBIN)
	test -s $(LOCALBIN)/helmify || GOBIN=$(LOCALBIN) go install github.com/arttor/helmify/cmd/helmify@latest

helm: manifests kustomize helmify
legacy-helm: manifests kustomize helmify
	$(KUSTOMIZE) build config/default | $(HELMIFY) ../helm-charts/secrets-operator

helm: manifests kustomize helmify
	./scripts/generate-helm.sh

## Yaml for Kubectl
kubectl-install: manifests kustomize
	mkdir -p kubectl-install
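A brief usage note (assuming the commands are run from the k8-operator directory): the chart is now regenerated through the new script, while the previous helmify-only behaviour remains available under legacy-helm.

```bash
# Regenerate the Helm chart and apply the CRD/RBAC post-processing
make helm

# Previous helmify-only generation, kept for reference
make legacy-helm
```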
332
k8-operator/scripts/generate-helm.sh
Executable file
332
k8-operator/scripts/generate-helm.sh
Executable file
@ -0,0 +1,332 @@
#!/usr/bin/env bash
set -euo pipefail

SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
PROJECT_ROOT=$(cd "${SCRIPT_DIR}/.." && pwd)
HELM_DIR="${PROJECT_ROOT}/../helm-charts/secrets-operator"
LOCALBIN="${PROJECT_ROOT}/bin"
KUSTOMIZE="${LOCALBIN}/kustomize"
HELMIFY="${LOCALBIN}/helmify"

cd "${PROJECT_ROOT}"

# first run the regular helm target to generate base templates
"${KUSTOMIZE}" build config/default | "${HELMIFY}" "${HELM_DIR}"

# ? NOTE: Processes all files that end with crd.yaml (so only actual CRDs)
for crd_file in "${HELM_DIR}"/templates/*crd.yaml; do
  # skip if file doesn't exist (pattern doesn't match)
  [ -e "$crd_file" ] || continue

  echo "Processing CRD file: ${crd_file}"

  cp "$crd_file" "$crd_file.bkp"

  # if we ever need to run conditional logic based on the CRD kind, we can use this
  # CRD_KIND=$(grep -E "kind: [a-zA-Z]+" "$crd_file" | head -n1 | awk '{print $2}')
  # echo "Found CRD kind: ${CRD_KIND}"

  # create a new file with the conditional statement, then append the entire original content
  echo "{{- if .Values.installCRDs }}" > "$crd_file.new"
  cat "$crd_file.bkp" >> "$crd_file.new"

  # make sure the file ends with a newline before adding the end tag (otherwise it might get messed up and end up on the same line as the last line)
  # check if file already ends with a newline
  if [ "$(tail -c1 "$crd_file.new" | wc -l)" -eq 0 ]; then
    # File doesn't end with a newline, add one
    echo "" >> "$crd_file.new"
  fi

  # add the end tag on a new line
  echo "{{- end }}" >> "$crd_file.new"

  # replace the original file with the new one
  mv "$crd_file.new" "$crd_file"

  # clean up backup
  rm "$crd_file.bkp"

  echo "Completed processing for: ${crd_file}"
done

# ? NOTE: Processes only the manager-rbac.yaml file
if [ -f "${HELM_DIR}/templates/manager-rbac.yaml" ]; then
  echo "Processing manager-rbac.yaml file specifically"

  cp "${HELM_DIR}/templates/manager-rbac.yaml" "${HELM_DIR}/templates/manager-rbac.yaml.bkp"

  # extract the rules section from the original file
  rules_section=$(sed -n '/^rules:/,/^---/p' "${HELM_DIR}/templates/manager-rbac.yaml.bkp" | sed '$d')
  # extract the original label lines
  original_labels=$(sed -n '/^  labels:/,/^roleRef:/p' "${HELM_DIR}/templates/manager-rbac.yaml.bkp" | grep "app.kubernetes.io")

  # create a new file from scratch with exactly what we want
  {
    # first section: Role/ClusterRole
    echo "apiVersion: rbac.authorization.k8s.io/v1"
    echo "{{- if and .Values.scopedNamespace .Values.scopedRBAC }}"
    echo "kind: Role"
    echo "{{- else }}"
    echo "kind: ClusterRole"
    echo "{{- end }}"
    echo "metadata:"
    echo "  name: {{ include \"secrets-operator.fullname\" . }}-manager-role"
    echo "  {{- if and .Values.scopedNamespace .Values.scopedRBAC }}"
    echo "  namespace: {{ .Values.scopedNamespace | quote }}"
    echo "  {{- end }}"
    echo "  labels:"
    echo "  {{- include \"secrets-operator.labels\" . | nindent 4 }}"

    # add the existing rules section from helm-generated file
    echo "$rules_section"

    # second section: RoleBinding/ClusterRoleBinding
    echo "---"
    echo "apiVersion: rbac.authorization.k8s.io/v1"
    echo "{{- if and .Values.scopedNamespace .Values.scopedRBAC }}"
    echo "kind: RoleBinding"
    echo "{{- else }}"
    echo "kind: ClusterRoleBinding"
    echo "{{- end }}"
    echo "metadata:"
    echo "  name: {{ include \"secrets-operator.fullname\" . }}-manager-rolebinding"
    echo "  {{- if and .Values.scopedNamespace .Values.scopedRBAC }}"
    echo "  namespace: {{ .Values.scopedNamespace | quote }}"
    echo "  {{- end }}"
    echo "  labels:"
    echo "$original_labels"
    echo "  {{- include \"secrets-operator.labels\" . | nindent 4 }}"

    # add the roleRef section with custom logic
    echo "roleRef:"
    echo "  apiGroup: rbac.authorization.k8s.io"
    echo "  {{- if and .Values.scopedNamespace .Values.scopedRBAC }}"
    echo "  kind: Role"
    echo "  {{- else }}"
    echo "  kind: ClusterRole"
    echo "  {{- end }}"
    echo "  name: '{{ include \"secrets-operator.fullname\" . }}-manager-role'"

    # add the subjects section
    sed -n '/^subjects:/,$ p' "${HELM_DIR}/templates/manager-rbac.yaml.bkp"
  } > "${HELM_DIR}/templates/manager-rbac.yaml.new"

  mv "${HELM_DIR}/templates/manager-rbac.yaml.new" "${HELM_DIR}/templates/manager-rbac.yaml"
  rm "${HELM_DIR}/templates/manager-rbac.yaml.bkp"

  echo "Completed processing for manager-rbac.yaml with both role conditions and metadata applied"
fi
# ? NOTE(Daniel): Processes proxy-rbac.yaml and metrics-reader-rbac.yaml
for rbac_file in "${HELM_DIR}/templates/proxy-rbac.yaml" "${HELM_DIR}/templates/metrics-reader-rbac.yaml"; do
  if [ -f "$rbac_file" ]; then
    echo "Adding scopedNamespace condition to $(basename "$rbac_file")"

    {
      echo "{{- if not .Values.scopedNamespace }}"
      cat "$rbac_file"
      echo ""
      echo "{{- end }}"
    } > "$rbac_file.new"

    mv "$rbac_file.new" "$rbac_file"

    echo "Completed processing for $(basename "$rbac_file")"
  fi
done

# ? NOTE(Daniel): Processes metrics-service.yaml
if [ -f "${HELM_DIR}/templates/metrics-service.yaml" ]; then
  echo "Processing metrics-service.yaml file specifically"

  metrics_file="${HELM_DIR}/templates/metrics-service.yaml"
  touch "${metrics_file}.new"

  while IFS= read -r line; do
    if [[ "$line" == *"{{- include \"secrets-operator.selectorLabels\" . | nindent 4 }}"* ]]; then
      # keep original indentation for the selector labels line
      echo "  {{- include \"secrets-operator.selectorLabels\" . | nindent 4 }}" >> "${metrics_file}.new"
    elif [[ "$line" == *"{{- .Values.metricsService.ports | toYaml | nindent 2 }}"* ]]; then
      # fix indentation for the ports line - use less indentation here
      echo " {{- .Values.metricsService.ports | toYaml | nindent 2 }}" >> "${metrics_file}.new"
    else
      echo "$line" >> "${metrics_file}.new"
    fi
  done < "${metrics_file}"

  mv "${metrics_file}.new" "${metrics_file}"
  echo "Completed processing for metrics-service.yaml"
fi
# ? NOTE(Daniel): Processes deployment.yaml
if [ -f "${HELM_DIR}/templates/deployment.yaml" ]; then
  echo "Processing deployment.yaml file"

  touch "${HELM_DIR}/templates/deployment.yaml.new"

  securityContext_replaced=0
  in_first_securityContext=0
  first_securityContext_found=0

  # process the file line by line
  while IFS= read -r line; do
    # check if this is the first securityContext line (for kube-rbac-proxy)
    if [[ "$line" =~ securityContext.*Values.controllerManager.kubeRbacProxy ]] && [ "$first_securityContext_found" -eq 0 ]; then
      echo "$line" >> "${HELM_DIR}/templates/deployment.yaml.new"
      first_securityContext_found=1
      in_first_securityContext=1
      continue
    fi

    # check if this is the args line after the first securityContext
    if [ "$in_first_securityContext" -eq 1 ] && [[ "$line" =~ args: ]]; then
      # Add our custom args section with conditional logic
      echo "      - args:" >> "${HELM_DIR}/templates/deployment.yaml.new"
      echo "        {{- toYaml .Values.controllerManager.manager.args | nindent 8 }}" >> "${HELM_DIR}/templates/deployment.yaml.new"
      echo "        {{- if and .Values.scopedNamespace .Values.scopedRBAC }}" >> "${HELM_DIR}/templates/deployment.yaml.new"
      echo "        - --namespace={{ .Values.scopedNamespace }}" >> "${HELM_DIR}/templates/deployment.yaml.new"
      echo "        {{- end }}" >> "${HELM_DIR}/templates/deployment.yaml.new"
      in_first_securityContext=0
      continue
    fi

    # check if this is the problematic pod securityContext line
    if [[ "$line" =~ securityContext.*Values.controllerManager.podSecurityContext ]] && [ "$securityContext_replaced" -eq 0 ]; then
      # Replace with our custom securityContext
      echo "      securityContext:" >> "${HELM_DIR}/templates/deployment.yaml.new"
      echo "        runAsNonRoot: true" >> "${HELM_DIR}/templates/deployment.yaml.new"
      securityContext_replaced=1
      continue
    fi

    # skip the line if it's just the trailing part of the replacement
    if [[ "$securityContext_replaced" -eq 1 ]] && [[ "$line" =~ ^[[:space:]]*[0-9]+[[:space:]]*\}\} ]]; then
      # this is the trailing part of the template expression, skip it
      securityContext_replaced=0
      continue
    fi

    # skip the simplified args line that replaced our custom one
    if [[ "$line" =~ args:.*Values.controllerManager.manager.args ]]; then
      continue
    fi

    echo "$line" >> "${HELM_DIR}/templates/deployment.yaml.new"
  done < "${HELM_DIR}/templates/deployment.yaml"

  echo "      nodeSelector: {{ toYaml .Values.controllerManager.nodeSelector | nindent 8 }}" >> "${HELM_DIR}/templates/deployment.yaml.new"
  echo "      tolerations: {{ toYaml .Values.controllerManager.tolerations | nindent 8 }}" >> "${HELM_DIR}/templates/deployment.yaml.new"

  mv "${HELM_DIR}/templates/deployment.yaml.new" "${HELM_DIR}/templates/deployment.yaml"
  echo "Completed processing for deployment.yaml"
fi
# ? NOTE(Daniel): Processes values.yaml
if [ -f "${HELM_DIR}/values.yaml" ]; then
  echo "Processing values.yaml file"

  # Create a temporary file
  touch "${HELM_DIR}/values.yaml.new"

  # Flags to track sections
  in_resources_section=0
  in_service_account=0

  previous_line=""
  # Process the file line by line
  while IFS= read -r line; do
    # Check if previous line includes infisical/kubernetes-operator and this line includes tag:
    if [[ "$previous_line" =~ infisical/kubernetes-operator ]] && [[ "$line" =~ ^[[:space:]]*tag: ]]; then
      # Get the indentation
      indent=$(echo "$line" | sed 's/\(^[[:space:]]*\).*/\1/')
      # Replace with our custom tag
      echo "${indent}tag: <helm-pr-will-update-this-automatically>" >> "${HELM_DIR}/values.yaml.new"
      continue
    fi

    if [[ "$line" =~ resources: ]]; then
      in_resources_section=1
    fi

    if [[ "$line" =~ podSecurityContext: ]]; then
      # skip this line and continue to the next line
      continue
    fi

    if [[ "$line" =~ runAsNonRoot: ]] && [ "$in_resources_section" -eq 1 ]; then
      # also skip this line and continue to the next line
      continue
    fi

    if [[ "$line" =~ ^[[:space:]]*serviceAccount: ]]; then
      # set the flag to 1 so we can continue to print the associated lines later
      in_service_account=1
      # print the current line
      echo "$line" >> "${HELM_DIR}/values.yaml.new"
      continue
    fi

    # process annotations under serviceAccount (only if in_service_account is true)
    if [ "$in_service_account" -eq 1 ]; then
      # Print the current line (annotations)
      echo "$line" >> "${HELM_DIR}/values.yaml.new"

      # if we've processed the annotations, add our new fields
      if [[ "$line" =~ annotations: ]]; then
        # get the base indentation level (of serviceAccount:)
        base_indent=$(echo "$line" | sed 's/\(^[[:space:]]*\).*/\1/')
        base_indent=${base_indent%??} # Remove two spaces to get to parent level

        # add nodeSelector and tolerations at the same level as serviceAccount
        echo "${base_indent}nodeSelector: {}" >> "${HELM_DIR}/values.yaml.new"
        echo "${base_indent}tolerations: []" >> "${HELM_DIR}/values.yaml.new"
      fi

      # exit serviceAccount section when we hit the next top-level item
      if [[ "$line" =~ ^[[:space:]]{2}[a-zA-Z] ]] && ! [[ "$line" =~ annotations: ]]; then
        in_service_account=0
      fi

      continue
    fi

    # if we reach this point, we'll exit the resources section, this is the next top-level item
    if [ "$in_resources_section" -eq 1 ] && [[ "$line" =~ ^[[:space:]]{2}[a-zA-Z] ]]; then
      in_resources_section=0
    fi

    # output the line unchanged
    echo "$line" >> "${HELM_DIR}/values.yaml.new"
    previous_line="$line"
  done < "${HELM_DIR}/values.yaml"

  # hacky, just append the kubernetesClusterDomain fields at the end of the file
  if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS version
    sed -i '' '/kubernetesClusterDomain: /d' "${HELM_DIR}/values.yaml.new"
  else
    # Linux version
    sed -i '/kubernetesClusterDomain: /d' "${HELM_DIR}/values.yaml.new"
  fi

  echo "kubernetesClusterDomain: cluster.local" >> "${HELM_DIR}/values.yaml.new"
  echo "scopedNamespace: \"\"" >> "${HELM_DIR}/values.yaml.new"
  echo "scopedRBAC: false" >> "${HELM_DIR}/values.yaml.new"
  echo "installCRDs: true" >> "${HELM_DIR}/values.yaml.new"

  # replace the original file with the new one
  mv "${HELM_DIR}/values.yaml.new" "${HELM_DIR}/values.yaml"

  echo "Completed processing for values.yaml"
fi

echo "Helm chart generation complete with custom templating applied."
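For orientation only: a rough sketch of how the CRD post-processing above could be checked locally. The file name is just one of the chart's CRD templates, used as an example; the exact contents depend on the helmify output.

# run from the k8-operator directory after `make helm`
# each templates/*crd.yaml file should now be wrapped in an installCRDs guard:
#   {{- if .Values.installCRDs }}
#   ...original CRD manifest...
#   {{- end }}
head -n 1 ../helm-charts/secrets-operator/templates/infisicalsecret-crd.yaml   # expected: the if guard
tail -n 1 ../helm-charts/secrets-operator/templates/infisicalsecret-crd.yaml   # expected: the end tag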
37
k8-operator/scripts/update-version.sh
Executable file
@@ -0,0 +1,37 @@
#!/usr/bin/env bash

SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
PATH_TO_HELM_CHART="${SCRIPT_DIR}/../../helm-charts/secrets-operator"

VERSION=$1
VERSION_WITHOUT_V=$(echo "$VERSION" | sed 's/^v//') # needed to validate semver

if [ -z "$VERSION" ]; then
  echo "Usage: $0 <version>"
  exit 1
fi

if ! [[ "$VERSION_WITHOUT_V" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "Error: Version must follow semantic versioning (e.g. 0.0.1)"
  exit 1
fi

if ! [[ "$VERSION" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "Error: Version must start with 'v' (e.g. v0.0.1)"
  exit 1
fi

# For Linux vs macOS sed compatibility
if [[ "$OSTYPE" == "darwin"* ]]; then
  # macOS version
  sed -i '' -e '/repository: infisical\/kubernetes-operator/{n;s/tag: .*/tag: '"$VERSION"'/;}' "${PATH_TO_HELM_CHART}/values.yaml"
  sed -i '' 's/appVersion: .*/appVersion: "'"$VERSION"'"/g' "${PATH_TO_HELM_CHART}/Chart.yaml"
  sed -i '' 's/version: .*/version: '"$VERSION"'/g' "${PATH_TO_HELM_CHART}/Chart.yaml"
else
  # Linux version
  sed -i -e '/repository: infisical\/kubernetes-operator/{n;s/tag: .*/tag: '"$VERSION"'/;}' "${PATH_TO_HELM_CHART}/values.yaml"
  sed -i 's/appVersion: .*/appVersion: "'"$VERSION"'"/g' "${PATH_TO_HELM_CHART}/Chart.yaml"
  sed -i 's/version: .*/version: '"$VERSION"'/g' "${PATH_TO_HELM_CHART}/Chart.yaml"
fi
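A short usage sketch for the version-bump script above; the version string is only an example, and the script itself enforces the v-prefixed semver format it checks for.

cd k8-operator
./scripts/update-version.sh v0.0.1
# values.yaml : the tag under "repository: infisical/kubernetes-operator" becomes v0.0.1
# Chart.yaml  : appVersion becomes "v0.0.1" and version becomes v0.0.1
git diff ../helm-charts/secrets-operator    # inspect the rewritten chart metadata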