Compare commits

...

35 Commits

Author SHA1 Message Date
Maidul Islam
2084539f61 fix logic 2025-04-09 20:55:41 -04:00
Maidul Islam
34cf47a5eb remove console 2025-04-09 20:47:16 -04:00
Maidul Islam
b90c6cf3fc remove rate limits for self host 2025-04-09 20:45:51 -04:00
Maidul Islam
bbc94da522 Merge pull request #3384 from akhilmhdh/feat/win-get
feat: added winget to build
2025-04-09 12:24:37 -04:00
=
8a241771ec feat: added winget to build 2025-04-09 21:11:39 +05:30
Akhil Mohan
1f23515aac Merge pull request #3367 from akhilmhdh/feat/syntax-highlight
Add filter by role for org identity and search identity api
2025-04-09 20:02:52 +05:30
=
63dc9ec35d feat: updated search message on empty result with role filter 2025-04-09 15:15:54 +05:30
=
1d083befe4 feat: added order by 2025-04-09 15:09:55 +05:30
=
c01e29b932 feat: rabbit review changes 2025-04-09 15:09:54 +05:30
=
3aed79071b feat: added search endpoint to docs 2025-04-09 15:09:54 +05:30
=
140fa49871 feat: added advance filter for identities list table in org 2025-04-09 15:09:54 +05:30
=
03a3e80082 feat: completed api for new search identities 2025-04-09 15:09:54 +05:30
Maidul Islam
bfcfffbabf update notice 2025-04-08 21:15:31 -04:00
Maidul Islam
210bd220e5 Delete .github/workflows/codeql.yml 2025-04-08 20:51:25 -04:00
Maidul Islam
7be2a10631 Merge pull request #3380 from Infisical/end-cloudsmith-publish
update install scrip for deb
2025-04-08 20:49:52 -04:00
Maidul Islam
5753eb7d77 rename install file 2025-04-08 20:49:14 -04:00
Maidul Islam
cb86aa40fa update install scrip for deb 2025-04-08 20:47:33 -04:00
Maidul Islam
1131143a71 remove gpg passphrase 2025-04-08 18:28:23 -04:00
Maidul Islam
a50b8120fd Merge pull request #3378 from akhilmhdh/fix/doc-p-access-image
feat: updated ruby action
2025-04-08 16:21:06 -04:00
=
f1ee53d417 feat: updated ruby action 2025-04-09 01:49:35 +05:30
Maidul Islam
229ad79f49 Merge pull request #3377 from akhilmhdh/fix/doc-p-access-image
feat: added passphrase
2025-04-08 15:56:34 -04:00
=
d7dbd01ecf feat: banner respect silent 2025-04-09 01:24:38 +05:30
=
026fd21fd4 feat: added passphrase 2025-04-09 01:05:31 +05:30
Maidul Islam
9b9c1a52b3 Merge pull request #3376 from akhilmhdh/fix/doc-p-access-image
feat: added s3 deb pipeline
2025-04-08 15:05:32 -04:00
Maidul Islam
98aa424e2e Update .github/workflows/release_build_infisical_cli.yml
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-04-08 15:02:47 -04:00
=
2cd5df1ab3 feat: updated message 2025-04-09 00:30:48 +05:30
Daniel Hougaard
e0d863e06e Merge pull request #3375 from Infisical/helm-update-v0.9.1
Update Helm chart to version v0.9.1
2025-04-08 22:52:42 +04:00
=
d991af557b feat: added s3 deb pipeline 2025-04-09 00:22:00 +05:30
DanielHougaard
ae54d04357 Update Helm chart to version v0.9.1 2025-04-08 18:51:31 +00:00
Daniel Hougaard
fa590ba697 Merge pull request #3348 from Infisical/daniel/k8s-auto-reviewer-token
feat(k8s): k8s auth automatic service account token creation
2025-04-08 22:45:57 +04:00
Maidul Islam
1da2896bb0 Create codeql.yml 2025-04-07 21:00:43 -04:00
Sheen
423a2f38ea Merge pull request #3371 from Infisical/misc/add-missing-version-filter
misc: add missing version filter in get secret by name
2025-04-08 02:55:21 +08:00
Daniel Hougaard
3f190426fe fix: added docs for operator managed service account tokens & made audience optional 2025-04-04 03:15:11 +04:00
Daniel Hougaard
3d072c2f48 feat(k8s): automatic service account token creation for k8s auth 2025-04-01 23:39:22 +04:00
Daniel Hougaard
82b828c10e feat(k8s): automatic service account token creation for k8s auth 2025-04-01 23:16:38 +04:00
54 changed files with 2756 additions and 530 deletions

View File

@@ -1,132 +1,147 @@
name: Build and release CLI
on:
workflow_dispatch:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
contents: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-latest
npm-release:
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
goreleaser:
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
with:
ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Install deb-s3
run: gem install deb-s3
- name: Configure GPG Key
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}

View File

@@ -162,6 +162,24 @@ scoop:
description: "The official Infisical CLI"
license: MIT
winget:
- name: infisical
publisher: infisical
license: MIT
homepage: https://infisical.com
short_description: "The official Infisical CLI"
repository:
owner: infisical
name: winget-pkgs
branch: "infisical-{{.Version}}"
pull_request:
enabled: true
draft: false
base:
owner: microsoft
name: winget-pkgs
branch: master
aurs:
- name: infisical-bin
homepage: "https://infisical.com"

View File

@@ -66,6 +66,17 @@ export const IDENTITIES = {
},
LIST: {
orgId: "The ID of the organization to list identities."
},
SEARCH: {
search: {
desc: "The filters to apply to the search.",
name: "The name of the identity to filter by.",
role: "The organizational role of the identity to filter by."
},
offset: "The offset to start from. If you enter 10, it will start from the 10th identity.",
limit: "The number of identities to return.",
orderBy: "The column to order identities by.",
orderDirection: "The direction to order identities in."
}
} as const;

View File

@@ -0,0 +1,141 @@
import { Knex } from "knex";
import { SearchResourceOperators, TSearchResourceOperator } from "./search";
/**
 * Applies a single search operator as a WHERE clause on the given knex query.
 * When `fields` is an array the condition matches if ANY of the fields match
 * (field1 OR field2 ...). Throws when `value` has the wrong shape for the
 * requested operator.
 */
const buildKnexQuery = (
  query: Knex.QueryBuilder,
  // when it's multiple table field means it's field1 or field2
  fields: string | string[],
  operator: SearchResourceOperators,
  value: unknown
) => {
  // Runs `apply` on the single field, or OR-combines it across all fields
  // inside one grouped where() so surrounding AND logic is unaffected.
  const applyToFields = (apply: (qb: Knex.QueryBuilder, field: string, useOr: boolean) => void) => {
    if (typeof fields === "string") {
      return void apply(query, fields, false);
    }
    return void query.where((qb) => {
      fields.forEach((field, index) => {
        apply(qb, field, index !== 0);
      });
    });
  };

  switch (operator) {
    case SearchResourceOperators.$eq: {
      if (typeof value !== "string" && typeof value !== "number")
        throw new Error("Invalid value type for $eq operator");
      return applyToFields((qb, field, useOr) =>
        useOr ? void qb.orWhere(field, "=", value) : void qb.where(field, "=", value)
      );
    }
    case SearchResourceOperators.$neq: {
      if (typeof value !== "string" && typeof value !== "number")
        throw new Error("Invalid value type for $neq operator");
      return applyToFields((qb, field, useOr) =>
        useOr ? void qb.orWhere(field, "<>", value) : void qb.where(field, "<>", value)
      );
    }
    case SearchResourceOperators.$in: {
      if (!Array.isArray(value)) throw new Error("Invalid value type for $in operator");
      return applyToFields((qb, field, useOr) =>
        useOr ? void qb.orWhereIn(field, value) : void qb.whereIn(field, value)
      );
    }
    case SearchResourceOperators.$contains: {
      if (typeof value !== "string") throw new Error("Invalid value type for $contains operator");
      return applyToFields((qb, field, useOr) =>
        useOr ? void qb.orWhereILike(field, `%${value}%`) : void qb.whereILike(field, `%${value}%`)
      );
    }
    default:
      throw new Error(`Unsupported operator: ${String(operator)}`);
  }
};
/**
 * Translates a search-resource filter object into knex WHERE clauses on
 * `rootQuery`. Top-level attributes are AND-combined; entries under `$or`
 * are OR-combined with each other and appended as one additional AND group.
 * `getAttributeField` maps a filter attribute to the DB column(s) it should
 * match against (an array of columns means "any of these columns").
 */
export const buildKnexFilterForSearchResource = <T extends { [K: string]: TSearchResourceOperator }, K extends keyof T>(
  rootQuery: Knex.QueryBuilder,
  searchFilter: T & { $or?: T[] },
  getAttributeField: (attr: K) => string | string[] | null
) => {
  const { $or: orFilters = [] } = searchFilter;
  (Object.keys(searchFilter) as K[]).forEach((key) => {
    // akhilmhdh: yes, we could have split in top. This is done to satisfy ts type error
    if (key === "$or") return;
    const dbField = getAttributeField(key);
    if (!dbField) throw new Error(`DB field not found for ${String(key)}`);
    const dbValue = searchFilter[key];
    // Scalar shorthand: `{ name: "foo" }` behaves as `{ name: { $eq: "foo" } }`.
    if (typeof dbValue === "string" || typeof dbValue === "number") {
      buildKnexQuery(rootQuery, dbField, SearchResourceOperators.$eq, dbValue);
      return;
    }
    // Operator object: apply each supplied operator ($eq/$neq/$in/$contains) in turn.
    Object.keys(dbValue as Record<string, unknown>).forEach((el) => {
      buildKnexQuery(
        rootQuery,
        dbField,
        el as SearchResourceOperators,
        (dbValue as Record<SearchResourceOperators, unknown>)[el as SearchResourceOperators]
      );
    });
  });
  if (orFilters.length) {
    // Group all $or entries under a single AND-ed clause:
    // (<top-level filters>) AND (<or[0]> OR <or[1]> OR ...)
    void rootQuery.andWhere((andQb) => {
      return orFilters.forEach((orFilter) => {
        return void andQb.orWhere((qb) => {
          // Same attribute handling as above, but scoped to this OR branch.
          (Object.keys(orFilter) as K[]).forEach((key) => {
            const dbField = getAttributeField(key);
            if (!dbField) throw new Error(`DB field not found for ${String(key)}`);
            const dbValue = orFilter[key];
            if (typeof dbValue === "string" || typeof dbValue === "number") {
              buildKnexQuery(qb, dbField, SearchResourceOperators.$eq, dbValue);
              return;
            }
            Object.keys(dbValue as Record<string, unknown>).forEach((el) => {
              buildKnexQuery(
                qb,
                dbField,
                el as SearchResourceOperators,
                (dbValue as Record<SearchResourceOperators, unknown>)[el as SearchResourceOperators]
              );
            });
          });
        });
      });
    });
  }
};

View File

@@ -0,0 +1,43 @@
import { z } from "zod";
// Comparison operators supported by the generic resource-search filter.
export enum SearchResourceOperators {
  $eq = "$eq", // exact match
  $neq = "$neq", // not equal
  $in = "$in", // value is one of the listed entries
  $contains = "$contains" // case-insensitive substring match (applied as ILIKE)
}
// Value shape accepted for a single searchable attribute: a bare scalar
// (shorthand for $eq) or an object of explicit operators.
export const SearchResourceOperatorSchema = z.union([
  z.string(),
  z.number(),
  z
    .object({
      [SearchResourceOperators.$eq]: z.string().optional(),
      [SearchResourceOperators.$neq]: z.string().optional(),
      [SearchResourceOperators.$in]: z.string().array().optional(),
      // $contains is a single substring pattern, not an array: the knex
      // filter builder rejects non-string $contains values at query time,
      // and route-level schemas validate it as a plain string.
      [SearchResourceOperators.$contains]: z.string().optional()
    })
    .partial()
]);
// Runtime value type accepted for one searchable attribute (scalar shorthand
// or operator object), inferred from the schema above.
export type TSearchResourceOperator = z.infer<typeof SearchResourceOperatorSchema>;

// Shape each resource's search schema must follow: every attribute accepts
// either a (possibly refined) scalar or a subset of the supported operators.
export type TSearchResource = {
  [k: string]: z.ZodOptional<
    z.ZodUnion<
      [
        z.ZodEffects<z.ZodString | z.ZodNumber>,
        z.ZodObject<{
          [SearchResourceOperators.$eq]?: z.ZodOptional<z.ZodEffects<z.ZodString | z.ZodNumber>>;
          [SearchResourceOperators.$neq]?: z.ZodOptional<z.ZodEffects<z.ZodString | z.ZodNumber>>;
          [SearchResourceOperators.$in]?: z.ZodOptional<z.ZodArray<z.ZodEffects<z.ZodString | z.ZodNumber>>>;
          [SearchResourceOperators.$contains]?: z.ZodOptional<z.ZodEffects<z.ZodString>>;
        }>
      ]
    >
  >;
};

// Wraps a resource's attribute schema so callers can additionally pass
// `$or: [...]` groups of the same shape; the whole search object is optional.
export const buildSearchZodSchema = <T extends TSearchResource>(schema: z.ZodObject<T>) => {
  return schema.extend({ $or: schema.array().optional() }).optional();
};

View File

@@ -1,3 +1,5 @@
import { z } from "zod";
export enum CharacterType {
Alphabets = "alphabets",
Numbers = "numbers",
@@ -101,3 +103,10 @@ export const characterValidator = (allowedCharacters: CharacterType[]) => {
return regex.test(input);
};
};
// Builds a reusable zod refinement that restricts a string schema to the
// given character classes; the message names the offending field.
export const zodValidateCharacters = (allowedCharacters: CharacterType[]) => {
  const isAllowed = characterValidator(allowedCharacters);
  return (schema: z.ZodString, fieldName: string) =>
    schema.refine(isAllowed, {
      message: `${fieldName} can only contain ${allowedCharacters.join(",")}`
    });
};

View File

@@ -113,7 +113,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
await server.register(fastifyErrHandler);
// Rate limiters and security headers
if (appCfg.isProductionMode) {
if (appCfg.isProductionMode && appCfg.isCloud) {
await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());
}

View File

@@ -3,15 +3,26 @@ import { z } from "zod";
import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgMembershipRole, OrgRolesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { IDENTITIES } from "@app/lib/api-docs";
import { buildSearchZodSchema, SearchResourceOperators } from "@app/lib/search-resource/search";
import { OrderByDirection } from "@app/lib/types";
import { CharacterType, zodValidateCharacters } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { OrgIdentityOrderBy } from "@app/services/identity/identity-types";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
import { SanitizedProjectSchema } from "../sanitizedSchemas";
const searchResourceZodValidate = zodValidateCharacters([
CharacterType.AlphaNumeric,
CharacterType.Spaces,
CharacterType.Underscore,
CharacterType.Hyphen
]);
export const registerIdentityRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@@ -245,7 +256,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/",
config: {
rateLimit: writeLimit
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
@@ -289,6 +300,103 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
}
});
  // POST /search — filtered, ordered, paginated identity search scoped to
  // the caller's organization.
  server.route({
    method: "POST",
    url: "/search",
    config: {
      rateLimit: readLimit
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    schema: {
      description: "Search identities",
      security: [
        {
          bearerAuth: []
        }
      ],
      body: z.object({
        orderBy: z
          .nativeEnum(OrgIdentityOrderBy)
          .default(OrgIdentityOrderBy.Name)
          .describe(IDENTITIES.SEARCH.orderBy)
          .optional(),
        orderDirection: z
          .nativeEnum(OrderByDirection)
          .default(OrderByDirection.ASC)
          .describe(IDENTITIES.SEARCH.orderDirection)
          .optional(),
        limit: z.number().max(100).default(50).describe(IDENTITIES.SEARCH.limit),
        offset: z.number().default(0).describe(IDENTITIES.SEARCH.offset),
        // Each attribute accepts either a scalar shorthand (exact match) or an
        // operator object; all values are restricted to alphanumerics, spaces,
        // underscore and hyphen via searchResourceZodValidate.
        search: buildSearchZodSchema(
          z
            .object({
              name: z
                .union([
                  searchResourceZodValidate(z.string().max(255), "Name"),
                  z
                    .object({
                      [SearchResourceOperators.$eq]: searchResourceZodValidate(z.string().max(255), "Name $eq"),
                      [SearchResourceOperators.$contains]: searchResourceZodValidate(
                        z.string().max(255),
                        "Name $contains"
                      ),
                      [SearchResourceOperators.$in]: searchResourceZodValidate(z.string().max(255), "Name $in").array()
                    })
                    .partial()
                ])
                .describe(IDENTITIES.SEARCH.search.name),
              role: z
                .union([
                  searchResourceZodValidate(z.string().max(255), "Role"),
                  z
                    .object({
                      [SearchResourceOperators.$eq]: searchResourceZodValidate(z.string().max(255), "Role $eq"),
                      [SearchResourceOperators.$in]: searchResourceZodValidate(z.string().max(255), "Role $in").array()
                    })
                    .partial()
                ])
                .describe(IDENTITIES.SEARCH.search.role)
            })
            .describe(IDENTITIES.SEARCH.search.desc)
            .partial()
        )
      }),
      response: {
        200: z.object({
          identities: IdentityOrgMembershipsSchema.extend({
            customRole: OrgRolesSchema.pick({
              id: true,
              name: true,
              slug: true,
              permissions: true,
              description: true
            }).optional(),
            identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
              authMethods: z.array(z.string())
            })
          }).array(),
          totalCount: z.number()
        })
      }
    },
    handler: async (req) => {
      // orgId is taken from the actor's own permission context, so searches
      // can never cross organization boundaries.
      const { identityMemberships, totalCount } = await server.services.identity.searchOrgIdentities({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        searchFilter: req.body.search,
        orgId: req.permission.orgId,
        limit: req.body.limit,
        offset: req.body.offset,
        orderBy: req.body.orderBy,
        orderDirection: req.body.orderDirection
      });
      return { identities: identityMemberships, totalCount };
    }
  });
server.route({
method: "GET",
url: "/:identityId/identity-memberships",

View File

@@ -14,10 +14,15 @@ import {
TIdentityUniversalAuths,
TOrgRoles
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { buildKnexFilterForSearchResource } from "@app/lib/search-resource/db";
import { OrderByDirection } from "@app/lib/types";
import { OrgIdentityOrderBy, TListOrgIdentitiesByOrgIdDTO } from "@app/services/identity/identity-types";
import {
OrgIdentityOrderBy,
TListOrgIdentitiesByOrgIdDTO,
TSearchOrgIdentitiesByOrgIdDAL
} from "@app/services/identity/identity-types";
import { buildAuthMethods } from "./identity-fns";
@@ -195,7 +200,6 @@ export const identityOrgDALFactory = (db: TDbClient) => {
"paginatedIdentity.identityId",
`${TableName.IdentityJwtAuth}.identityId`
)
.select(
db.ref("id").withSchema("paginatedIdentity"),
db.ref("role").withSchema("paginatedIdentity"),
@@ -309,6 +313,214 @@ export const identityOrgDALFactory = (db: TDbClient) => {
}
};
const searchIdentities = async (
{
limit,
offset = 0,
orderBy = OrgIdentityOrderBy.Name,
orderDirection = OrderByDirection.ASC,
searchFilter,
orgId
}: TSearchOrgIdentitiesByOrgIdDAL,
tx?: Knex
) => {
try {
const searchQuery = (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityOrgMembership}.identityId`)
.where(`${TableName.IdentityOrgMembership}.orgId`, orgId)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.orderBy(`${TableName.Identity}.${orderBy}`, orderDirection)
.select(`${TableName.IdentityOrgMembership}.id`)
.select<{ id: string; total_count: string }>(
db.raw(
`count(${TableName.IdentityOrgMembership}."identityId") OVER(PARTITION BY ${TableName.IdentityOrgMembership}."orgId") as total_count`
)
)
.as("searchedIdentities");
if (searchFilter) {
buildKnexFilterForSearchResource(searchQuery, searchFilter, (attr) => {
switch (attr) {
case "role":
return [`${TableName.OrgRoles}.slug`, `${TableName.IdentityOrgMembership}.role`];
case "name":
return `${TableName.Identity}.name`;
default:
throw new BadRequestError({ message: `Invalid ${String(attr)} provided` });
}
});
}
if (limit) {
void searchQuery.offset(offset).limit(limit);
}
type TSubquery = Awaited<typeof searchQuery>;
const query = (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.where(`${TableName.IdentityOrgMembership}.orgId`, orgId)
.join<TSubquery>(searchQuery, `${TableName.IdentityOrgMembership}.id`, "searchedIdentities.id")
.join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
void queryBuilder
.on(`${TableName.IdentityOrgMembership}.identityId`, `${TableName.IdentityMetadata}.identityId`)
.andOn(`${TableName.IdentityOrgMembership}.orgId`, `${TableName.IdentityMetadata}.orgId`);
})
.leftJoin(
TableName.IdentityUniversalAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityUniversalAuth}.identityId`
)
.leftJoin(
TableName.IdentityGcpAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityGcpAuth}.identityId`
)
.leftJoin(
TableName.IdentityAwsAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityAwsAuth}.identityId`
)
.leftJoin(
TableName.IdentityKubernetesAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityKubernetesAuth}.identityId`
)
.leftJoin(
TableName.IdentityOidcAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityOidcAuth}.identityId`
)
.leftJoin(
TableName.IdentityAzureAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityAzureAuth}.identityId`
)
.leftJoin(
TableName.IdentityTokenAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityTokenAuth}.identityId`
)
.leftJoin(
TableName.IdentityJwtAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityJwtAuth}.identityId`
)
.select(
db.ref("id").withSchema(TableName.IdentityOrgMembership),
db.ref("total_count").withSchema("searchedIdentities"),
db.ref("role").withSchema(TableName.IdentityOrgMembership),
db.ref("roleId").withSchema(TableName.IdentityOrgMembership),
db.ref("orgId").withSchema(TableName.IdentityOrgMembership),
db.ref("createdAt").withSchema(TableName.IdentityOrgMembership),
db.ref("updatedAt").withSchema(TableName.IdentityOrgMembership),
db.ref("identityId").withSchema(TableName.IdentityOrgMembership).as("identityId"),
db.ref("name").withSchema(TableName.Identity).as("identityName"),
db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth),
db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth),
db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth),
db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth),
db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth)
)
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
.select(db.ref("name").as("crName").withSchema(TableName.OrgRoles))
.select(db.ref("slug").as("crSlug").withSchema(TableName.OrgRoles))
.select(db.ref("description").as("crDescription").withSchema(TableName.OrgRoles))
.select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles))
.select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles))
.select(
db.ref("id").withSchema(TableName.IdentityMetadata).as("metadataId"),
db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue")
);
if (orderBy === OrgIdentityOrderBy.Name) {
void query.orderBy("identityName", orderDirection);
}
const docs = await query;
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: ({
crId,
crDescription,
crSlug,
crPermission,
crName,
identityId,
identityName,
role,
roleId,
total_count,
id,
uaId,
awsId,
gcpId,
jwtId,
kubernetesId,
oidcId,
azureId,
tokenId,
createdAt,
updatedAt
}) => ({
role,
roleId,
identityId,
id,
total_count: total_count as string,
orgId,
createdAt,
updatedAt,
customRole: roleId
? {
id: crId,
name: crName,
slug: crSlug,
permissions: crPermission,
description: crDescription
}
: undefined,
identity: {
id: identityId,
name: identityName,
authMethods: buildAuthMethods({
uaId,
awsId,
gcpId,
kubernetesId,
oidcId,
azureId,
tokenId,
jwtId
})
}
}),
childrenMapper: [
{
key: "metadataId",
label: "metadata" as const,
mapper: ({ metadataKey, metadataValue, metadataId }) => ({
id: metadataId,
key: metadataKey,
value: metadataValue
})
}
]
});
return { docs: formattedDocs, totalCount: Number(formattedDocs?.[0]?.total_count ?? 0) };
} catch (error) {
throw new DatabaseError({ error, name: "FindByOrgId" });
}
};
const countAllOrgIdentities = async (
{ search, ...filter }: Partial<TIdentityOrgMemberships> & Pick<TListOrgIdentitiesByOrgIdDTO, "search">,
tx?: Knex
@@ -331,5 +543,5 @@ export const identityOrgDALFactory = (db: TDbClient) => {
}
};
return { ...identityOrgOrm, find, findOne, countAllOrgIdentities };
return { ...identityOrgOrm, find, findOne, countAllOrgIdentities, searchIdentities };
};

View File

@@ -21,6 +21,7 @@ import {
TGetIdentityByIdDTO,
TListOrgIdentitiesByOrgIdDTO,
TListProjectIdentitiesByIdentityIdDTO,
TSearchOrgIdentitiesByOrgIdDTO,
TUpdateIdentityDTO
} from "./identity-types";
@@ -288,6 +289,33 @@ export const identityServiceFactory = ({
return { identityMemberships, totalCount };
};
  /**
   * Searches identities within an organization on behalf of an actor.
   * Verifies the actor holds org-level identity read permission, then
   * delegates filtering/ordering/pagination to the DAL.
   */
  const searchOrgIdentities = async ({
    orgId,
    actor,
    actorId,
    actorAuthMethod,
    actorOrgId,
    limit,
    offset,
    orderBy,
    orderDirection,
    searchFilter = {}
  }: TSearchOrgIdentitiesByOrgIdDTO) => {
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);

    const { totalCount, docs } = await identityOrgMembershipDAL.searchIdentities({
      orgId,
      limit,
      offset,
      orderBy,
      orderDirection,
      searchFilter
    });

    return { identityMemberships: docs, totalCount };
  };
const listProjectIdentitiesByIdentityId = async ({
identityId,
actor,
@@ -317,6 +345,7 @@ export const identityServiceFactory = ({
deleteIdentity,
listOrgIdentities,
getIdentityById,
searchOrgIdentities,
listProjectIdentitiesByIdentityId
};
};

View File

@@ -1,4 +1,5 @@
import { IPType } from "@app/lib/ip";
import { TSearchResourceOperator } from "@app/lib/search-resource/search";
import { OrderByDirection, TOrgPermission } from "@app/lib/types";
export type TCreateIdentityDTO = {
@@ -46,3 +47,17 @@ export enum OrgIdentityOrderBy {
Name = "name"
// Role = "role"
}
// DAL-layer arguments for searching an organization's machine identities.
export type TSearchOrgIdentitiesByOrgIdDAL = {
// Maximum number of rows to return (pagination page size).
limit?: number;
// Number of rows to skip (pagination offset).
offset?: number;
// Column to order the result set by.
orderBy?: OrgIdentityOrderBy;
// Ascending or descending ordering.
orderDirection?: OrderByDirection;
// Organization whose identity memberships are searched.
orgId: string;
// Optional per-field search operators; the "number" operator variant is
// excluded — presumably because name and role are text fields (confirm).
searchFilter?: Partial<{
name: Omit<TSearchResourceOperator, "number">;
role: Omit<TSearchResourceOperator, "number">;
}>;
};
// Service-layer DTO: the DAL arguments plus the actor/permission context.
export type TSearchOrgIdentitiesByOrgIdDTO = TSearchOrgIdentitiesByOrgIdDAL & TOrgPermission;

View File

@@ -50,6 +50,7 @@ func init() {
config.INFISICAL_URL = util.AppendAPIEndpoint(config.INFISICAL_URL)
// util.DisplayAptInstallationChangeBanner(silent)
if !util.IsRunningInDocker() && !silent {
util.CheckForUpdate()
}

View File

@@ -53,6 +53,25 @@ func CheckForUpdate() {
}
}
// DisplayAptInstallationChangeBanner warns apt-based Linux users that the
// legacy package installation script is deprecated and points them at the
// new installation docs. It is a no-op when isSilent is true, on non-Linux
// systems, or when apt-get is not on PATH.
func DisplayAptInstallationChangeBanner(isSilent bool) {
	if isSilent {
		return
	}
	if runtime.GOOS != "linux" {
		return
	}
	// Only warn users who plausibly installed via apt.
	if _, err := exec.LookPath("apt-get"); err != nil {
		return
	}
	yellow := color.New(color.FgYellow).SprintFunc()
	// The original wrapped this in fmt.Sprintf("%s", ...), which is a no-op
	// on an already-formatted string (staticcheck S1025); print it directly.
	fmt.Fprintln(os.Stderr, yellow("Update Required: Your current package installation script is outdated and will no longer receive updates.\nPlease update to the new installation script which can be found here https://infisical.com/docs/cli/overview#installation debian section\n"))
}
func getLatestTag(repoOwner string, repoName string) (string, string, error) {
url := fmt.Sprintf("https://api.github.com/repos/%s/%s/releases/latest", repoOwner, repoName)
resp, err := http.Get(url)

551
cli/scripts/setup.deb.sh Normal file
View File

@@ -0,0 +1,551 @@
#!/usr/bin/env bash
#
# Setup script for the Infisical S3-backed apt repository.
# The core commands start from the "MAIN" section below.
#
# Detect whether we are being piped into bash (no $BASH_SOURCE) so later help
# text can show the correct command for re-invoking this script.
test -z "$BASH_SOURCE" && {
self="sudo -E bash"
prefix="<curl command> |"
} || {
self=$(readlink -f ${BASH_SOURCE:-$0})
prefix=""
}
# Shared scratch log: `die` tails it on failure and the EXIT trap removes it.
tmp_log=$(mktemp .s3_setup_XXXXXXXXX)
# Environment variables that can be set
PKG_URL=${PKG_URL:-"https://artifacts-cli.infisical.com"}
PKG_PATH=${PKG_PATH:-"deb"}
PACKAGE_NAME=${PACKAGE_NAME:-"infisical"}
GPG_KEY_URL=${GPG_KEY_URL:-"${PKG_URL}/infisical.gpg"}
# Terminal colour support: disable all escape codes when the terminal
# reports fewer than 8 colours.
colours=$(tput colors 2>/dev/null || echo "256")
no_colour="\e[39;49m"
green_colour="\e[32m"
red_colour="\e[41;97m"
bold="\e[1m"
reset="\e[0m"
use_colours=$(test -n "$colours" && test $colours -ge 8 && echo "yes")
test "$use_colours" == "yes" || {
no_colour=""
green_colour=""
red_colour=""
bold=""
reset=""
}
# Example values referenced by help/error messages.
example_name="Ubuntu/Focal (20.04)"
example_distro="ubuntu"
example_codename="focal"
example_version="20.04"
# echo_helptext: print an indented " ^^^^: ..." hint line used alongside
# status output to explain a failure or suggest a next step.
function echo_helptext {
local help_text="$*"
echo " ^^^^: ... $help_text"
}
# die: print an optional help message plus a failure banner, dump the tail
# of $tmp_log for debugging context, and exit the script with status 1.
function die {
local text="$@"
test ! -z "$text" && {
echo_helptext "$text" 1>&2
}
local prefix="${red_colour} !!!!${no_colour}"
echo -e "$prefix: Oh no, your setup failed! :-( ... But we might be able to help. :-)"
echo -e "$prefix: "
echo -e "$prefix: ${bold}Please check your S3 bucket configuration and try again.${reset}"
echo -e "$prefix: "
test -f "$tmp_log" && {
local n=20
echo -e "$prefix: Last $n log lines from $tmp_log (might not be errors, nor even relevant):"
echo -e "$prefix:"
# Prefer fmt+xargs for wrapped, prefixed log lines; degrade gracefully
# when either tool is missing.
check_tool_silent "xargs" && {
check_tool_silent "fmt" && {
tail -n $n $tmp_log | fmt -t | xargs -Ilog echo -e "$prefix: > log"
} || {
tail -n $n $tmp_log | xargs -Ilog echo -e "$prefix: > log"
}
} || {
echo
tail -n $n $tmp_log
}
}
exit 1
}
# echo_colour: emit "$@" wrapped in the named colour ("green"/"red"/"no").
# Uses indirect expansion (${!colour}) to resolve e.g. $green_colour.
function echo_colour {
local colour="${1:-"no"}_colour"; shift
echo -e "${!colour}$@${no_colour}"
}
# echo_green_or_red: print $good in green when rc==0, else $bad in red;
# propagates rc so it composes transparently inside && / || chains.
function echo_green_or_red {
local rc="$1"
local good="${2:-YES}"
local bad="${3:-NO}"
test "$rc" -eq 0 && {
echo_colour "green" "$good"
} || {
echo_colour "red" "$bad"
}
return $rc
}
# echo_clearline: erase the current terminal line (used to overwrite the
# transient "RUN" marker); preserves the caller's exit status.
function echo_clearline {
local rc="$?"
echo -e -n "\033[1K\r"
return $rc
}
# echo_status: render a right-aligned coloured status column followed by the
# message text. $6 == "no" suppresses the trailing newline so a later
# echo_clearline can overwrite the line. On failure, prints $help_text.
function echo_status {
local rc="$1"
local good="$2"
local bad="$3"
local text="$4"
local help_text="$5"
local newline=$(test "$6" != "no" && echo "\n" || echo "")
local status_text=$(echo_green_or_red "$rc" "$good" "$bad")
echo_clearline
# Colour escape codes widen the string, so pad more when colours are on.
local width=$(test "$use_colours" == "yes" && echo "16" || echo "5")
printf "%${width}s %s${newline}" "${status_text}:" "$text"
test $rc -ne 0 && test ! -z "$help_text" && {
echo_helptext "$help_text"
echo
}
return $rc
}
# echo_running: show a transient " RUN" marker (no newline) for an operation
# in progress; preserves the caller's exit status.
function echo_running {
local rc=$?
local text="$1"
echo_status 0 " RUN" " RUN" "$text" "" "no"
return $rc
}
# echo_okfail_rc: report OK/NOPE for an explicitly supplied return code.
function echo_okfail_rc {
local rc=$1
local text="$2"
local help_text="$3"
echo_clearline
echo_status $rc " OK" " NOPE" "$text" "$help_text"
return $rc
}
# echo_okfail: convenience wrapper reporting OK/NOPE for the immediately
# preceding command's exit status ($?).
function echo_okfail {
echo_okfail_rc $? "$@"
return $?
}
# check_tool_silent: return 0 when $1 is an available command, probing both
# the `command -v` builtin and the external `which`.
function check_tool_silent {
local tool=${1}
command -v $tool &>/dev/null || which $tool &>/dev/null
return $?
}
# check_tool: user-visible availability check for an executable. Dies when a
# required tool is missing; returns 1 (without dying) for optional ones.
function check_tool {
local tool=${1}
local optional=${2:-false}
local required_text="optional"
if ! $optional; then required_text="required"; fi
local text="Checking for $required_text executable '$tool' ..."
echo_running "$text"
check_tool_silent "$tool"
echo_okfail "$text" || {
if ! $optional; then
die "$tool is not installed, but is required by this script."
fi
return 1
}
return 0
}
# cleanup: EXIT trap — remove the temporary log file.
function cleanup {
echo
rm -rf $tmp_log
}
# shutdown: INT trap — report cancellation and exit with status 2.
function shutdown {
echo_colour "red" " !!!!: Operation cancelled by user!"
exit 2
}
# check_os: succeed when $distro plus at least one of $version/$codename
# have been determined (by detection or explicit environment override).
function check_os {
test ! -z "$distro" && test ! -z "${version}${codename}"
return $?
}
# detect_os_system: populate $distro/$codename/$version from /etc/os-release
# (with a /etc/debian_version fallback for Debian unstable). Returns 0 when
# detection succeeded (check_os passes), non-zero otherwise.
function detect_os_system {
  check_os && return 0
  # Bug fix: $text must be assigned before echo_running consumes it; the
  # original called echo_running one line too early, printing an empty
  # status line.
  local text="Detecting your OS distribution and release using system methods ..."
  echo_running "$text"
  local tool_rc=1
  test -f '/etc/os-release' && {
    . /etc/os-release
    distro=${distro:-$ID}
    codename=${codename:-$VERSION_CODENAME}
    codename=${codename:-$(echo $VERSION | cut -d '(' -f 2 | cut -d ')' -f 1)}
    version=${version:-$VERSION_ID}
    test -z "${version}${codename}" && test -f '/etc/debian_version' && {
      # Workaround for Debian unstable releases; get the codename from debian_version
      codename=$(cat /etc/debian_version | cut -d '/' -f1)
    }
    tool_rc=0
  }
  check_os
  local rc=$?
  echo_okfail_rc $rc "$text"
  test $tool_rc -eq 0 && {
    report_os_expanded
  }
  return $rc
}
# report_os_attribute: print "name=value ", green when set, or a red
# "<empty>" placeholder when missing.
function report_os_attribute {
local name=$1
local value=$2
local coloured="" # NOTE(review): unused local — confirm and remove
echo -n "$name="
test -z "$value" && {
echo -e -n "${red_colour}<empty>${no_colour} "
} || {
echo -e -n "${green_colour}${value}${no_colour} "
}
}
# report_os_expanded: print a header line plus the detected OS values.
function report_os_expanded {
echo_helptext "Detected/provided for your OS/distribution, version and architecture:"
echo " >>>>:"
report_os_values
}
# report_os_values: one-line summary of distro/codename/arch. The codename
# is reported as the literal "stable (fixed)" because detect_os pins
# codename="stable" for this repository layout.
function report_os_values {
echo -n " >>>>: ... "
report_os_attribute "distro" $distro
report_os_attribute "codename" "stable (fixed)"
report_os_attribute "arch" $arch
echo
echo " >>>>:"
}
# detect_os_legacy_python: fall back to legacy python's
# platform.linux_distribution() to guess $distro/$version/$codename when
# system-file detection failed. Returns 0 when check_os then passes.
function detect_os_legacy_python {
  check_os && return 0
  local text="Detecting your OS distribution and release using legacy python ..."
  echo_running "$text"
  IFS='' read -r -d '' script <<-'EOF'
from __future__ import unicode_literals, print_function
import platform;
info = platform.linux_distribution() or ('', '', '');
for key, value in zip(('distro', 'version', 'codename'), info):
    print("local guess_%s=\"%s\"\n" % (key, value.lower().replace(' ', '')));
EOF
  local tool_rc=1
  check_tool_silent "python" && {
    # The python snippet emits "local guess_*=..." lines which we eval here.
    eval $(python -c "$script")
    distro=${distro:-$guess_distro}
    codename=${codename:-$guess_codename}
    version=${version:-$guess_version}
    tool_rc=$?
  }
  check_os
  local rc=$?
  echo_okfail_rc $rc "$text"
  check_tool_silent "python" || {
    echo_helptext "Python isn't available, so skipping detection method (hint: install python)"
  }
  test $tool_rc -eq 0 && {
    # Bug fix: the original called undefined `report_os`; the defined
    # reporter (used by the sibling detection functions) is report_os_expanded.
    report_os_expanded
  }
  return $rc
}
# detect_os_modern_python: last-resort detection via the python 'distro'
# library. Bootstraps pip (ensurepip, then get-pip) and installs 'distro' if
# needed, then evals the guessed distro/version/codename values.
function detect_os_modern_python {
  check_os && return 0
  check_tool_silent "python" && {
    local text="Ensuring python-pip is installed ..."
    echo_running "$text"
    check_tool_silent "pip"
    echo_okfail "$text" || {
      local text="Checking if pip can be bootstrapped without get-pip ..."
      echo_running "$text"
      python -m ensurepip --default-pip &>$tmp_log
      echo_okfail "$text" || {
        local text="Installing pip via get-pip bootstrap ..."
        echo_running "$text"
        # Bug fix: stderr was redirected to "$tmp/log" — $tmp is never set,
        # so this wrote to the root path "/log". Use the shared $tmp_log.
        curl -1sLf https://bootstrap.pypa.io/get-pip.py 2>$tmp_log | python &>$tmp_log
        echo_okfail "$text" || die "Failed to install pip!"
      }
    }
    local text="Installing 'distro' python library ..."
    echo_running "$text"
    python -c 'import distro' &>$tmp_log || python -m pip install distro &>$tmp_log
    echo_okfail "$text" || die "Failed to install required 'distro' python library!"
  }
  IFS='' read -r -d '' script <<-'EOF'
from __future__ import unicode_literals, print_function
import distro;
info = distro.linux_distribution(full_distribution_name=False) or ('', '', '');
for key, value in zip(('distro', 'version', 'codename'), info):
    print("local guess_%s=\"%s\"\n" % (key, value.lower().replace(' ', '')));
EOF
  local text="Detecting your OS distribution and release using modern python ..."
  echo_running "$text"
  local tool_rc=1
  check_tool_silent "python" && {
    eval $(python -c "$script")
    distro=${distro:-$guess_distro}
    codename=${codename:-$guess_codename}
    version=${version:-$guess_version}
    tool_rc=$?
  }
  check_os
  local rc=$?
  echo_okfail_rc $rc "$text"
  check_tool_silent "python" || {
    echo_helptext "Python isn't available, so skipping detection method (hint: install python)"
  }
  test $tool_rc -eq 0 && {
    report_os_expanded
  }
  return $rc
}
# detect_os: resolve $distro/$codename/$arch, trying (in order) explicit
# environment overrides, /etc/os-release, legacy python, then modern python.
# Dies with guidance when no distro can be determined.
function detect_os {
# Backwards compat for old distribution parameter names
distro=${distro:-$os}
# Always use "stable" as the codename
codename="stable"
arch=${arch:-$(arch || uname -m)}
# Only detect OS if not manually specified
if [ -z "$distro" ]; then
detect_os_system ||
detect_os_legacy_python ||
detect_os_modern_python
fi
# Always ensure we have a distro
(test -z "$distro") && {
echo_okfail_rc "1" "Unable to detect your OS distribution!"
cat <<EOF
>>>>:
>>>>: The 'distro' value is required. Without it, the install script
>>>>: cannot retrieve the correct configuration for this system.
>>>>:
>>>>: You can force this script to use a particular value by specifying distro
>>>>: via environment variable. E.g., to specify a distro
>>>>: such as $example_name, use the following:
>>>>:
>>>>: $prefix distro=$example_distro $self
>>>>:
EOF
die
}
}
# create_repo_config: emit the apt sources.list entry for the repository,
# pinned to the "stable" suite and signed by the package keyring.
function create_repo_config {
if [ -z "$PKG_PATH" ]; then
repo_url="${PKG_URL}"
else
repo_url="${PKG_URL}/${PKG_PATH}"
fi
# Create configuration with GPG key verification
local gpg_keyring_path="/usr/share/keyrings/${PACKAGE_NAME}-archive-keyring.gpg"
local apt_conf=$(cat <<EOF
deb [arch=$(dpkg --print-architecture) signed-by=${gpg_keyring_path}] ${repo_url} stable main
EOF
)
echo "$apt_conf"
return 0
}
# check_gpg_key: verify the signing-key URL answers HTTP 200 (HEAD request,
# following redirects) before any download is attempted; dies otherwise.
function check_gpg_key {
local text="Checking if GPG key is accessible at ${GPG_KEY_URL} ..."
echo_running "$text"
local code="$(curl -1IsL -w "%{http_code}\\n" "$GPG_KEY_URL" -o /dev/null --connect-timeout 15 --max-time 60)"
test "$code" == "200" && {
echo_okfail_rc 0 "$text"
return 0
} || {
echo_okfail_rc 1 "$text"
echo_helptext "Failed to access the GPG key. Please check that it exists in your S3 bucket."
cat <<EOF
>>>>:
>>>>: It looks like we can't access the GPG key at ${GPG_KEY_URL}
>>>>:
EOF
die
}
}
# check_dpkg_tool: ensure dpkg package $1 is installed. When absent and
# $install (arg 3, default true) is set, tries `apt-get install`; dies if
# the package is required (arg 2, default true) and cannot be obtained.
function check_dpkg_tool {
local tool=${1}
local required=${2:-true}
local install=${3:-true}
local text="Checking for apt dependency '$tool' ..."
echo_running "$text"
dpkg -l | grep "$tool\>" &>$tmp_log
echo_okfail "$text" || {
if $install; then
# Refresh the package index once per run before the first install attempt.
test "$apt_updated" == "yes" || update_apt
local text="Attempting to install '$tool' ..."
echo_running "$text"
apt-get install -y "$tool" &>$tmp_log
echo_okfail "$text" || {
if $required; then
die "Could not install '$tool', check your permissions, etc."
fi
}
else {
if $required; then
die "$tool is not installed, but is required by this script."
fi
}
fi
}
return 0
}
# update_apt: run `apt-get update` once, logging to a private temp file so a
# failure can show its own output; sets $apt_updated to suppress repeats.
# NOTE(review): the local tmp_log shadows the global one and is removed
# before die runs, so die's log-tail section shows nothing here — the log
# is cat'ed explicitly instead. Confirm this shadowing is intentional.
function update_apt {
local text="Updating apt repository metadata cache ..."
local tmp_log=$(mktemp .s3_deb_output_XXXXXXXXX.log)
echo_running "$text"
apt-get update &>$tmp_log
echo_okfail "$text" || {
echo_colour "red" "Failed to update via apt-get update"
cat $tmp_log
rm -rf $tmp_log
die "Failed to update via apt-get update - Context above (maybe no packages?)."
}
rm -rf $tmp_log
apt_updated="yes"
}
# install_apt_prereqs: install the apt packages this script depends on.
# Keyring packages are only needed on Debian (for apt-transport-https);
# ca-certificates is treated as optional.
function install_apt_prereqs {
  if test "${distro}" == "debian"; then
    # Debian-archive-keyring has to be installed for apt-transport-https.
    check_dpkg_tool "debian-keyring"
    check_dpkg_tool "debian-archive-keyring"
  fi
  check_dpkg_tool "apt-transport-https"
  check_dpkg_tool "ca-certificates" false
  check_dpkg_tool "gnupg"
}
# import_gpg_key: download the repository signing key, install it into the
# apt keyring, and additionally copy it into trusted.gpg.d for apt versions
# without signed-by support. Dies if the key cannot be fetched or imported.
function import_gpg_key {
  local text="Importing '$PACKAGE_NAME' repository GPG key from S3 ..."
  echo_running "$text"
  local gpg_keyring_path="/usr/share/keyrings/${PACKAGE_NAME}-archive-keyring.gpg"
  # Check if GPG key is accessible
  check_gpg_key
  # Download and import GPG key. Bug fix: capture the pipeline status
  # immediately — the original tested $? only after the chmod/cp commands,
  # so a failed download or dearmor was reported as success.
  curl -1sLf "${GPG_KEY_URL}" | gpg --dearmor > "$gpg_keyring_path"
  local import_rc=$? # exit status of gpg (last command in the pipeline)
  chmod 644 "$gpg_keyring_path"
  # Check for older apt versions that don't support signed-by
  local signed_by_version="1.1"
  local detected_version=$(dpkg -s apt | grep Version | cut -d' ' -f2)
  if [ "$(printf "%s\n" $detected_version $signed_by_version | sort -V | head -n 1)" != "$signed_by_version" ]; then
    echo_helptext "Detected older apt version without signed-by support. Copying key to trusted.gpg.d."
    cp ${gpg_keyring_path} /etc/apt/trusted.gpg.d/${PACKAGE_NAME}.gpg
    chmod 644 /etc/apt/trusted.gpg.d/${PACKAGE_NAME}.gpg
  fi
  echo_okfail_rc $import_rc "$text" || die "Could not import the GPG key for this repository"
}
# setup_repository: write the apt source entry into /etc/apt/sources.list.d
# and make it world-readable.
function setup_repository {
local repo_path="/etc/apt/sources.list.d/${PACKAGE_NAME}.list"
local text="Installing '$PACKAGE_NAME' repository via apt ..."
echo_running "$text"
create_repo_config > "$repo_path"
chmod 644 $repo_path
echo_okfail "$text" || die "Could not install the repository, do you have permissions?"
}
# usage: print command-line help and exit 0.
# NOTE(review): the -b (PKG_URL) and -s (PKG_PATH) options accepted by the
# getopts loop below are not described here — confirm and document them.
function usage () {
cat <<EOF
Usage: $self [opts]
-h Displays this usage text.
-i Ignore repository setup errors during setup and
continue with install. This will leave the repository config
in place rather than removing it upon errors.
-p Package name to use for repository setup (default: ${PACKAGE_NAME})
-k GPG key URL (default: ${GPG_KEY_URL})
EOF
exit 0
}
# Install signal handlers: clean up the temp log on any exit; exit 2 on ^C.
trap cleanup EXIT
trap shutdown INT
# NOTE(review): ignore_errors defaults to 1 and -i sets it to 0, yet the
# variable is never consulted anywhere in this script — confirm intent.
ignore_errors=1
apt_updated="no"
# Option parsing: -b and -s override the package URL and path respectively.
while getopts ":ihp:b:s:k:" OPT; do
case $OPT in
i) ignore_errors=0 ;;
h) usage ;;
p) PACKAGE_NAME=$OPTARG ;;
b) PKG_URL=$OPTARG ;;
s) PKG_PATH=$OPTARG ;;
k) GPG_KEY_URL=$OPTARG ;;
\?) usage ;;
esac
done
shift $(($OPTIND - 1))
#
# MAIN
#
echo "Executing the setup script for the '$PACKAGE_NAME' S3 repository ..."
echo
# Hard prerequisites — check_tool dies if either is missing.
check_tool "curl"
check_tool "apt-get"
detect_os
install_apt_prereqs
import_gpg_key
setup_repository
update_apt
echo_okfail_rc "0" "The repository has been installed successfully - You're ready to rock!"
echo
echo "You can now install the package with: apt install $PACKAGE_NAME"

View File

@@ -4,12 +4,18 @@ for i in *.apk; do
cloudsmith push alpine --republish infisical/infisical-cli/alpine/any-version $i
done
# for i in *.deb; do
# [ -f "$i" ] || break
# cloudsmith push deb --republish infisical/infisical-cli/any-distro/any-version $i
# done
for i in *.deb; do
[ -f "$i" ] || break
cloudsmith push deb --republish infisical/infisical-cli/any-distro/any-version $i
deb-s3 upload --bucket=$INFISICAL_CLI_S3_BUCKET --prefix=deb --visibility=private --sign=$INFISICAL_CLI_REPO_SIGNING_KEY_ID --preserve-versions $i
done
for i in *.rpm; do
[ -f "$i" ] || break
cloudsmith push rpm --republish infisical/infisical-cli/any-distro/any-version $i
done
done

View File

@@ -0,0 +1,4 @@
---
title: "Search"
openapi: "POST /api/v1/identities/search"
---

View File

@@ -8,6 +8,11 @@ You can use it across various environments, whether it's local development, CI/C
## Installation
<Warning>
As of 04/08/25, all future releases for Debian/Ubuntu will be distributed via the official Infisical repository at https://artifacts-cli.infisical.com.
No new releases will be published for Debian/Ubuntu on Cloudsmith going forward.
</Warning>
<Tabs>
<Tab title="MacOS">
Use [brew](https://brew.sh/) package manager
@@ -93,11 +98,12 @@ You can use it across various environments, whether it's local development, CI/C
</Tip>
</Tab>
<Tab title="Debian/Ubuntu">
Add Infisical repository
```bash
curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' \
'https://artifacts-cli.infisical.com/setup.deb.sh' \
| sudo -E bash
```

View File

@@ -264,6 +264,7 @@ The available authentication methods are `universalAuth`, `kubernetesAuth`, `aws
- `credentialsRef.secretName`: The name of the Kubernetes secret.
- `credentialsRef.secretNamespace`: The namespace of the Kubernetes secret.
Example:
```yaml
@@ -296,6 +297,9 @@ The available authentication methods are `universalAuth`, `kubernetesAuth`, `aws
- `serviceAccountRef`: The name and namespace of the service account that will be used to authenticate with Infisical.
- `serviceAccountRef.name`: The name of the service account.
- `serviceAccountRef.namespace`: The namespace of the service account.
- `autoCreateServiceAccountToken`: If set to `true`, the operator will automatically create a short-lived service account token on-demand for the service account. Defaults to `false`.
- `serviceAccountTokenAudiences`: Optionally specify audience for the service account token. This field is only relevant if you have set `autoCreateServiceAccountToken` to `true`. No audience is specified by default.
Example:
@@ -303,6 +307,9 @@ The available authentication methods are `universalAuth`, `kubernetesAuth`, `aws
spec:
kubernetesAuth:
identityId: <machine-identity-id>
autoCreateServiceAccountToken: true # Automatically creates short-lived service account tokens for the service account.
serviceAccountTokenAudiences:
- <audience> # Optionally specify audience for the service account token. No audience is specified by default.
serviceAccountRef:
name: <secret-name>
namespace: <secret-namespace>

View File

@@ -291,6 +291,8 @@ After applying the InfisicalPushSecret CRD, you should notice that the secrets y
- `serviceAccountRef`: The name and namespace of the service account that will be used to authenticate with Infisical.
- `serviceAccountRef.name`: The name of the service account.
- `serviceAccountRef.namespace`: The namespace of the service account.
- `autoCreateServiceAccountToken`: If set to `true`, the operator will automatically create a short-lived service account token on-demand for the service account. Defaults to `false`.
- `serviceAccountTokenAudiences`: Optionally specify audience for the service account token. This field is only relevant if you have set `autoCreateServiceAccountToken` to `true`. No audience is specified by default.
Example:
@@ -298,6 +300,9 @@ After applying the InfisicalPushSecret CRD, you should notice that the secrets y
spec:
kubernetesAuth:
identityId: <machine-identity-id>
autoCreateServiceAccountToken: true # Automatically creates short-lived service account tokens for the service account.
serviceAccountTokenAudiences:
- <audience> # Optionally specify audience for the service account token. No audience is specified by default.
serviceAccountRef:
name: <secret-name>
namespace: <secret-namespace>

View File

@@ -156,157 +156,420 @@ spec:
<Accordion title="authentication.kubernetesAuth">
The Kubernetes machine identity authentication method is used to authenticate with Infisical. The identity ID is stored in a field in the InfisicalSecret resource. This authentication method can only be used within a Kubernetes environment.
<Steps>
<Step title="Obtaining the token reviewer JWT for Infisical">
1.1. Start by creating a service account in your Kubernetes cluster that will be used by Infisical to authenticate with the Kubernetes API Server.
<Tabs>
<Tab title="Short-lived service account tokens (Recommended)">
Short-lived service account tokens are automatically created by the operator and are valid only for a short period of time. This is the recommended approach for using Kubernetes auth in the Infisical Secrets Operator.
```yaml infisical-service-account.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: infisical-auth
namespace: default
<Steps>
<Step title="Obtaining the token reviewer JWT for Infisical">
**1.1.** Start by creating a reviewer service account in your Kubernetes cluster that will be used by Infisical to authenticate with the Kubernetes API Server.
```
```yaml infisical-reviewer-service-account.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: infisical-token-reviewer
namespace: default
```
kubectl apply -f infisical-service-account.yaml
```
```
1.2. Bind the service account to the `system:auth-delegator` cluster role. As described [here](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#other-component-roles), this role allows delegated authentication and authorization checks, specifically for Infisical to access the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/). You can apply the following configuration file:
```bash
kubectl apply -f infisical-reviewer-service-account.yaml
```
```yaml cluster-role-binding.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: role-tokenreview-binding
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: system:auth-delegator
subjects:
- kind: ServiceAccount
name: infisical-auth
namespace: default
```
**1.2.** Bind the reviewer service account to the `system:auth-delegator` cluster role. As described [here](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#other-component-roles), this role allows delegated authentication and authorization checks, specifically for Infisical to access the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/). You can apply the following configuration file:
```
kubectl apply -f cluster-role-binding.yaml
```
```yaml infisical-reviewer-cluster-role-binding.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: infisical-token-reviewer-role-binding
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: system:auth-delegator
subjects:
- kind: ServiceAccount
name: infisical-token-reviewer
namespace: default
```
1.3. Next, create a long-lived service account JWT token (i.e. the token reviewer JWT token) for the service account using this configuration file for a new `Secret` resource:
```bash
kubectl apply -f infisical-reviewer-cluster-role-binding.yaml
```
```yaml service-account-token.yaml
apiVersion: v1
kind: Secret
type: kubernetes.io/service-account-token
metadata:
name: infisical-auth-token
annotations:
kubernetes.io/service-account.name: "infisical-auth"
```
**1.3.** Next, create a long-lived service account JWT token (i.e. the token reviewer JWT token) for the service account using this configuration file for a new `Secret` resource:
```yaml service-account-reviewer-token.yaml
apiVersion: v1
kind: Secret
type: kubernetes.io/service-account-token
metadata:
name: infisical-token-reviewer-token
annotations:
kubernetes.io/service-account.name: "infisical-token-reviewer"
```
```
kubectl apply -f service-account-token.yaml
```
```bash
kubectl apply -f service-account-reviewer-token.yaml
```
1.4. Link the secret in step 1.3 to the service account in step 1.1:
**1.4.** Link the secret in step 1.3 to the service account in step 1.1:
```bash
kubectl patch serviceaccount infisical-auth -p '{"secrets": [{"name": "infisical-auth-token"}]}' -n default
```
```bash
kubectl patch serviceaccount infisical-token-reviewer -p '{"secrets": [{"name": "infisical-token-reviewer-token"}]}' -n default
```
1.5. Finally, retrieve the token reviewer JWT token from the secret.
**1.5.** Finally, retrieve the token reviewer JWT token from the secret.
```bash
kubectl get secret infisical-auth-token -n default -o=jsonpath='{.data.token}' | base64 --decode
```
```bash
kubectl get secret infisical-token-reviewer-token -n default -o=jsonpath='{.data.token}' | base64 --decode
```
Keep this JWT token handy as you will need it for the **Token Reviewer JWT** field when configuring the Kubernetes Auth authentication method for the identity in step 2.
Keep this JWT token handy as you will need it for the **Token Reviewer JWT** field when configuring the Kubernetes Auth authentication method for the identity in step 2.
</Step>
</Step>
<Step title="Creating an identity">
To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
<Step title="Creating an identity">
To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
![identities organization](/images/platform/identities/identities-org.png)
![identities organization](/images/platform/identities/identities-org.png)
When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
![identities organization create](/images/platform/identities/identities-org-create.png)
![identities organization create](/images/platform/identities/identities-org-create.png)
Now input a few details for your new identity. Here's some guidance for each field:
Now input a few details for your new identity. Here's some guidance for each field:
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**.
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**.
<Info>
To learn more about each field of the Kubernetes native authentication method, see step 2 of [guide](/documentation/platform/identities/kubernetes-auth#guide).
</Info>
<Info>
To learn more about each field of the Kubernetes native authentication method, see step 2 of [guide](/documentation/platform/identities/kubernetes-auth#guide).
</Info>
![identities organization create auth method](/images/platform/identities/identities-org-create-kubernetes-auth-method.png)
![identities organization create auth method](/images/platform/identities/identities-org-create-kubernetes-auth-method.png)
</Step>
<Step title="Adding an identity to a project">
To allow the operator to use the given identity to access secrets, you will need to add the identity to project(s) that you would like to grant it access to.
</Step>
<Step title="Adding an identity to a project">
To allow the operator to use the given identity to access secrets, you will need to add the identity to project(s) that you would like to grant it access to.
To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
![identities project](/images/platform/identities/identities-project.png)
![identities project](/images/platform/identities/identities-project.png)
![identities project create](/images/platform/identities/identities-project-create.png)
![identities project create](/images/platform/identities/identities-project-create.png)
</Step>
<Step title="Add your identity ID & service account to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource.
In the `authentication.kubernetesAuth.identityId` field, add the identity ID of the machine identity you created.
See the example below for more details.
</Step>
<Step title="Add your Kubernetes service account token to the InfisicalSecret resource">
Add the service account details from the previous steps under `authentication.kubernetesAuth.serviceAccountRef`.
Here you will need to enter the name and namespace of the service account.
The example below shows a complete InfisicalSecret resource with all required fields defined.
</Step>
</Step>
</Steps>
<Step title="Create a new Kubernetes service account to authenticate with Infisical">
You have already created the reviewer service account in step **1.1**. Now, create a new Kubernetes service account that will be used to authenticate with Infisical.
This service account will create short-lived tokens that will be used to authenticate with Infisical. The operator itself will handle the creation of these tokens automatically.
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
```yaml infisical-service-account.yaml
kind: ServiceAccount
apiVersion: v1
metadata:
name: infisical-service-account
```
## Example
```bash
kubectl apply -f infisical-service-account.yaml -n default
```
```yaml example-kubernetes-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
name: infisicalsecret-sample-crd
spec:
authentication:
kubernetesAuth:
identityId: <machine-identity-id>
serviceAccountRef:
name: <service-account-name>
namespace: <service-account-namespace>
</Step>
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
secretsScope:
projectSlug: your-project-slug
envSlug: prod
secretsPath: "/path"
recursive: true
...
```
<Step title="Add your identity ID & service account to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource.
In the `authentication.kubernetesAuth.identityId` field, add the identity ID of the machine identity you created.
See the example below for more details.
</Step>
<Step title="Add your Kubernetes service account token to the InfisicalSecret resource">
Add the service account details from the previous steps under `authentication.kubernetesAuth.serviceAccountRef`.
Here you will need to enter the name and namespace of the service account.
The example below shows a complete InfisicalSecret resource with all required fields defined.
Make sure you set `authentication.kubernetesAuth.autoCreateServiceAccountToken` to `true` to automatically create short-lived service account tokens for the service account.
</Step>
</Steps>
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
## Example
```yaml example-kubernetes-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
name: infisicalsecret-sample-crd
spec:
authentication:
kubernetesAuth:
identityId: <machine-identity-id>
autoCreateServiceAccountToken: true # Automatically creates short-lived service account tokens for the service account.
serviceAccountTokenAudiences:
- <audience> # Optionally specify audience for the service account token. No audience is specified by default.
serviceAccountRef:
name: infisical-service-account # The service account we just created in the previous step.
namespace: <service-account-namespace>
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
secretsScope:
projectSlug: your-project-slug
envSlug: prod
secretsPath: "/path"
recursive: true
...
```
</Tab>
<Tab title="Manual long-lived service account tokens">
Manual long-lived service account tokens are manually created by the user and are valid indefinitely unless deleted or rotated. In most cases, you should be using the automatic short-lived service account tokens as they are more secure and easier to use.
<Steps>
<Step title="Obtaining the token reviewer JWT for Infisical">
**1.1.** Start by creating a reviewer service account in your Kubernetes cluster that will be used by Infisical to authenticate with the Kubernetes API Server.
```yaml infisical-reviewer-service-account.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: infisical-token-reviewer
namespace: default
```
```bash
kubectl apply -f infisical-reviewer-service-account.yaml
```
**1.2.** Bind the reviewer service account to the `system:auth-delegator` cluster role. As described [here](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#other-component-roles), this role allows delegated authentication and authorization checks, specifically for Infisical to access the [TokenReview API](https://kubernetes.io/docs/reference/kubernetes-api/authentication-resources/token-review-v1/). You can apply the following configuration file:
```yaml infisical-reviewer-cluster-role-binding.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: infisical-token-reviewer-role-binding
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: system:auth-delegator
subjects:
- kind: ServiceAccount
name: infisical-token-reviewer
namespace: default
```
```bash
kubectl apply -f infisical-reviewer-cluster-role-binding.yaml
```
**1.3.** Next, create a long-lived service account JWT token (i.e. the token reviewer JWT token) for the service account using this configuration file for a new `Secret` resource:
```yaml service-account-reviewer-token.yaml
apiVersion: v1
kind: Secret
type: kubernetes.io/service-account-token
metadata:
name: infisical-token-reviewer-token
annotations:
kubernetes.io/service-account.name: "infisical-token-reviewer"
```
```bash
kubectl apply -f service-account-reviewer-token.yaml
```
**1.4.** Link the secret in step 1.3 to the service account in step 1.1:
```bash
kubectl patch serviceaccount infisical-token-reviewer -p '{"secrets": [{"name": "infisical-token-reviewer-token"}]}' -n default
```
**1.5.** Finally, retrieve the token reviewer JWT token from the secret.
```bash
kubectl get secret infisical-token-reviewer-token -n default -o=jsonpath='{.data.token}' | base64 --decode
```
Keep this JWT token handy as you will need it for the **Token Reviewer JWT** field when configuring the Kubernetes Auth authentication method for the identity in step 2.
</Step>
<Step title="Creating an identity">
To create an identity, head to your Organization Settings > Access Control > Machine Identities and press **Create identity**.
![identities organization](/images/platform/identities/identities-org.png)
When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
![identities organization create](/images/platform/identities/identities-org-create.png)
Now input a few details for your new identity. Here's some guidance for each field:
- Name (required): A friendly name for the identity.
- Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.
Once you've created an identity, you'll be prompted to configure the authentication method for it. Here, select **Kubernetes Auth**.
<Info>
To learn more about each field of the Kubernetes native authentication method, see step 2 of the [guide](/documentation/platform/identities/kubernetes-auth#guide).
</Info>
![identities organization create auth method](/images/platform/identities/identities-org-create-kubernetes-auth-method.png)
</Step>
<Step title="Adding an identity to a project">
To allow the operator to use the given identity to access secrets, you will need to add the identity to project(s) that you would like to grant it access to.
To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.
Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
![identities project](/images/platform/identities/identities-project.png)
![identities project create](/images/platform/identities/identities-project-create.png)
</Step>
<Step title="Create a new Kubernetes service account to authenticate with Infisical">
You have already created the reviewer service account in step **1.1**. Now, create a new Kubernetes service account that will be used to authenticate with Infisical.
```yaml infisical-service-account.yaml
kind: ServiceAccount
apiVersion: v1
metadata:
name: infisical-service-account
```
```bash
kubectl apply -f infisical-service-account.yaml -n default
```
</Step>
<Step title="Create a service account token for the Kubernetes service account">
Create a service account token for the newly created Kubernetes service account from the previous step.
```yaml infisical-service-account-token.yaml
apiVersion: v1
kind: Secret
type: kubernetes.io/service-account-token
metadata:
name: infisical-service-account-token
annotations:
kubernetes.io/service-account.name: "infisical-service-account"
```
```bash
kubectl apply -f infisical-service-account-token.yaml -n default
```
Patch the service account with the newly created service account token.
```bash
kubectl patch serviceaccount infisical-service-account -p '{"secrets": [{"name": "infisical-service-account-token"}]}' -n default
```
</Step>
<Step title="Add your identity ID & service account to your InfisicalSecret resource">
Once you have created your machine identity and added it to your project(s), you will need to add the identity ID to your InfisicalSecret resource.
In the `authentication.kubernetesAuth.identityId` field, add the identity ID of the machine identity you created.
See the example below for more details.
</Step>
<Step title="Add your Kubernetes service account token to the InfisicalSecret resource">
Add the service account details from the previous steps under `authentication.kubernetesAuth.serviceAccountRef`.
Here you will need to enter the name and namespace of the service account.
The example below shows a complete InfisicalSecret resource with all required fields defined.
</Step>
</Steps>
<Info>
Make sure to also populate the `secretsScope` field with the project slug
_`projectSlug`_, environment slug _`envSlug`_, and secrets path
_`secretsPath`_ that you want to fetch secrets from. Please see the example
below.
</Info>
## Example
```yaml example-kubernetes-auth.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
name: infisicalsecret-sample-crd
spec:
authentication:
kubernetesAuth:
identityId: <machine-identity-id>
serviceAccountRef:
name: infisical-service-account # The service account we just created in the previous step. (*not* the reviewer service account)
namespace: <service-account-namespace>
# secretsScope is identical to the secrets scope in the universalAuth field in this sample.
secretsScope:
projectSlug: your-project-slug
envSlug: prod
secretsPath: "/path"
recursive: true
...
```
</Tab>
</Tabs>
</Accordion>

View File

@@ -582,7 +582,8 @@
"api-reference/endpoints/identities/update",
"api-reference/endpoints/identities/delete",
"api-reference/endpoints/identities/get-by-id",
"api-reference/endpoints/identities/list"
"api-reference/endpoints/identities/list",
"api-reference/endpoints/identities/search"
]
},
{

View File

@@ -23,6 +23,7 @@
"@hcaptcha/react-hcaptcha": "^1.11.0",
"@headlessui/react": "^1.7.19",
"@hookform/resolvers": "^3.9.1",
"@lexical/react": "^0.29.0",
"@lottiefiles/dotlottie-react": "^0.12.0",
"@octokit/rest": "^21.0.2",
"@peculiar/x509": "^1.12.3",
@@ -66,6 +67,7 @@
"jspdf": "^2.5.2",
"jsrp": "^0.2.4",
"jwt-decode": "^4.0.0",
"lexical": "^0.29.0",
"ms": "^2.1.3",
"nprogress": "^0.2.0",
"picomatch": "^4.0.2",
@@ -1570,6 +1572,260 @@
}
}
},
"node_modules/@lexical/clipboard": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/clipboard/-/clipboard-0.29.0.tgz",
"integrity": "sha512-llxZosYCwH13p2GfPfhAinukdvAZYxWuwf5md107X80hsE8TQJj25unjqTwRKQ+w/wD+hpmBMziU8+K/WTitWQ==",
"license": "MIT",
"dependencies": {
"@lexical/html": "0.29.0",
"@lexical/list": "0.29.0",
"@lexical/selection": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/code": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/code/-/code-0.29.0.tgz",
"integrity": "sha512-yKGzoKpyIO39Xf7OKLPpoCE5V8mTDCM3l3CDHZR3X1gM/VZQzf4jAiO3b06y9YkQ2fM8kqwchYu87wGvs8/iIQ==",
"license": "MIT",
"dependencies": {
"@lexical/utils": "0.29.0",
"lexical": "0.29.0",
"prismjs": "^1.30.0"
}
},
"node_modules/@lexical/devtools-core": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/devtools-core/-/devtools-core-0.29.0.tgz",
"integrity": "sha512-uUq0m9ql/7mthp7Ho1vnG7Id6imQ5kD5mxUhX2lmgHretS+yAHGsGsGiPIVHdPWeVmUb2n4IVDJ+cJbUsUjQJw==",
"license": "MIT",
"dependencies": {
"@lexical/html": "0.29.0",
"@lexical/link": "0.29.0",
"@lexical/mark": "0.29.0",
"@lexical/table": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
},
"peerDependencies": {
"react": ">=17.x",
"react-dom": ">=17.x"
}
},
"node_modules/@lexical/dragon": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/dragon/-/dragon-0.29.0.tgz",
"integrity": "sha512-Zaky2jd/Pp1blAZqPeGNdyhxnVL4lwVjbWPxhfS1gbW4Q5CBQ3aD3B0T4ljiKfmRNJm004LJ9q7KjhlRbREvZA==",
"license": "MIT",
"dependencies": {
"lexical": "0.29.0"
}
},
"node_modules/@lexical/hashtag": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/hashtag/-/hashtag-0.29.0.tgz",
"integrity": "sha512-fa7s0Yi2RKz/GvgT5XU9fborx6VPU3VtvvEPaIXgyd6zXZRiOhD9rGypwB3oj4fMK1ndx2dX0m7SwhMJo48D8w==",
"license": "MIT",
"dependencies": {
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/history": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/history/-/history-0.29.0.tgz",
"integrity": "sha512-OrCwZycp/yaq63mw511NutkwAB+W6WSchG1xTxlLh6nbc8jnbvKhCf4CGbnrvlhD7hTuzxJ8FI9/2M/2zv/mNQ==",
"license": "MIT",
"dependencies": {
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/html": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/html/-/html-0.29.0.tgz",
"integrity": "sha512-+jV6ijppOpxpUGeXkGssXJbsAmFALfeLrgbM0xuZbxZ7RgYZ+5Atn00WjSno7+JV5EOuRkYmCNtS1tiHtXMY1g==",
"license": "MIT",
"dependencies": {
"@lexical/selection": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/link": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/link/-/link-0.29.0.tgz",
"integrity": "sha512-wGbKRF0x/6ZQHuCfr8m8qD1J0R1kFmWINBG2A1hUXPDf7UY5qm/nS2oKNDGpjiDMGwkVZ7n7WfzeBGO+KRe/Lg==",
"license": "MIT",
"dependencies": {
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/list": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/list/-/list-0.29.0.tgz",
"integrity": "sha512-sWiof+i2ff8rL7KxJ3dxHLwyJfX423e1EVLmAdQEOPhyZJiNbeLTSNhNGsZ8FjFoBwvTTEDwuQZm3iT3hliKOg==",
"license": "MIT",
"dependencies": {
"@lexical/selection": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/mark": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/mark/-/mark-0.29.0.tgz",
"integrity": "sha512-UB3x6pyUdpZHRqF4tiajLnC1+Umvt7x8Rkkdi29aNNvzIWniVwGkBOlmvFus7x+4dOV1D1fydwiP4m38nGgLDw==",
"license": "MIT",
"dependencies": {
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/markdown": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/markdown/-/markdown-0.29.0.tgz",
"integrity": "sha512-4Od8WoDoviv9DxJZVgrIORTIAzyoGOpztbGbIBXguGmwvy7NnHQDh9fZYIYRrdI1Awp1VVGdJ3ku/7KTgSOoRw==",
"license": "MIT",
"dependencies": {
"@lexical/code": "0.29.0",
"@lexical/link": "0.29.0",
"@lexical/list": "0.29.0",
"@lexical/rich-text": "0.29.0",
"@lexical/text": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/offset": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/offset/-/offset-0.29.0.tgz",
"integrity": "sha512-VyD2Ff3rBJpo++Fxvi3MNYmDELa+9nA0EgXqGRNb3MvRehRjHbaDbymtLMMHIwvbkF5lnra+ubStcTRQmoQxXw==",
"license": "MIT",
"dependencies": {
"lexical": "0.29.0"
}
},
"node_modules/@lexical/overflow": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/overflow/-/overflow-0.29.0.tgz",
"integrity": "sha512-IzH3M652Ej2gB2sK65N3yTgyiQAa3I3tqKbSnBRiXu/+isxHoCy/qRr9/kL63uy7zhGvgV+EYsoffQCawIFt8Q==",
"license": "MIT",
"dependencies": {
"lexical": "0.29.0"
}
},
"node_modules/@lexical/plain-text": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/plain-text/-/plain-text-0.29.0.tgz",
"integrity": "sha512-F5C3meDb2HmO0NmKJBVRkjmX9PNln6O1jXU/APJuSFBdvfcIWSY58ncHR4zy2M5LF1Q5PQMWyIay9p+SqOtY5A==",
"license": "MIT",
"dependencies": {
"@lexical/clipboard": "0.29.0",
"@lexical/selection": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/react": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/react/-/react-0.29.0.tgz",
"integrity": "sha512-YMlnljW/jxmwSzsRv5UPatfOoMZXqxFmRIEltTUIQfrOFdqn+ssUtCpjE6xRD1oxD6KpSIekakzLs+y/8+7CuQ==",
"license": "MIT",
"dependencies": {
"@lexical/devtools-core": "0.29.0",
"@lexical/dragon": "0.29.0",
"@lexical/hashtag": "0.29.0",
"@lexical/history": "0.29.0",
"@lexical/link": "0.29.0",
"@lexical/list": "0.29.0",
"@lexical/mark": "0.29.0",
"@lexical/markdown": "0.29.0",
"@lexical/overflow": "0.29.0",
"@lexical/plain-text": "0.29.0",
"@lexical/rich-text": "0.29.0",
"@lexical/table": "0.29.0",
"@lexical/text": "0.29.0",
"@lexical/utils": "0.29.0",
"@lexical/yjs": "0.29.0",
"lexical": "0.29.0",
"react-error-boundary": "^3.1.4"
},
"peerDependencies": {
"react": ">=17.x",
"react-dom": ">=17.x"
}
},
"node_modules/@lexical/rich-text": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/rich-text/-/rich-text-0.29.0.tgz",
"integrity": "sha512-fSKgXGxJUOWo7dwSTUYFVBNNk4pPN8norsZfdmKM1kGDS1/GKuVzlzHLKZ7rQb8RLD5a43p4ifEL+28P+q0Qqg==",
"license": "MIT",
"dependencies": {
"@lexical/clipboard": "0.29.0",
"@lexical/selection": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/selection": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/selection/-/selection-0.29.0.tgz",
"integrity": "sha512-lX9CRrXgKte65cozTHFXwUJ2fvZD92OEtos+YU+U40GJjf3NdheGeKDxDfOpF4AXrYRSszY7E0CzmIvuEs0p4A==",
"license": "MIT",
"dependencies": {
"lexical": "0.29.0"
}
},
"node_modules/@lexical/table": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/table/-/table-0.29.0.tgz",
"integrity": "sha512-Jdj32kBDeJh/0dGaZB14JggnEIS956/cN7grnLr7cmhhVzDicvLMBENSXQVEJAQVcSIU4G9EvxC7GJZ9VgqDnA==",
"license": "MIT",
"dependencies": {
"@lexical/clipboard": "0.29.0",
"@lexical/utils": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/text": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/text/-/text-0.29.0.tgz",
"integrity": "sha512-QnNGr6ickTLk76o3PdxJjPwt//dpuh8idVfR73WdCIoAwkhiEPUxxTZERoMsudXj6O/lJ+/HhI61wVjLckYr3A==",
"license": "MIT",
"dependencies": {
"lexical": "0.29.0"
}
},
"node_modules/@lexical/utils": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/utils/-/utils-0.29.0.tgz",
"integrity": "sha512-y2hhWQDjcXdplsAaQMuZx6ht9u1I4BV5NynA+WKoQ3h8vKxzeDnpCxVOK/zxU1R5dhM/nilnFu7uhvrSeEn+TQ==",
"license": "MIT",
"dependencies": {
"@lexical/list": "0.29.0",
"@lexical/selection": "0.29.0",
"@lexical/table": "0.29.0",
"lexical": "0.29.0"
}
},
"node_modules/@lexical/yjs": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/@lexical/yjs/-/yjs-0.29.0.tgz",
"integrity": "sha512-6IXWWlGkVJEzWP/+LcuKYJ9jmcFp8k7TT/jmz4V5gBD9Ut3swOGsIA/sQCtB9y7jad10csaDVmFdFzGNWKVH9A==",
"license": "MIT",
"dependencies": {
"@lexical/offset": "0.29.0",
"@lexical/selection": "0.29.0",
"lexical": "0.29.0"
},
"peerDependencies": {
"yjs": ">=13.5.22"
}
},
"node_modules/@lottiefiles/dotlottie-react": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/@lottiefiles/dotlottie-react/-/dotlottie-react-0.12.0.tgz",
@@ -8871,6 +9127,17 @@
"node": ">=10"
}
},
"node_modules/isomorphic.js": {
"version": "0.2.5",
"resolved": "https://registry.npmjs.org/isomorphic.js/-/isomorphic.js-0.2.5.tgz",
"integrity": "sha512-PIeMbHqMt4DnUP3MA/Flc0HElYjMXArsw1qwJZcm9sqR8mq3l8NYizFMty0pWwE/tzIGH3EKK5+jes5mAr85yw==",
"license": "MIT",
"peer": true,
"funding": {
"type": "GitHub Sponsors ❤",
"url": "https://github.com/sponsors/dmonad"
}
},
"node_modules/iterator.prototype": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.4.tgz",
@@ -9100,6 +9367,34 @@
"node": ">= 0.8.0"
}
},
"node_modules/lexical": {
"version": "0.29.0",
"resolved": "https://registry.npmjs.org/lexical/-/lexical-0.29.0.tgz",
"integrity": "sha512-eoBHUEn0LmExKeK6x2cFKU0FPaMk2Bc5HgiCzTiv5ymKtwWw7LeKcxaNPmLxRRdQpcWV1IMKjayAbw7Lt/Gu7w==",
"license": "MIT"
},
"node_modules/lib0": {
"version": "0.2.102",
"resolved": "https://registry.npmjs.org/lib0/-/lib0-0.2.102.tgz",
"integrity": "sha512-g70kydI0I1sZU0ChO8mBbhw0oUW/8U0GHzygpvEIx8k+jgOpqnTSb/E+70toYVqHxBhrERD21TwD5QcZJQ40ZQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"isomorphic.js": "^0.2.4"
},
"bin": {
"0ecdsa-generate-keypair": "bin/0ecdsa-generate-keypair.js",
"0gentesthtml": "bin/gentesthtml.js",
"0serve": "bin/0serve.js"
},
"engines": {
"node": ">=16"
},
"funding": {
"type": "GitHub Sponsors ❤",
"url": "https://github.com/sponsors/dmonad"
}
},
"node_modules/lilconfig": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz",
@@ -10857,6 +11152,15 @@
}
}
},
"node_modules/prismjs": {
"version": "1.30.0",
"resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.30.0.tgz",
"integrity": "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
@@ -11142,6 +11446,22 @@
"react": "^18.3.1"
}
},
"node_modules/react-error-boundary": {
"version": "3.1.4",
"resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.4.tgz",
"integrity": "sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.12.5"
},
"engines": {
"node": ">=10",
"npm": ">=6"
},
"peerDependencies": {
"react": ">=16.13.1"
}
},
"node_modules/react-fast-compare": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.2.tgz",
@@ -13587,9 +13907,9 @@
}
},
"node_modules/vite": {
"version": "5.4.14",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.14.tgz",
"integrity": "sha512-EK5cY7Q1D8JNhSaPKVK4pwBFvaTmZxEnoKXLG/U9gmdDcihQGNzFlgIvaxezFR4glP1LsuiedwMBqCXH3wZccA==",
"version": "5.4.16",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.16.tgz",
"integrity": "sha512-Y5gnfp4NemVfgOTDQAunSD4346fal44L9mszGGY/e+qxsRT5y1sMlS/8tiQ8AFAp+MFgYNSINdfEchJiPm41vQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14131,6 +14451,24 @@
"node": ">=8"
}
},
"node_modules/yjs": {
"version": "13.6.24",
"resolved": "https://registry.npmjs.org/yjs/-/yjs-13.6.24.tgz",
"integrity": "sha512-xn/pYLTZa3uD1uDG8lpxfLRo5SR/rp0frdASOl2a71aYNvUXdWcLtVL91s2y7j+Q8ppmjZ9H3jsGVgoFMbT2VA==",
"license": "MIT",
"peer": true,
"dependencies": {
"lib0": "^0.2.99"
},
"engines": {
"node": ">=16.0.0",
"npm": ">=8.0.0"
},
"funding": {
"type": "GitHub Sponsors ❤",
"url": "https://github.com/sponsors/dmonad"
}
},
"node_modules/yocto-queue": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",

View File

@@ -27,6 +27,7 @@
"@hcaptcha/react-hcaptcha": "^1.11.0",
"@headlessui/react": "^1.7.19",
"@hookform/resolvers": "^3.9.1",
"@lexical/react": "^0.29.0",
"@lottiefiles/dotlottie-react": "^0.12.0",
"@octokit/rest": "^21.0.2",
"@peculiar/x509": "^1.12.3",
@@ -70,6 +71,7 @@
"jspdf": "^2.5.2",
"jsrp": "^0.2.4",
"jwt-decode": "^4.0.0",
"lexical": "^0.29.0",
"ms": "^2.1.3",
"nprogress": "^0.2.0",
"picomatch": "^4.0.2",

View File

@@ -0,0 +1,159 @@
/* eslint-disable no-underscore-dangle */
import { forwardRef, InputHTMLAttributes } from "react";
import { InitialConfigType, LexicalComposer } from "@lexical/react/LexicalComposer";
import { ContentEditable } from "@lexical/react/LexicalContentEditable";
import { LexicalErrorBoundary } from "@lexical/react/LexicalErrorBoundary";
import { OnChangePlugin } from "@lexical/react/LexicalOnChangePlugin";
import { PlainTextPlugin } from "@lexical/react/LexicalPlainTextPlugin";
import { ReactNode } from "@tanstack/react-router";
import { cva, VariantProps } from "cva";
import { EditorState, LexicalEditor } from "lexical";
import { twMerge } from "tailwind-merge";
import { HighlightNode } from "./EditorHighlight";
import { EditorPlaceholderPlugin } from "./EditorPlaceholderPlugin";
/**
 * Error handler wired into the LexicalComposer config. Lexical reports any
 * error thrown while applying an editor update here; by only logging (and
 * not re-throwing) we let Lexical attempt a graceful recovery without
 * losing user data.
 */
function onError(error: Error): void {
  console.error(error);
}
// cva variant factory for the editable surface itself (the element that
// receives focus). The variant keys mirror the styling props accepted by
// <Editor /> (size / isRounded / variant / isError).
const inputVariants = cva(
  "input w-full py-[0.375rem] text-gray-400 placeholder:text-sm placeholder-gray-500 placeholder-opacity-50 outline-none focus:ring-2 hover:ring-bunker-400/60 duration-100",
  {
    variants: {
      // Text size presets; "md" is the Editor default.
      size: {
        xs: ["text-xs"],
        sm: ["text-sm"],
        md: ["text-md"],
        lg: ["text-lg"]
      },
      isRounded: {
        true: ["rounded-md"],
        false: ""
      },
      // Background treatment of the field.
      variant: {
        filled: ["bg-mineshaft-900", "text-gray-400"],
        outline: ["bg-transparent"],
        plain: "bg-transparent outline-none"
      },
      // Error state swaps the focus ring and placeholder colors to red.
      isError: {
        true: "focus:ring-red/50 placeholder-red-300",
        false: "focus:ring-primary-400/50 focus:ring-1"
      }
    },
    compoundVariants: []
  }
);
// cva variant factory for the outer wrapper that draws the border and hosts
// the optional left/right icons (it is `relative`; the icons rendered by
// <Editor /> are absolutely positioned inside it).
const inputParentContainerVariants = cva("inline-flex font-inter items-center border relative", {
  variants: {
    isRounded: {
      true: ["rounded-md"],
      false: ""
    },
    // Error state turns the border red.
    isError: {
      true: "border-red",
      false: "border-mineshaft-500"
    },
    isFullWidth: {
      true: "w-full",
      false: ""
    },
    // "plain" removes the border entirely.
    variant: {
      filled: ["bg-bunker-800", "text-gray-400"],
      outline: ["bg-transparent"],
      plain: "border-none"
    }
  }
});
// Props for <Editor />. Extends the native attributes of the host element
// (minus the ones Lexical manages itself) with the cva styling variants above.
type Props = Omit<
  InputHTMLAttributes<HTMLDivElement>,
  "size" | "onChange" | "placeholder" | "aria-placeholder"
> &
  VariantProps<typeof inputVariants> & {
    // Extra Lexical plugins, rendered inside the composer.
    children?: ReactNode;
    // Lexical namespace for this editor instance.
    namespace?: string;
    // Hint text shown while the editor is empty (via EditorPlaceholderPlugin).
    placeholder?: string;
    isFullWidth?: boolean;
    isRequired?: boolean;
    leftIcon?: ReactNode;
    rightIcon?: ReactNode;
    isDisabled?: boolean;
    isReadOnly?: boolean;
    // Extra classes for the outer wrapper element.
    containerClassName?: string;
    // Forwarded to Lexical's OnChangePlugin; fires on every editor update.
    onChange: (editorState: EditorState, editor: LexicalEditor, tags: Set<string>) => void;
    // NOTE(review): declared but never read by Editor; it currently falls
    // through `...props` onto the ContentEditable DOM node — confirm intended.
    initialValue?: string;
  };
/**
 * Plain-text Lexical editor styled like the app's form inputs.
 *
 * Wraps a LexicalComposer + PlainTextPlugin and forwards remaining DOM props
 * to the underlying ContentEditable. Accepts the same cva variant props
 * (size / variant / isRounded / isError) as the other inputs, plus optional
 * left/right icon slots. `children` can supply additional Lexical plugins
 * (e.g. EditorHighlightPlugin).
 */
export const Editor = forwardRef<HTMLDivElement, Props>(
  (
    {
      children,
      namespace = "infisical-editor",
      className,
      containerClassName,
      isRounded = true,
      isFullWidth = true,
      isDisabled,
      isError = false,
      isRequired,
      leftIcon,
      rightIcon,
      variant = "filled",
      size = "md",
      isReadOnly,
      placeholder,
      onChange,
      ...props
    },
    ref
  ) => {
    // HighlightNode is registered up front so highlight plugins passed as
    // children can create/match highlight entities.
    const initialConfig: InitialConfigType = {
      namespace,
      onError,
      nodes: [HighlightNode]
    };
    return (
      <div
        className={inputParentContainerVariants({
          isRounded,
          isError,
          isFullWidth,
          variant,
          className: containerClassName
        })}
      >
        {leftIcon && <span className="absolute left-0 ml-3 text-sm">{leftIcon}</span>}
        <LexicalComposer initialConfig={initialConfig}>
          <PlainTextPlugin
            contentEditable={
              <ContentEditable
                ref={ref}
                aria-required={isRequired}
                readOnly={isReadOnly}
                disabled={isDisabled}
                className={twMerge(
                  // Make room for the absolutely-positioned icons.
                  leftIcon ? "pl-10" : "pl-2.5",
                  rightIcon ? "pr-10" : "pr-2.5",
                  inputVariants({ className, isError, size, isRounded, variant })
                )}
                {...props}
                // Lexical's built-in placeholder is disabled; the
                // EditorPlaceholderPlugin below renders it via a CSS
                // `placeholder` attribute instead. Placed after the spread
                // so `props` cannot override it.
                placeholder={null}
              />
            }
            ErrorBoundary={LexicalErrorBoundary}
          />
          <OnChangePlugin onChange={onChange} />
          <EditorPlaceholderPlugin placeholder={placeholder} />
          {children}
        </LexicalComposer>
        {rightIcon && <span className="absolute right-0 mr-3">{rightIcon}</span>}
      </div>
    );
  }
);

View File

@@ -0,0 +1,127 @@
/* eslint-disable no-underscore-dangle,@typescript-eslint/class-methods-use-this */
import { useCallback, useEffect } from "react";
import { useLexicalComposerContext } from "@lexical/react/LexicalComposerContext";
import { useLexicalTextEntity } from "@lexical/react/useLexicalTextEntity";
import {
$applyNodeReplacement,
EditorConfig,
LexicalNode,
SerializedTextNode,
Spread,
TextNode
} from "lexical";
// Styling applied to a rendered highlight span.
type HighlightTheme = { contentClassName: string };
// Delimiters that open/close a highlight span (default "${" ... "}").
type Trigger = { startTrigger: string; endTrigger: string };
export type SerializedHighlightNode = Spread<
  {
    __highlightTheme: HighlightTheme;
    __trigger: Trigger;
  },
  SerializedTextNode
>;
/**
 * Lexical text node that renders template-style spans (e.g. "${VAR}") with a
 * distinct color. Behaves as an atomic text entity: text cannot be inserted
 * directly before or after it, and matching/splitting is driven by
 * useLexicalTextEntity (see EditorHighlightPlugin).
 */
export class HighlightNode extends TextNode {
  __highlightTheme: HighlightTheme;
  __trigger: Trigger;
  constructor(
    text: string,
    highlightTheme: HighlightTheme = {
      contentClassName: "ph-no-capture text-yellow-200/80"
    },
    trigger: Trigger = { startTrigger: "${", endTrigger: "}" },
    key?: string
  ) {
    super(text, key);
    this.__highlightTheme = highlightTheme;
    this.__trigger = trigger;
  }
  static getType(): string {
    return "highlight";
  }
  // Clone must preserve theme/trigger so copies keep their styling.
  static clone(node: HighlightNode): HighlightNode {
    return new HighlightNode(node.__text, node.__highlightTheme, node.__trigger, node.__key);
  }
  // NOTE(review): deserialization rebuilds the node with default theme and
  // trigger values before applying the serialized fields via
  // updateFromJSON — confirm custom theme/trigger survive a round-trip.
  static importJSON(serializedNode: SerializedHighlightNode): HighlightNode {
    return $applyNodeReplacement(new HighlightNode("")).updateFromJSON(serializedNode);
  }
  createDOM(config: EditorConfig): HTMLElement {
    const dom = super.createDOM(config);
    dom.style.cursor = "default";
    dom.className = this.__highlightTheme.contentClassName;
    return dom;
  }
  // Typing at the edges of the entity should not extend it.
  canInsertTextBefore(): boolean {
    return false;
  }
  canInsertTextAfter(): boolean {
    return false;
  }
  // Marks this node as a text entity so useLexicalTextEntity manages it.
  isTextEntity(): true {
    return true;
  }
}
/**
 * Creates a HighlightNode (with default theme/trigger) for the given text
 * and registers it with Lexical's node-replacement machinery.
 */
export function $createKeywordNode(keyword: string = ""): HighlightNode {
  const node = new HighlightNode(keyword);
  return $applyNodeReplacement(node);
}
/**
 * Type guard for HighlightNode. Returning a type predicate (rather than a
 * plain boolean) lets callers narrow the node type after the check —
 * matching the `$isXxxNode` convention used by Lexical's own node helpers.
 */
export function $isKeywordNode(node: LexicalNode | null | undefined): node is HighlightNode {
  return node instanceof HighlightNode;
}
// Props for EditorHighlightPlugin.
type Props = {
  contentClassName?: string;
  startTrigger?: string;
  endTrigger?: string;
};

/**
 * Registers a text-entity matcher that converts template-style spans
 * (startTrigger ... endTrigger, default "${...}") into styled
 * HighlightNode instances as the user types.
 *
 * @param startTrigger - substring that opens a highlight span.
 * @param endTrigger - substring that closes a highlight span.
 * @param contentClassName - class applied to the rendered highlight.
 */
export const EditorHighlightPlugin = ({
  endTrigger = "}",
  startTrigger = "${",
  contentClassName = "ph-no-capture text-yellow-200/80"
}: Props) => {
  const [editor] = useLexicalComposerContext();

  // Fail fast if the composer was not configured with HighlightNode.
  useEffect(() => {
    if (!editor.hasNodes([HighlightNode])) {
      throw new Error("HighlightsPlugin: HighlightsNode not registered on editor");
    }
  }, [editor]);

  const createKeywordNode = useCallback(
    (textNode: TextNode): HighlightNode =>
      $applyNodeReplacement(
        new HighlightNode(
          textNode.getTextContent(),
          { contentClassName },
          { startTrigger, endTrigger }
        )
      ),
    // The original empty dependency list captured stale prop values; the
    // factory must be rebuilt when the trigger/theme props change.
    [contentClassName, startTrigger, endTrigger]
  );

  // Returns the start/end offsets of the first complete
  // startTrigger...endTrigger span in `text`, or null when none exists.
  const getKeywordMatch = useCallback(
    (text: string) => {
      for (let i = 0; i < text.length; i += 1) {
        // Use the actual trigger lengths instead of the previously
        // hard-coded 2-character assumption, so custom triggers work.
        if (text.slice(i, i + startTrigger.length) === startTrigger) {
          const closingIndex = text.indexOf(endTrigger, i + startTrigger.length);
          if (closingIndex !== -1) {
            return { start: i, end: closingIndex + endTrigger.length };
          }
          return null;
        }
      }
      return null;
    },
    [startTrigger, endTrigger]
  );

  useLexicalTextEntity<HighlightNode>(getKeywordMatch, HighlightNode, createKeywordNode);

  return null;
};

View File

@@ -0,0 +1,22 @@
import { useEffect } from "react";
import { useLexicalComposerContext } from "@lexical/react/LexicalComposerContext";
import { useLexicalIsTextContentEmpty } from "@lexical/react/useLexicalIsTextContentEmpty";
/**
 * Mirrors the `placeholder` prop onto the Lexical root element as a
 * `placeholder` attribute while the editor is empty, so a CSS rule
 * (`[contentEditable="true"]:before`) can render it as hint text.
 */
export const EditorPlaceholderPlugin = ({ placeholder }: { placeholder: string | undefined }) => {
  const [editor] = useLexicalComposerContext();
  const isEmpty = useLexicalIsTextContentEmpty(editor);

  useEffect(() => {
    const rootElement = editor.getRootElement() as HTMLElement;
    if (rootElement) {
      if (isEmpty && placeholder) {
        rootElement.setAttribute("placeholder", placeholder);
      } else {
        rootElement.removeAttribute("placeholder");
      }
    }
    // Fix: `placeholder` was missing from the dependency list (masked by an
    // eslint-disable), so prop changes were not reflected until the
    // empty-state toggled.
  }, [editor, isEmpty, placeholder]);

  return null;
};

View File

@@ -0,0 +1,2 @@
export { Editor } from "./Editor";
export { EditorHighlightPlugin } from "./EditorHighlight";

View File

@@ -11,6 +11,7 @@ export * from "./DatePicker";
export * from "./DeleteActionModal";
export * from "./Drawer";
export * from "./Dropdown";
export * from "./Editor";
export * from "./EmailServiceSetupModal";
export * from "./EmptyState";
export * from "./FilterableSelect";

View File

@@ -46,5 +46,6 @@ export {
useGetIdentityTokenAuth,
useGetIdentityTokensTokenAuth,
useGetIdentityUniversalAuth,
useGetIdentityUniversalAuthClientSecrets
useGetIdentityUniversalAuthClientSecrets,
useSearchIdentities
} from "./queries";

View File

@@ -15,11 +15,13 @@ import {
IdentityMembershipOrg,
IdentityOidcAuth,
IdentityTokenAuth,
IdentityUniversalAuth
IdentityUniversalAuth,
TSearchIdentitiesDTO
} from "./types";
export const identitiesKeys = {
getIdentityById: (identityId: string) => [{ identityId }, "identity"] as const,
searchIdentities: (dto: TSearchIdentitiesDTO) => ["identity", "search", dto] as const,
getIdentityUniversalAuth: (identityId: string) =>
[{ identityId }, "identity-universal-auth"] as const,
getIdentityUniversalAuthClientSecrets: (identityId: string) =>
@@ -53,6 +55,26 @@ export const useGetIdentityById = (identityId: string) => {
});
};
// React Query hook that searches the organization's machine identities via
// POST /api/v1/identities/search. The full DTO is part of the query key, so
// results are cached per filter/sort/pagination combination and refetched
// automatically when any of them change.
export const useSearchIdentities = (dto: TSearchIdentitiesDTO) => {
  const { limit, search, offset, orderBy, orderDirection } = dto;
  return useQuery({
    queryKey: identitiesKeys.searchIdentities(dto),
    queryFn: async () => {
      // `totalCount` is the number of matches across all pages, enabling
      // pagination UIs alongside the current page of `identities`.
      const { data } = await apiRequest.post<{
        identities: IdentityMembershipOrg[];
        totalCount: number;
      }>("/api/v1/identities/search", {
        limit,
        offset,
        orderBy,
        orderDirection,
        search
      });
      return data;
    }
  });
};
export const useGetIdentityProjectMemberships = (identityId: string) => {
return useQuery({
enabled: Boolean(identityId),

View File

@@ -1,3 +1,5 @@
import { OrderByDirection } from "../generic/types";
import { OrgIdentityOrderBy } from "../organization/types";
import { TOrgRole } from "../roles/types";
import { ProjectUserMembershipTemporaryMode, Workspace } from "../workspace/types";
import { IdentityAuthMethod, IdentityJwtConfigurationType } from "./enums";
@@ -540,3 +542,14 @@ export type TProjectIdentitiesList = {
identityMemberships: IdentityMembership[];
totalCount: number;
};
// Request payload for the identities search endpoint
// (POST /api/v1/identities/search).
export type TSearchIdentitiesDTO = {
  // Pagination controls.
  limit?: number;
  offset?: number;
  // Sort column and direction.
  orderBy?: OrgIdentityOrderBy;
  orderDirection?: OrderByDirection;
  // Filter object using operator-style clauses: `$contains` for substring
  // matching on the identity name, `$in` for matching any of a set of roles.
  search: {
    name?: { $contains: string };
    role?: { $in: string[] };
  };
};

View File

@@ -191,3 +191,10 @@ html {
#nprogress .bar {
@apply bg-primary-400;
}
/* Renders the `placeholder` attribute (set on the Lexical root element by
   EditorPlaceholderPlugin while the editor is empty) as greyed-out hint
   text overlaying the editable area. */
[contentEditable="true"]:before {
  content: attr(placeholder);
  position: absolute;
  top: 0.5rem;
  @apply text-sm text-gray-500 opacity-50;
}

View File

@@ -33,14 +33,12 @@ import { IdentityPanel } from "@app/pages/admin/OverviewPage/components/Identity
import { AuthPanel } from "./components/AuthPanel";
import { EncryptionPanel } from "./components/EncryptionPanel";
import { IntegrationPanel } from "./components/IntegrationPanel";
import { RateLimitPanel } from "./components/RateLimitPanel";
import { UserPanel } from "./components/UserPanel";
enum TabSections {
Settings = "settings",
Encryption = "encryption",
Auth = "auth",
RateLimit = "rate-limit",
Integrations = "integrations",
Users = "users",
Identities = "identities",
@@ -163,7 +161,6 @@ export const OverviewPage = () => {
<Tab value={TabSections.Settings}>General</Tab>
<Tab value={TabSections.Encryption}>Encryption</Tab>
<Tab value={TabSections.Auth}>Authentication</Tab>
<Tab value={TabSections.RateLimit}>Rate Limit</Tab>
<Tab value={TabSections.Integrations}>Integrations</Tab>
<Tab value={TabSections.Users}>User Identities</Tab>
<Tab value={TabSections.Identities}>Machine Identities</Tab>
@@ -262,7 +259,6 @@ export const OverviewPage = () => {
<SelectClear
selectValue={defaultAuthOrgId}
onClear={() => {
console.log("clearing");
onChange("");
}}
>
@@ -403,9 +399,6 @@ export const OverviewPage = () => {
<TabPanel value={TabSections.Auth}>
<AuthPanel />
</TabPanel>
<TabPanel value={TabSections.RateLimit}>
<RateLimitPanel />
</TabPanel>
<TabPanel value={TabSections.Integrations}>
<IntegrationPanel />
</TabPanel>

View File

@@ -1,240 +0,0 @@
import { Controller, useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { z } from "zod";
import { UpgradePlanModal } from "@app/components/license/UpgradePlanModal";
import { createNotification } from "@app/components/notifications";
import { Button, ContentLoader, FormControl, Input } from "@app/components/v2";
import { useSubscription } from "@app/context";
import { usePopUp } from "@app/hooks";
import { useGetRateLimit, useUpdateRateLimit } from "@app/hooks/api";
// Validation schema for the rate limit settings form; every field is a
// requests-per-minute count.
const formSchema = z.object({
readRateLimit: z.number(),
writeRateLimit: z.number(),
secretsRateLimit: z.number(),
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
publicEndpointLimit: z.number()
});
// Form values shape inferred from the schema above.
type TRateLimitForm = z.infer<typeof formSchema>;
// Field metadata driving the repeated rate-limit inputs below. Keeping the
// (field name, label) pairs in one table replaces seven near-identical
// Controller blocks and guarantees the inputs behave consistently.
const RATE_LIMIT_FIELDS: { name: keyof TRateLimitForm; label: string }[] = [
  { name: "readRateLimit", label: "Global read requests per minute" },
  { name: "writeRateLimit", label: "Global write requests per minute" },
  { name: "secretsRateLimit", label: "Secret requests per minute" },
  { name: "authRateLimit", label: "Auth requests per minute" },
  { name: "inviteUserRateLimit", label: "User invitation requests per minute" },
  { name: "mfaRateLimit", label: "Multi factor auth requests per minute" },
  { name: "publicEndpointLimit", label: "Secret sharing requests per minute" }
];

/**
 * Admin panel for viewing and updating the instance-wide API rate limits.
 * Custom limits are gated behind the subscription's `customRateLimits`
 * entitlement; without it, submitting opens the upgrade-plan modal instead
 * of saving.
 */
export const RateLimitPanel = () => {
  // Server-persisted rate limit configuration; undefined until the query resolves.
  const { data: rateLimit, isPending } = useGetRateLimit();
  const { subscription } = useSubscription();
  const { mutateAsync: updateRateLimit } = useUpdateRateLimit();
  const { handlePopUpToggle, handlePopUpOpen, popUp } = usePopUp(["upgradePlan"] as const);

  const {
    control,
    handleSubmit,
    formState: { isSubmitting, isDirty }
  } = useForm<TRateLimitForm>({
    resolver: zodResolver(formSchema),
    // `values` keeps the form synced with fetched data; the fallbacks are the
    // platform defaults shown while loading or when a field is unset.
    values: {
      // eslint-disable-next-line
      readRateLimit: rateLimit?.readRateLimit ?? 600,
      writeRateLimit: rateLimit?.writeRateLimit ?? 200,
      secretsRateLimit: rateLimit?.secretsRateLimit ?? 60,
      authRateLimit: rateLimit?.authRateLimit ?? 60,
      inviteUserRateLimit: rateLimit?.inviteUserRateLimit ?? 30,
      mfaRateLimit: rateLimit?.mfaRateLimit ?? 20,
      publicEndpointLimit: rateLimit?.publicEndpointLimit ?? 30
    }
  });

  // Persists the form values, or prompts an upgrade when the current plan
  // does not include custom rate limits.
  const onRateLimitFormSubmit = async (formData: TRateLimitForm) => {
    try {
      if (subscription && !subscription.customRateLimits) {
        handlePopUpOpen("upgradePlan");
        return;
      }

      // formData already matches the mutation payload shape, so pass it through.
      await updateRateLimit(formData);
      createNotification({
        text: "Rate limits have been successfully updated. Please allow at least 10 minutes for the changes to take effect.",
        type: "success"
      });
    } catch (e) {
      console.error(e);
      createNotification({
        type: "error",
        text: "Failed to update rate limiting setting."
      });
    }
  };

  return isPending ? (
    <ContentLoader />
  ) : (
    <form
      className="mb-6 rounded-lg border border-mineshaft-600 bg-mineshaft-900 p-4"
      onSubmit={handleSubmit(onRateLimitFormSubmit)}
    >
      <div className="mb-8 flex flex-col justify-start">
        <div className="mb-4 text-xl font-semibold text-mineshaft-100">Configure rate limits</div>
        {RATE_LIMIT_FIELDS.map(({ name, label }) => (
          <Controller
            key={name}
            control={control}
            name={name}
            defaultValue={300}
            render={({ field, fieldState: { error } }) => (
              <FormControl
                label={label}
                className="w-72"
                isError={Boolean(error)}
                errorText={error?.message}
              >
                <Input
                  {...field}
                  // `|| ""` shows an empty input instead of the string "0" for
                  // unset values; previously the first field omitted this,
                  // behaving inconsistently with the other six.
                  value={field.value || ""}
                  onChange={(e) => field.onChange(Number(e.target.value))}
                />
              </FormControl>
            )}
          />
        ))}
      </div>
      <Button type="submit" isLoading={isSubmitting} isDisabled={isSubmitting || !isDirty}>
        Save
      </Button>
      <UpgradePlanModal
        isOpen={popUp.upgradePlan.isOpen}
        onOpenChange={(isOpen) => handlePopUpToggle("upgradePlan", isOpen)}
        text="You can configure custom rate limits if you switch to Infisical's Enterprise plan."
      />
    </form>
  );
};

View File

@@ -1,7 +1,10 @@
import { useState } from "react";
import { Controller, useForm } from "react-hook-form";
import {
faArrowDown,
faArrowUp,
faEllipsis,
faFilter,
faMagnifyingGlass,
faServer
} from "@fortawesome/free-solid-svg-icons";
@@ -12,14 +15,19 @@ import { twMerge } from "tailwind-merge";
import { createNotification } from "@app/components/notifications";
import { OrgPermissionCan } from "@app/components/permissions";
import {
Button,
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
EmptyState,
FormControl,
IconButton,
Input,
Pagination,
Popover,
PopoverContent,
PopoverTrigger,
Select,
SelectItem,
Spinner,
@@ -30,11 +38,12 @@ import {
Td,
Th,
THead,
Tooltip,
Tr
} from "@app/components/v2";
import { OrgPermissionIdentityActions, OrgPermissionSubjects, useOrganization } from "@app/context";
import { usePagination, useResetPageHelper } from "@app/hooks";
import { useGetIdentityMembershipOrgs, useGetOrgRoles, useUpdateIdentity } from "@app/hooks/api";
import { useGetOrgRoles, useSearchIdentities, useUpdateIdentity } from "@app/hooks/api";
import { OrderByDirection } from "@app/hooks/api/generic/types";
import { OrgIdentityOrderBy } from "@app/hooks/api/organization/types";
import { UsePopUpState } from "@app/hooks/usePopUp";
@@ -68,22 +77,22 @@ export const IdentityTable = ({ handlePopUpOpen }: Props) => {
page,
setPerPage
} = usePagination<OrgIdentityOrderBy>(OrgIdentityOrderBy.Name);
const [filteredRoles, setFilteredRoles] = useState<string[]>([]);
const organizationId = currentOrg?.id || "";
const { mutateAsync: updateMutateAsync } = useUpdateIdentity();
const { data, isPending, isFetching } = useGetIdentityMembershipOrgs(
{
organizationId,
offset,
limit,
orderDirection,
orderBy,
search: debouncedSearch
},
{ placeholderData: (prevData) => prevData }
);
const { data, isPending, isFetching } = useSearchIdentities({
offset,
limit,
orderDirection,
orderBy,
search: {
name: debouncedSearch ? { $contains: debouncedSearch } : undefined,
role: filteredRoles?.length ? { $in: filteredRoles } : undefined
}
});
const { totalCount = 0 } = data ?? {};
useResetPageHelper({
@@ -91,6 +100,7 @@ export const IdentityTable = ({ handlePopUpOpen }: Props) => {
offset,
setPage
});
const filterForm = useForm<{ roles: string }>();
const { data: roles } = useGetOrgRoles(organizationId);
@@ -132,13 +142,78 @@ export const IdentityTable = ({ handlePopUpOpen }: Props) => {
return (
<div>
<Input
containerClassName="mb-4"
value={search}
onChange={(e) => setSearch(e.target.value)}
leftIcon={<FontAwesomeIcon icon={faMagnifyingGlass} />}
placeholder="Search identities by name..."
/>
<div className="mb-4 flex items-center space-x-2">
<Input
value={search}
onChange={(e) => setSearch(e.target.value)}
leftIcon={<FontAwesomeIcon icon={faMagnifyingGlass} />}
placeholder="Search identities by name..."
/>
<div>
<Popover>
<PopoverTrigger>
<IconButton
ariaLabel="filter"
variant="outline_bg"
className={filteredRoles?.length ? "border-primary" : ""}
>
<Tooltip content="Advance Filter">
<FontAwesomeIcon icon={faFilter} />
</Tooltip>
</IconButton>
</PopoverTrigger>
<PopoverContent className="w-auto border border-mineshaft-600 bg-mineshaft-800 p-2 drop-shadow-2xl">
<div className="mb-4 border-b border-b-gray-700 pb-2 text-sm text-mineshaft-300">
Advance Filter
</div>
<form
onSubmit={filterForm.handleSubmit((el) => {
setFilteredRoles(el.roles?.split(",")?.filter(Boolean) || []);
})}
>
<Controller
control={filterForm.control}
name="roles"
render={({ field, fieldState: { error } }) => (
<FormControl
label="Roles"
helperText="Eg: admin,viewer"
isError={Boolean(error?.message)}
errorText={error?.message}
>
<Input {...field} />
</FormControl>
)}
/>
<div className="flex items-center space-x-2">
<Button
type="submit"
size="xs"
colorSchema="primary"
variant="outline_bg"
className="mt-4"
>
Apply Filter
</Button>
{Boolean(filteredRoles.length) && (
<Button
size="xs"
variant="link"
className="ml-4 mt-4"
onClick={() => {
filterForm.reset({ roles: "" });
setFilteredRoles([]);
}}
>
Clear
</Button>
)}
</div>
</form>
</PopoverContent>
</Popover>
</div>
</div>
<TableContainer>
<Table>
<THead>
@@ -190,7 +265,7 @@ export const IdentityTable = ({ handlePopUpOpen }: Props) => {
<TBody>
{isPending && <TableSkeleton columns={3} innerKey="org-identities" />}
{!isPending &&
data?.identityMemberships.map(({ identity: { id, name }, role, customRole }) => {
data?.identities?.map(({ identity: { id, name }, role, customRole }) => {
return (
<Tr
className="h-10 cursor-pointer transition-colors duration-100 hover:bg-mineshaft-700"
@@ -307,10 +382,10 @@ export const IdentityTable = ({ handlePopUpOpen }: Props) => {
onChangePerPage={(newPerPage) => setPerPage(newPerPage)}
/>
)}
{!isPending && data && data?.identityMemberships.length === 0 && (
{!isPending && data && data?.identities.length === 0 && (
<EmptyState
title={
debouncedSearch.trim().length > 0
debouncedSearch.trim().length > 0 || filteredRoles?.length > 0
? "No identities match search filter"
: "No identities have been created in this organization"
}

View File

@@ -13,9 +13,9 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: v0.9.0
version: v0.9.1
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "v0.9.0"
appVersion: "v0.9.1"

View File

@@ -74,6 +74,13 @@ spec:
type: object
kubernetesAuth:
properties:
autoCreateServiceAccountToken:
description: Optionally automatically create a service account
token for the configured service account. If this is set to
`true`, the operator will automatically create a service account
token for the configured service account. This field is recommended
in most cases.
type: boolean
identityId:
type: string
serviceAccountRef:
@@ -86,6 +93,13 @@ spec:
- name
- namespace
type: object
serviceAccountTokenAudiences:
description: The audiences to use for the service account token.
This is only relevant if `autoCreateServiceAccountToken` is
true.
items:
type: string
type: array
required:
- identityId
- serviceAccountRef

View File

@@ -74,6 +74,13 @@ spec:
type: object
kubernetesAuth:
properties:
autoCreateServiceAccountToken:
description: Optionally automatically create a service account
token for the configured service account. If this is set to
`true`, the operator will automatically create a service account
token for the configured service account. This field is recommended
in most cases.
type: boolean
identityId:
type: string
serviceAccountRef:
@@ -86,6 +93,13 @@ spec:
- name
- namespace
type: object
serviceAccountTokenAudiences:
description: The audiences to use for the service account token.
This is only relevant if `autoCreateServiceAccountToken` is
true.
items:
type: string
type: array
required:
- identityId
- serviceAccountRef

View File

@@ -137,6 +137,12 @@ spec:
type: object
kubernetesAuth:
properties:
autoCreateServiceAccountToken:
description: Optionally automatically create a service account
token for the configured service account. If this is set to
`true`, the operator will automatically create a service account
token for the configured service account.
type: boolean
identityId:
type: string
secretsScope:
@@ -164,6 +170,13 @@ spec:
- name
- namespace
type: object
serviceAccountTokenAudiences:
description: The audiences to use for the service account token.
This is only relevant if `autoCreateServiceAccountToken` is
true.
items:
type: string
type: array
required:
- identityId
- secretsScope

View File

@@ -23,6 +23,13 @@ rules:
- list
- update
- watch
- apiGroups:
- ""
resources:
- pods
verbs:
- get
- list
- apiGroups:
- ""
resources:
@@ -42,6 +49,12 @@ rules:
- get
- list
- watch
- apiGroups:
- ""
resources:
- serviceaccounts/token
verbs:
- create
- apiGroups:
- apps
resources:
@@ -62,6 +75,12 @@ rules:
- list
- update
- watch
- apiGroups:
- authentication.k8s.io
resources:
- tokenreviews
verbs:
- create
- apiGroups:
- secrets.infisical.com
resources:

View File

@@ -32,7 +32,7 @@ controllerManager:
- ALL
image:
repository: infisical/kubernetes-operator
tag: v0.9.0
tag: v0.9.1
resources:
limits:
cpu: 500m

View File

@@ -24,3 +24,6 @@ Dockerfile.cross
*.swp
*.swo
*~
# Testing directories
auto-token

View File

@@ -49,6 +49,14 @@ type GenericKubernetesAuth struct {
IdentityID string `json:"identityId"`
// +kubebuilder:validation:Required
ServiceAccountRef KubernetesServiceAccountRef `json:"serviceAccountRef"`
// Optionally automatically create a service account token for the configured service account.
// If this is set to `true`, the operator will automatically create a service account token for the configured service account. This field is recommended in most cases.
// +kubebuilder:validation:Optional
AutoCreateServiceAccountToken bool `json:"autoCreateServiceAccountToken"`
// The audiences to use for the service account token. This is only relevant if `autoCreateServiceAccountToken` is true.
// +kubebuilder:validation:Optional
ServiceAccountTokenAudiences []string `json:"serviceAccountTokenAudiences"`
}
type TLSConfig struct {

View File

@@ -38,6 +38,14 @@ type KubernetesAuthDetails struct {
// +kubebuilder:validation:Required
SecretsScope MachineIdentityScopeInWorkspace `json:"secretsScope"`
// Optionally automatically create a service account token for the configured service account.
// If this is set to `true`, the operator will automatically create a service account token for the configured service account.
// +kubebuilder:validation:Optional
AutoCreateServiceAccountToken bool `json:"autoCreateServiceAccountToken"`
// The audiences to use for the service account token. This is only relevant if `autoCreateServiceAccountToken` is true.
// +kubebuilder:validation:Optional
ServiceAccountTokenAudiences []string `json:"serviceAccountTokenAudiences"`
}
type KubernetesServiceAccountRef struct {

View File

@@ -48,7 +48,7 @@ func (in *Authentication) DeepCopyInto(out *Authentication) {
out.ServiceAccount = in.ServiceAccount
out.ServiceToken = in.ServiceToken
out.UniversalAuth = in.UniversalAuth
out.KubernetesAuth = in.KubernetesAuth
in.KubernetesAuth.DeepCopyInto(&out.KubernetesAuth)
out.AwsIamAuth = in.AwsIamAuth
out.AzureAuth = in.AzureAuth
out.GcpIdTokenAuth = in.GcpIdTokenAuth
@@ -207,7 +207,7 @@ func (in *GenericGcpIdTokenAuth) DeepCopy() *GenericGcpIdTokenAuth {
func (in *GenericInfisicalAuthentication) DeepCopyInto(out *GenericInfisicalAuthentication) {
*out = *in
out.UniversalAuth = in.UniversalAuth
out.KubernetesAuth = in.KubernetesAuth
in.KubernetesAuth.DeepCopyInto(&out.KubernetesAuth)
out.AwsIamAuth = in.AwsIamAuth
out.AzureAuth = in.AzureAuth
out.GcpIdTokenAuth = in.GcpIdTokenAuth
@@ -228,6 +228,11 @@ func (in *GenericInfisicalAuthentication) DeepCopy() *GenericInfisicalAuthentica
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver
// into out. Value fields are copied wholesale via *out = *in; the audiences
// slice is re-allocated so the copy does not alias the original backing array.
func (in *GenericKubernetesAuth) DeepCopyInto(out *GenericKubernetesAuth) {
*out = *in
out.ServiceAccountRef = in.ServiceAccountRef
if in.ServiceAccountTokenAudiences != nil {
// Shadow in/out with the slice fields so the generated copy template below
// can allocate and fill the destination slice.
in, out := &in.ServiceAccountTokenAudiences, &out.ServiceAccountTokenAudiences
*out = make([]string, len(*in))
copy(*out, *in)
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GenericKubernetesAuth.
@@ -336,7 +341,7 @@ func (in *InfisicalDynamicSecretList) DeepCopyObject() runtime.Object {
func (in *InfisicalDynamicSecretSpec) DeepCopyInto(out *InfisicalDynamicSecretSpec) {
*out = *in
in.ManagedSecretReference.DeepCopyInto(&out.ManagedSecretReference)
out.Authentication = in.Authentication
in.Authentication.DeepCopyInto(&out.Authentication)
out.DynamicSecret = in.DynamicSecret
out.TLS = in.TLS
}
@@ -476,7 +481,7 @@ func (in *InfisicalPushSecretSecretSource) DeepCopy() *InfisicalPushSecretSecret
func (in *InfisicalPushSecretSpec) DeepCopyInto(out *InfisicalPushSecretSpec) {
*out = *in
out.Destination = in.Destination
out.Authentication = in.Authentication
in.Authentication.DeepCopyInto(&out.Authentication)
in.Push.DeepCopyInto(&out.Push)
out.TLS = in.TLS
}
@@ -583,7 +588,7 @@ func (in *InfisicalSecretList) DeepCopyObject() runtime.Object {
func (in *InfisicalSecretSpec) DeepCopyInto(out *InfisicalSecretSpec) {
*out = *in
out.TokenSecretReference = in.TokenSecretReference
out.Authentication = in.Authentication
in.Authentication.DeepCopyInto(&out.Authentication)
in.ManagedSecretReference.DeepCopyInto(&out.ManagedSecretReference)
if in.ManagedKubeSecretReferences != nil {
in, out := &in.ManagedKubeSecretReferences, &out.ManagedKubeSecretReferences
@@ -654,6 +659,11 @@ func (in *KubernetesAuthDetails) DeepCopyInto(out *KubernetesAuthDetails) {
*out = *in
out.ServiceAccountRef = in.ServiceAccountRef
out.SecretsScope = in.SecretsScope
if in.ServiceAccountTokenAudiences != nil {
in, out := &in.ServiceAccountTokenAudiences, &out.ServiceAccountTokenAudiences
*out = make([]string, len(*in))
copy(*out, *in)
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesAuthDetails.

View File

@@ -73,6 +73,13 @@ spec:
type: object
kubernetesAuth:
properties:
autoCreateServiceAccountToken:
description: Optionally automatically create a service account
token for the configured service account. If this is set
to `true`, the operator will automatically create a service
account token for the configured service account. This field
is recommended in most cases.
type: boolean
identityId:
type: string
serviceAccountRef:
@@ -85,6 +92,13 @@ spec:
- name
- namespace
type: object
serviceAccountTokenAudiences:
description: The audiences to use for the service account
token. This is only relevant if `autoCreateServiceAccountToken`
is true.
items:
type: string
type: array
required:
- identityId
- serviceAccountRef

View File

@@ -73,6 +73,13 @@ spec:
type: object
kubernetesAuth:
properties:
autoCreateServiceAccountToken:
description: Optionally automatically create a service account
token for the configured service account. If this is set
to `true`, the operator will automatically create a service
account token for the configured service account. This field
is recommended in most cases.
type: boolean
identityId:
type: string
serviceAccountRef:
@@ -85,6 +92,13 @@ spec:
- name
- namespace
type: object
serviceAccountTokenAudiences:
description: The audiences to use for the service account
token. This is only relevant if `autoCreateServiceAccountToken`
is true.
items:
type: string
type: array
required:
- identityId
- serviceAccountRef

View File

@@ -136,6 +136,12 @@ spec:
type: object
kubernetesAuth:
properties:
autoCreateServiceAccountToken:
description: Optionally automatically create a service account
token for the configured service account. If this is set
to `true`, the operator will automatically create a service
account token for the configured service account.
type: boolean
identityId:
type: string
secretsScope:
@@ -163,6 +169,13 @@ spec:
- name
- namespace
type: object
serviceAccountTokenAudiences:
description: The audiences to use for the service account
token. This is only relevant if `autoCreateServiceAccountToken`
is true.
items:
type: string
type: array
required:
- identityId
- secretsScope

View File

@@ -16,6 +16,13 @@ rules:
- list
- update
- watch
- apiGroups:
- ""
resources:
- pods
verbs:
- get
- list
- apiGroups:
- ""
resources:
@@ -35,6 +42,12 @@ rules:
- get
- list
- watch
- apiGroups:
- ""
resources:
- serviceaccounts/token
verbs:
- create
- apiGroups:
- apps
resources:
@@ -55,6 +68,12 @@ rules:
- list
- update
- watch
- apiGroups:
- authentication.k8s.io
resources:
- tokenreviews
verbs:
- create
- apiGroups:
- secrets.infisical.com
resources:

View File

@@ -45,6 +45,9 @@ func (r *InfisicalDynamicSecretReconciler) GetLogger(req ctrl.Request) logr.Logg
// +kubebuilder:rbac:groups="",resources=configmaps,verbs=get;list;watch;create;update;delete
// +kubebuilder:rbac:groups=apps,resources=deployments,verbs=list;watch;get;update
// +kubebuilder:rbac:groups="",resources=serviceaccounts,verbs=get;list;watch
//+kubebuilder:rbac:groups="",resources=pods,verbs=get;list
//+kubebuilder:rbac:groups="authentication.k8s.io",resources=tokenreviews,verbs=create
//+kubebuilder:rbac:groups="",resources=serviceaccounts/token,verbs=create
func (r *InfisicalDynamicSecretReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {

View File

@@ -48,6 +48,9 @@ func (r *InfisicalPushSecretReconciler) GetLogger(req ctrl.Request) logr.Logger
//+kubebuilder:rbac:groups="",resources=configmaps,verbs=get;list;watch;create;update;delete
//+kubebuilder:rbac:groups=apps,resources=deployments,verbs=list;watch;get;update
//+kubebuilder:rbac:groups="",resources=serviceaccounts,verbs=get;list;watch
//+kubebuilder:rbac:groups="",resources=pods,verbs=get;list
//+kubebuilder:rbac:groups="authentication.k8s.io",resources=tokenreviews,verbs=create
//+kubebuilder:rbac:groups="",resources=serviceaccounts/token,verbs=create
// Reconcile is part of the main kubernetes reconciliation loop which aims to
// move the current state of the cluster closer to the desired state.

View File

@@ -44,6 +44,9 @@ func (r *InfisicalSecretReconciler) GetLogger(req ctrl.Request) logr.Logger {
//+kubebuilder:rbac:groups="",resources=configmaps,verbs=get;list;watch;create;update;delete
//+kubebuilder:rbac:groups=apps,resources=deployments;daemonsets;statefulsets,verbs=list;watch;get;update
//+kubebuilder:rbac:groups="",resources=serviceaccounts,verbs=get;list;watch
//+kubebuilder:rbac:groups="",resources=pods,verbs=get;list
//+kubebuilder:rbac:groups="authentication.k8s.io",resources=tokenreviews,verbs=create
//+kubebuilder:rbac:groups="",resources=serviceaccounts/token,verbs=create
// Reconcile is part of the main kubernetes reconciliation loop which aims to
// move the current state of the cluster closer to the desired state.

View File

@@ -8,12 +8,51 @@ import (
corev1 "k8s.io/api/core/v1"
authenticationv1 "k8s.io/api/authentication/v1"
"github.com/Infisical/infisical/k8-operator/api/v1alpha1"
"github.com/aws/smithy-go/ptr"
infisicalSdk "github.com/infisical/go-sdk"
"sigs.k8s.io/controller-runtime/pkg/client"
)
func GetServiceAccountToken(k8sClient client.Client, namespace string, serviceAccountName string) (string, error) {
func GetServiceAccountToken(k8sClient client.Client, namespace string, serviceAccountName string, autoCreateServiceAccountToken bool, serviceAccountTokenAudiences []string) (string, error) {
if autoCreateServiceAccountToken {
restClient, err := GetRestClientFromClient()
if err != nil {
return "", fmt.Errorf("failed to get REST client: %w", err)
}
tokenRequest := &authenticationv1.TokenRequest{
Spec: authenticationv1.TokenRequestSpec{
ExpirationSeconds: ptr.Int64(600), // 10 minutes. the token only needs to be valid for when we do the initial k8s login.
},
}
if len(serviceAccountTokenAudiences) > 0 {
// Conditionally add the audiences if they are specified.
// Failing to do this causes a default audience to be used, which is not what we want if the user doesn't specify any.
tokenRequest.Spec.Audiences = serviceAccountTokenAudiences
}
result := &authenticationv1.TokenRequest{}
err = restClient.
Post().
Namespace(namespace).
Resource("serviceaccounts").
Name(serviceAccountName).
SubResource("token").
Body(tokenRequest).
Do(context.Background()).
Into(result)
if err != nil {
return "", fmt.Errorf("failed to create token: %w", err)
}
return result.Status.Token, nil
}
serviceAccount := &corev1.ServiceAccount{}
err := k8sClient.Get(context.TODO(), client.ObjectKey{Name: serviceAccountName, Namespace: namespace}, serviceAccount)
@@ -172,7 +211,9 @@ func HandleKubernetesAuth(ctx context.Context, reconcilerClient client.Client, s
Namespace: infisicalPushSecret.Spec.Authentication.KubernetesAuth.ServiceAccountRef.Namespace,
Name: infisicalPushSecret.Spec.Authentication.KubernetesAuth.ServiceAccountRef.Name,
},
SecretsScope: v1alpha1.MachineIdentityScopeInWorkspace{},
SecretsScope: v1alpha1.MachineIdentityScopeInWorkspace{},
AutoCreateServiceAccountToken: infisicalPushSecret.Spec.Authentication.KubernetesAuth.AutoCreateServiceAccountToken,
ServiceAccountTokenAudiences: infisicalPushSecret.Spec.Authentication.KubernetesAuth.ServiceAccountTokenAudiences,
}
case SecretCrd.INFISICAL_DYNAMIC_SECRET:
@@ -188,7 +229,9 @@ func HandleKubernetesAuth(ctx context.Context, reconcilerClient client.Client, s
Namespace: infisicalDynamicSecret.Spec.Authentication.KubernetesAuth.ServiceAccountRef.Namespace,
Name: infisicalDynamicSecret.Spec.Authentication.KubernetesAuth.ServiceAccountRef.Name,
},
SecretsScope: v1alpha1.MachineIdentityScopeInWorkspace{},
SecretsScope: v1alpha1.MachineIdentityScopeInWorkspace{},
AutoCreateServiceAccountToken: infisicalDynamicSecret.Spec.Authentication.KubernetesAuth.AutoCreateServiceAccountToken,
ServiceAccountTokenAudiences: infisicalDynamicSecret.Spec.Authentication.KubernetesAuth.ServiceAccountTokenAudiences,
}
}
@@ -196,7 +239,14 @@ func HandleKubernetesAuth(ctx context.Context, reconcilerClient client.Client, s
return AuthenticationDetails{}, ErrAuthNotApplicable
}
serviceAccountToken, err := GetServiceAccountToken(reconcilerClient, kubernetesAuthSpec.ServiceAccountRef.Namespace, kubernetesAuthSpec.ServiceAccountRef.Name)
serviceAccountToken, err := GetServiceAccountToken(
reconcilerClient,
kubernetesAuthSpec.ServiceAccountRef.Namespace,
kubernetesAuthSpec.ServiceAccountRef.Name,
kubernetesAuthSpec.AutoCreateServiceAccountToken,
kubernetesAuthSpec.ServiceAccountTokenAudiences,
)
if err != nil {
return AuthenticationDetails{}, fmt.Errorf("unable to get service account token [err=%s]", err)
}

View File

@@ -9,6 +9,9 @@ import (
corev1 "k8s.io/api/core/v1"
k8Errors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/types"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"sigs.k8s.io/controller-runtime/pkg/client"
)
@@ -58,3 +61,32 @@ func GetInfisicalUniversalAuthFromKubeSecret(ctx context.Context, reconcilerClie
return model.MachineIdentityDetails{ClientId: string(clientIdFromSecret), ClientSecret: string(clientSecretFromSecret)}, nil
}
// getKubeClusterConfig resolves a Kubernetes REST configuration. It prefers
// the in-cluster service-account config and, when that is unavailable (e.g.
// running the operator locally), falls back to the default kubeconfig
// loading rules.
func getKubeClusterConfig() (*rest.Config, error) {
	inClusterCfg, err := rest.InClusterConfig()
	if err == nil {
		return inClusterCfg, nil
	}

	// Not running inside a cluster: build the config from local kubeconfig files.
	rules := clientcmd.NewDefaultClientConfigLoadingRules()
	overrides := &clientcmd.ConfigOverrides{}
	return clientcmd.NewNonInteractiveDeferredLoadingClientConfig(rules, overrides).ClientConfig()
}
// GetRestClientFromClient constructs a core/v1 REST client for the cluster
// detected by getKubeClusterConfig (in-cluster config or local kubeconfig).
func GetRestClientFromClient() (rest.Interface, error) {
	cfg, err := getKubeClusterConfig()
	if err != nil {
		return nil, err
	}

	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		return nil, err
	}

	return cs.CoreV1().RESTClient(), nil
}