Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-21 21:42:16 +00:00)

Compare commits: groups-pha ... groups-pha
215 commits
Commit SHA1 list:

0357e7c80e
ba1b223655
3b88a2759b
30ccb78c81
b4f1bec1a9
ab79342743
1957531ac4
61ae0e2fc7
87b571d6ff
1e6af8ad8f
a771ddf859
c4cd6909bb
49642480d3
b667dccc0d
fdda247120
ee8a88d062
33349839cd
8f3883c7d4
38cfb7fd41
a331eb8dc4
2dcb409d3b
39bcb73f3d
52189111d7
f369761920
8eb22630b6
d650fd68c0
387c899193
37882e6344
68a1aa6f46
fa18ca41ac
8485fdc1cd
49ae2386c0
f2b1f3f0e7
69aa20e35c
524c7ae78f
e13f7a7486
1867fb2fc4
5dd144b97b
b1b430e003
fb09980413
3b36cb8b3d
be6a98d0bb
f8e1ed09d2
5c71116be6
07cc4fd1ab
ea4ef7f7ef
0482424a1c
74bdbc0724
a0d5c67456
db4f4d8f28
d6f6f51d16
79a0f3d701
46912c4c3c
6636377cb5
26320ddce4
f5964040d7
dcaa7f1fce
a4119ee1bb
74f866715f
667f696d26
5f3938c33d
07845ad6af
17fa72be13
bf3e93460a
306709cde6
c41518c822
f0f2905789
212a7b49f0
22e3fcb43c
93b65a1534
039882e78b
f0f51089fe
447141ab1f
d2ba436338
ad0d281629
ce2a9c8640
ac97f273e3
69c50af14e
c8638479a8
8aa75484f3
66d70f5a25
8e7cf5f9ac
f9f79cb69e
4235be4be9
5c3f2e66fd
a37b3ccede
d64eb4b901
519403023a
b2a976f3d4
6e882aa46e
bf4db0a9ff
3a3e3a7afc
a7af3a48d9
cdba78b51d
0c324e804c
47aca3f3e2
80da2a19aa
858a35812a
31ef1a2183
66a6f9de71
6333eccc4a
0af2b113df
63a7941047
edeac08cb5
019b0ae09a
1d00bb0a64
d96f1320ed
50dbefeb48
56ac2c6780
c2f16da411
8223aee2ef
5bd2af9621
b3df6ce6b5
e12eb5347d
83a4426d31
3fd1fbc355
306d2b4bd9
c2c66af1f9
7ae65478aa
b1594e65c6
0bce5b1daa
207db93483
972f6a4887
6e1bece9d9
63e8bc1845
4f92663b66
a66a6790c0
bde853d280
acda627236
875afbb4d6
56f50a18dc
801c438d05
baba411502
4c20ac6564
4e8556dec2
2d7b9ec1e4
8bb9ed4394
e4246ae85f
f24067542f
a7f5a61f37
b5fd7698d8
61c3102573
d6a5bf9d50
d0cb06d875
70f63b3190
d42f620e1b
2b0670a409
cc25639157
5ff30aed10
656ec4bf16
0bac9a8e02
5142e6e5f6
5c0e5a8ae0
71e309bbcb
8ff407927c
49c735caf9
b4de2ea85d
8b8baf1ef2
2a89b872c5
2d2d9a5987
a20a60850b
35e38c23dd
b79e61c86b
e555d3129d
a41883137c
c414bf6c39
9b782a9da6
497c0cf63d
93761f37ea
68e530e5d2
d9005e8665
20b1cdf909
4bae65cc55
6da5f12855
7a242c4976
b01d381993
1ac18fcf0c
8d5ef5f4d9
35b5253853
99d59a38d5
9ab1fce0e0
9992fbf3dd
5e0d64525f
8bcf936b91
1a2508d91a
e81a77652f
26778d92d3
b135ba263c
9b7ef55ad7
872f8bdad8
80b0dc6895
20898c00c6
2200bd646e
fb69236f47
918734b26b
729c75112b
738e8cfc5c
6daeed68a0
31a499c9cd
358ca3decd
0899fdb7d5
54fcc23a6c
f9957e111c
1193e33890
ec64753795
c908310f6e
ee2b8a594a
3ae27e088f
393c0c9e90
5e453ab8a6
273c78c0a5
1bcc742466
1fc9e60254
126e385046
2f932ad103
.github/resources/rename_migration_files.py (vendored, new file, 26 lines)

@@ -0,0 +1,26 @@
import os
from datetime import datetime, timedelta


def rename_migrations():
    migration_folder = "./backend/src/db/migrations"
    with open("added_files.txt", "r") as file:
        changed_files = file.readlines()

    # Find the latest file among the changed files
    latest_timestamp = datetime.now()  # utc time
    for file_path in changed_files:
        file_path = file_path.strip()
        # each new file bump by 1s
        latest_timestamp = latest_timestamp + timedelta(seconds=1)

        new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
        old_filename = os.path.join(migration_folder, file_path)
        os.rename(old_filename, new_filename)
        print(f"Renamed {old_filename} to {new_filename}")

    if len(changed_files) == 0:
        print("No new files added to migration folder")


if __name__ == "__main__":
    rename_migrations()
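Worked example: if the action runs at 2024-05-03 10:11:12 and added_files.txt contains the hypothetical name 20240101000000_add-widgets.ts, the first iteration bumps the base time by one second and renames the file to 20240503101113_add-widgets.ts. Note that datetime.now() returns the runner's local time; on GitHub-hosted Ubuntu runners the system timezone is UTC, which is what the comment assumes.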
.github/workflows/update-be-new-migration-latest-timestamp.yml (vendored, new file, 48 lines)

@@ -0,0 +1,48 @@
name: Rename Migrations

on:
  pull_request:
    types: [closed]
    paths:
      - 'backend/src/db/migrations/**'

jobs:
  rename:
    runs-on: ubuntu-latest
    if: github.event.pull_request.merged == true

    steps:
      - name: Check out repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get list of newly added files in migration folder
        run: |
          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
          if [ ! -s added_files.txt ]; then
            echo "No new files added. Skipping"
            echo "SKIP_RENAME=true" >> $GITHUB_ENV
          fi

      - name: Script to rename migrations
        if: env.SKIP_RENAME != 'true'
        run: python .github/resources/rename_migration_files.py

      - name: Commit and push changes
        if: env.SKIP_RENAME != 'true'
        run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
          git add ./backend/src/db/migrations
          rm added_files.txt
          git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"

      - name: Create Pull Request
        if: env.SKIP_RENAME != 'true'
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
          title: 'GH Action: rename new migration file timestamp'
          branch-suffix: timestamp
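For example, a merged PR that adds the hypothetical file backend/src/db/migrations/20240101000000_add-widgets.ts leaves that single basename in added_files.txt, which the rename script above then re-stamps. The git diff --name-status HEAD^ HEAD invocation compares the merge commit against its first parent, so only files the PR newly added (status A) are collected.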
@@ -2,4 +2,5 @@
  frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
  frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
  frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
  frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
+ docs/self-hosting/configuration/envars.mdx:generic-api-key:106
README.md

@@ -76,7 +76,7 @@ Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/int

| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------- | --------------------------- |
- | The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2"><img src=".github/images/deploy-to-aws.png" width="150" width="300" /></a> <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean"> <img width="217" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/> </a> <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
+ | The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |

### Run Infisical locally
backend/src/@types/fastify.d.ts (vendored, 2 additions)

@@ -3,6 +3,7 @@ import "fastify";
import { TUsers } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
+ import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";

@@ -120,6 +121,7 @@ declare module "fastify" {
    scim: TScimServiceFactory;
    ldap: TLdapConfigServiceFactory;
    auditLog: TAuditLogServiceFactory;
+   auditLogStream: TAuditLogStreamServiceFactory;
    secretScanning: TSecretScanningServiceFactory;
    license: TLicenseServiceFactory;
    trustedIp: TTrustedIpServiceFactory;
backend/src/@types/knex.d.ts (vendored, 8 additions)

@@ -7,6 +7,9 @@ import {
  TApiKeysUpdate,
  TAuditLogs,
  TAuditLogsInsert,
+ TAuditLogStreams,
+ TAuditLogStreamsInsert,
+ TAuditLogStreamsUpdate,
  TAuditLogsUpdate,
  TAuthTokens,
  TAuthTokenSessions,

@@ -404,6 +407,11 @@ declare module "knex/types/tables" {
    [TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
    [TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
    [TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
+   [TableName.AuditLogStream]: Knex.CompositeTableType<
+     TAuditLogStreams,
+     TAuditLogStreamsInsert,
+     TAuditLogStreamsUpdate
+   >;
    [TableName.GitAppInstallSession]: Knex.CompositeTableType<
      TGitAppInstallSessions,
      TGitAppInstallSessionsInsert,
@@ -0,0 +1,15 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.string("searchFilter").notNullable().defaultTo("");
  });
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.alterTable(TableName.LdapConfig, (t) => {
    t.dropColumn("searchFilter");
  });
}
backend/src/db/migrations/20240429154610_audit-log-index.ts (new file, 28 lines)

@@ -0,0 +1,28 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
  const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist && doesCreatedAtExist) t.index(["projectId", "createdAt"]);
      if (doesOrgIdExist && doesCreatedAtExist) t.index(["orgId", "createdAt"]);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
  const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
  const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");

  if (await knex.schema.hasTable(TableName.AuditLog)) {
    await knex.schema.alterTable(TableName.AuditLog, (t) => {
      if (doesProjectIdExist && doesCreatedAtExist) t.dropIndex(["projectId", "createdAt"]);
      if (doesOrgIdExist && doesCreatedAtExist) t.dropIndex(["orgId", "createdAt"]);
    });
  }
}
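The two composite indexes serve the audit-log list queries, which filter by organization or project and sort by time. A minimal sketch (not part of the PR) of the query shape they accelerate, using Knex:

import { Knex } from "knex";

// Hypothetical helper: fetch an org's most recent audit logs.
// The where + orderBy pair matches the (orgId, createdAt) index exactly.
export const findRecentOrgAuditLogs = (knex: Knex, orgId: string, limit = 20) =>
  knex("audit_logs") // TableName.AuditLog
    .where({ orgId })
    .orderBy("createdAt", "desc")
    .limit(limit);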
backend/src/db/migrations/20240503101144_audit-log-stream.ts (new file, 28 lines)

@@ -0,0 +1,28 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.AuditLogStream))) {
    await knex.schema.createTable(TableName.AuditLogStream, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("url").notNullable();
      t.text("encryptedHeadersCiphertext");
      t.text("encryptedHeadersIV");
      t.text("encryptedHeadersTag");
      t.string("encryptedHeadersAlgorithm");
      t.string("encryptedHeadersKeyEncoding");
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.timestamps(true, true, true);
    });
  }

  await createOnUpdateTrigger(knex, TableName.AuditLogStream);
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, TableName.AuditLogStream);
  await knex.schema.dropTableIfExists(TableName.AuditLogStream);
}
@@ -0,0 +1,47 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const isUsersTablePresent = await knex.schema.hasTable(TableName.Users);
  if (isUsersTablePresent) {
    await knex.schema.alterTable(TableName.Users, (t) => {
      t.boolean("isEmailVerified");
    });
  }

  const isUserAliasTablePresent = await knex.schema.hasTable(TableName.UserAliases);
  if (isUserAliasTablePresent) {
    await knex.schema.alterTable(TableName.UserAliases, (t) => {
      t.string("username").nullable().alter();
    });
  }

  const isSuperAdminTablePresent = await knex.schema.hasTable(TableName.SuperAdmin);
  if (isSuperAdminTablePresent) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.boolean("trustSamlEmails").defaultTo(false);
      t.boolean("trustLdapEmails").defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.Users, "isEmailVerified")) {
    await knex.schema.alterTable(TableName.Users, (t) => {
      t.dropColumn("isEmailVerified");
    });
  }

  if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustSamlEmails")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("trustSamlEmails");
    });
  }

  if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustLdapEmails")) {
    await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
      t.dropColumn("trustLdapEmails");
    });
  }
}
backend/src/db/schemas/audit-log-streams.ts (new file, 25 lines)

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const AuditLogStreamsSchema = z.object({
  id: z.string().uuid(),
  url: z.string(),
  encryptedHeadersCiphertext: z.string().nullable().optional(),
  encryptedHeadersIV: z.string().nullable().optional(),
  encryptedHeadersTag: z.string().nullable().optional(),
  encryptedHeadersAlgorithm: z.string().nullable().optional(),
  encryptedHeadersKeyEncoding: z.string().nullable().optional(),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TAuditLogStreams = z.infer<typeof AuditLogStreamsSchema>;
export type TAuditLogStreamsInsert = Omit<z.input<typeof AuditLogStreamsSchema>, TImmutableDBKeys>;
export type TAuditLogStreamsUpdate = Partial<Omit<z.input<typeof AuditLogStreamsSchema>, TImmutableDBKeys>>;
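A quick sketch of how the generated schema is used: it validates a raw DB row and derives the insert/update types above. The row values here are placeholders.

import { AuditLogStreamsSchema, TAuditLogStreamsInsert } from "./audit-log-streams";

const row = AuditLogStreamsSchema.parse({
  id: "5f2b7c0a-9d14-4e1b-8a3c-2f6d1e0b4a91", // placeholder UUID
  url: "https://logs.example.com/ingest",
  orgId: "0d9c3b7e-5a21-4f6d-9e08-7b4a2c1d3e55", // placeholder UUID
  createdAt: new Date(),
  updatedAt: new Date()
});

// The insert type omits the immutable keys (presumably id, createdAt, updatedAt):
const insert: TAuditLogStreamsInsert = { url: row.url, orgId: row.orgId };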
@@ -1,4 +1,5 @@
export * from "./api-keys";
+ export * from "./audit-log-streams";
export * from "./audit-logs";
export * from "./auth-token-sessions";
export * from "./auth-tokens";
@@ -25,7 +25,8 @@ export const LdapConfigsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  groupSearchBase: z.string().default(""),
- groupSearchFilter: z.string().default("")
+ groupSearchFilter: z.string().default(""),
+ searchFilter: z.string().default("")
});

export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
@@ -62,6 +62,7 @@ export enum TableName {
  LdapConfig = "ldap_configs",
  LdapGroupMap = "ldap_group_maps",
  AuditLog = "audit_logs",
+ AuditLogStream = "audit_log_streams",
  GitAppInstallSession = "git_app_install_sessions",
  GitAppOrg = "git_app_org",
  SecretScanningGitRisk = "secret_scanning_git_risks",
@@ -14,7 +14,9 @@ export const SuperAdminSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  allowedSignUpDomain: z.string().nullable().optional(),
- instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000")
+ instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000"),
+ trustSamlEmails: z.boolean().default(false).nullable().optional(),
+ trustLdapEmails: z.boolean().default(false).nullable().optional()
});

export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
export const UserAliasesSchema = z.object({
  id: z.string().uuid(),
  userId: z.string().uuid(),
- username: z.string(),
+ username: z.string().nullable().optional(),
  aliasType: z.string(),
  externalId: z.string(),
  emails: z.string().array().nullable().optional(),
@@ -21,7 +21,8 @@ export const UsersSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  isGhost: z.boolean().default(false),
- username: z.string()
+ username: z.string(),
+ isEmailVerified: z.boolean().nullable().optional()
});

export type TUsers = z.infer<typeof UsersSchema>;
backend/src/ee/routes/v1/audit-log-stream-router.ts (new file, 215 lines)

@@ -0,0 +1,215 @@
import { z } from "zod";

import { AUDIT_LOG_STREAMS } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedAuditLogStreamSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerAuditLogStreamRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Create an Audit Log Stream.",
      security: [
        {
          bearerAuth: []
        }
      ],
      body: z.object({
        url: z.string().min(1).describe(AUDIT_LOG_STREAMS.CREATE.url),
        headers: z
          .object({
            key: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.CREATE.headers.key),
            value: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.CREATE.headers.value)
          })
          .describe(AUDIT_LOG_STREAMS.CREATE.headers.desc)
          .array()
          .optional()
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.create({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        url: req.body.url,
        headers: req.body.headers
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "PATCH",
    url: "/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Update an Audit Log Stream by ID.",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        id: z.string().describe(AUDIT_LOG_STREAMS.UPDATE.id)
      }),
      body: z.object({
        url: z.string().optional().describe(AUDIT_LOG_STREAMS.UPDATE.url),
        headers: z
          .object({
            key: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.UPDATE.headers.key),
            value: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.UPDATE.headers.value)
          })
          .describe(AUDIT_LOG_STREAMS.UPDATE.headers.desc)
          .array()
          .optional()
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.updateById({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        id: req.params.id,
        url: req.body.url,
        headers: req.body.headers
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "DELETE",
    url: "/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Delete an Audit Log Stream by ID.",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        id: z.string().describe(AUDIT_LOG_STREAMS.DELETE.id)
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.deleteById({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        id: req.params.id
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "GET",
    url: "/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "Get an Audit Log Stream by ID.",
      security: [
        {
          bearerAuth: []
        }
      ],
      params: z.object({
        id: z.string().describe(AUDIT_LOG_STREAMS.GET_BY_ID.id)
      }),
      response: {
        200: z.object({
          auditLogStream: SanitizedAuditLogStreamSchema.extend({
            headers: z
              .object({
                key: z.string(),
                value: z.string()
              })
              .array()
              .optional()
          })
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStream = await server.services.auditLogStream.getById({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod,
        id: req.params.id
      });

      return { auditLogStream };
    }
  });

  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "List Audit Log Streams.",
      security: [
        {
          bearerAuth: []
        }
      ],
      response: {
        200: z.object({
          auditLogStreams: SanitizedAuditLogStreamSchema.array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const auditLogStreams = await server.services.auditLogStream.list({
        actorId: req.permission.id,
        actor: req.permission.type,
        actorOrgId: req.permission.orgId,
        actorAuthMethod: req.permission.authMethod
      });

      return { auditLogStreams };
    }
  });
};
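A hedged usage sketch for these endpoints. It assumes the EE v1 router is mounted under /api/v1, so the /audit-log-streams prefix registered in the routes index that follows resolves to /api/v1/audit-log-streams, and that a valid bearer token is at hand:

const baseUrl = "https://app.infisical.com/api/v1/audit-log-streams"; // assumed mount point
const token = process.env.INFISICAL_TOKEN ?? "";

const createStream = async () => {
  const res = await fetch(baseUrl, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      url: "https://logs.example.com/ingest", // placeholder collector endpoint
      headers: [{ key: "Authorization", value: "Bearer <collector-token>" }]
    })
  });
  const { auditLogStream } = await res.json();
  return auditLogStream; // sanitized shape; presumably the encrypted header fields are not returned
};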
@@ -1,3 +1,4 @@
+ import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerGroupRouter } from "./group-router";

@@ -55,6 +56,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
  await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
  await server.register(registerSecretVersionRouter, { prefix: "/secret" });
  await server.register(registerGroupRouter, { prefix: "/groups" });
+ await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
  await server.register(
    async (privilegeRouter) => {
      await privilegeRouter.register(registerUserAdditionalPrivilegeRouter, { prefix: "/users" });
@@ -16,8 +16,9 @@ import { z } from "zod";

import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
- import { searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
+ import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";

@@ -52,14 +53,22 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
      // eslint-disable-next-line
      async (req: IncomingMessage, user, cb) => {
        try {
          if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
          const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;

-         const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
-         const searchFilter =
-           ldapConfig.groupSearchFilter ||
-           groupFilter.replace("{{.Username}}", user.uid).replace("{{.UserDN}}", user.dn);
-         const shouldProcessGroups = ldapConfig.groupSearchFilter && ldapConfig.groupSearchBase;
+         let groups: { dn: string; cn: string }[] | undefined;
+         if (ldapConfig.groupSearchBase) {
+           const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
+           const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
+             .replace(/{{\.Username}}/g, user.uid)
+             .replace(/{{\.UserDN}}/g, user.dn);
+
+           if (!isValidLdapFilter(groupSearchFilter)) {
+             throw new Error("Generated LDAP search filter is invalid.");
+           }
+
+           groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
+         }

          const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
            ldapConfigId: ldapConfig.id,

@@ -67,10 +76,8 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
            username: user.uid,
            firstName: user.givenName ?? user.cn ?? "",
            lastName: user.sn ?? "",
-           emails: user.mail ? [user.mail] : [],
-           groups: shouldProcessGroups
-             ? await searchGroups(ldapConfig, searchFilter, ldapConfig.groupSearchBase)
-             : undefined,
+           email: user.mail,
+           groups,
            relayState: ((req as unknown as FastifyRequest).body as { RelayState?: string }).RelayState,
            orgId: (req as unknown as FastifyRequest).ldapConfig.organization
          });

@@ -132,6 +139,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
          bindDN: z.string(),
          bindPass: z.string(),
          searchBase: z.string(),
+         searchFilter: z.string(),
          groupSearchBase: z.string(),
          groupSearchFilter: z.string(),
          caCert: z.string()

@@ -165,8 +173,12 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
        bindDN: z.string().trim(),
        bindPass: z.string().trim(),
        searchBase: z.string().trim(),
-       groupSearchBase: z.string().trim().default(""),
-       groupSearchFilter: z.string().trim().default(""),
+       searchFilter: z.string().trim().default("(uid={{username}})"),
+       groupSearchBase: z.string().trim(),
+       groupSearchFilter: z
+         .string()
+         .trim()
+         .default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))"),
        caCert: z.string().trim().default("")
      }),
      response: {

@@ -202,6 +214,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
        bindDN: z.string().trim(),
        bindPass: z.string().trim(),
        searchBase: z.string().trim(),
+       searchFilter: z.string().trim(),
        groupSearchBase: z.string().trim(),
        groupSearchFilter: z.string().trim(),
        caCert: z.string().trim()

@@ -327,4 +340,32 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
      return ldapGroupMap;
    }
  });

+ server.route({
+   method: "POST",
+   url: "/config/:configId/test-connection",
+   config: {
+     rateLimit: readLimit
+   },
+   onRequest: verifyAuth([AuthMode.JWT]),
+   schema: {
+     params: z.object({
+       configId: z.string().trim()
+     }),
+     response: {
+       200: z.boolean()
+     }
+   },
+   handler: async (req) => {
+     const result = await server.services.ldap.testLDAPConnection({
+       actor: req.permission.type,
+       actorId: req.permission.id,
+       orgId: req.permission.orgId,
+       actorAuthMethod: req.permission.authMethod,
+       actorOrgId: req.permission.orgId,
+       ldapConfigId: req.params.configId
+     });
+     return result;
+   }
+ });
};
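The substitution logic above is small enough to sketch in isolation. This standalone version (names are illustrative, not from the PR) shows how the {{.Username}} and {{.UserDN}} placeholders in the default group filter are expanded before the filter is validated:

// Expand the template placeholders globally, as the login hook does.
const applyLdapTemplate = (filter: string, username: string, userDN: string) =>
  filter.replace(/{{\.Username}}/g, username).replace(/{{\.UserDN}}/g, userDN);

const expanded = applyLdapTemplate(
  "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))",
  "jdoe",
  "uid=jdoe,ou=people,dc=example,dc=com"
);
// => "(|(memberUid=jdoe)(member=uid=jdoe,ou=people,dc=example,dc=com)(uniqueMember=uid=jdoe,ou=people,dc=example,dc=com))"

Using a global regex rather than a string pattern matters here: the UserDN placeholder occurs twice in the default filter, and String.prototype.replace with a string pattern only replaces the first occurrence, which is what the old code did.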
@@ -102,12 +102,12 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
        if (!profile) throw new BadRequestError({ message: "Missing profile" });
+       const email = profile?.email ?? (profile?.emailAddress as string); // emailAddress fallback is added because in Rippling the field `email` is reserved

-       if (!profile.email || !profile.firstName) {
+       if (!email || !profile.firstName) {
          throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
        }

        const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
          username: profile.nameID ?? email,
          externalId: profile.nameID,
          email,
          firstName: profile.firstName as string,
          lastName: profile.lastName as string,
@@ -153,7 +153,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const users = await req.server.services.scim.listScimUsers({
-       offset: req.query.startIndex,
+       startIndex: req.query.startIndex,
        limit: req.query.count,
        filter: req.query.filter,
        orgId: req.permission.orgId

@@ -163,11 +163,11 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
  });

  server.route({
-   url: "/Users/:userId",
+   url: "/Users/:orgMembershipId",
    method: "GET",
    schema: {
      params: z.object({
-       userId: z.string().trim()
+       orgMembershipId: z.string().trim()
      }),
      response: {
        201: z.object({

@@ -193,7 +193,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const user = await req.server.services.scim.getScimUser({
-       userId: req.params.userId,
+       orgMembershipId: req.params.orgMembershipId,
        orgId: req.permission.orgId
      });
      return user;

@@ -249,7 +249,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
      const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;

      const user = await req.server.services.scim.createScimUser({
        username: req.body.userName,
+       externalId: req.body.userName,
        email: primaryEmail,
        firstName: req.body.name.givenName,
        lastName: req.body.name.familyName,

@@ -261,11 +261,11 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
  });

  server.route({
-   url: "/Users/:userId",
+   url: "/Users/:orgMembershipId",
    method: "DELETE",
    schema: {
      params: z.object({
-       userId: z.string().trim()
+       orgMembershipId: z.string().trim()
      }),
      response: {
        200: z.object({})

@@ -274,7 +274,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const user = await req.server.services.scim.deleteScimUser({
-       userId: req.params.userId,
+       orgMembershipId: req.params.orgMembershipId,
        orgId: req.permission.orgId
      });

@@ -361,7 +361,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
    handler: async (req) => {
      const groups = await req.server.services.scim.listScimGroups({
        orgId: req.permission.orgId,
-       offset: req.query.startIndex,
+       startIndex: req.query.startIndex,
        limit: req.query.count
      });

@@ -416,10 +416,10 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
        displayName: z.string().trim(),
        members: z.array(
          z.object({
-           value: z.string(), // infisical userId
+           value: z.string(), // infisical orgMembershipId
            display: z.string()
          })
-       ) // note: is this where members are added to group?
+       )
      }),
      response: {
        200: z.object({

@@ -534,11 +534,11 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
  });

  server.route({
-   url: "/Users/:userId",
+   url: "/Users/:orgMembershipId",
    method: "PUT",
    schema: {
      params: z.object({
-       userId: z.string().trim()
+       orgMembershipId: z.string().trim()
      }),
      body: z.object({
        schemas: z.array(z.string()),

@@ -575,7 +575,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
    onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
    handler: async (req) => {
      const user = await req.server.services.scim.replaceScimUser({
-       userId: req.params.userId,
+       orgMembershipId: req.params.orgMembershipId,
        orgId: req.permission.orgId,
        active: req.body.active
      });
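The net effect for SCIM providers: every /Users/:id style endpoint (GET, PUT, DELETE) now keys on the organization membership ID rather than the Infisical user ID, and the list endpoints pass startIndex through under its SCIM name instead of translating it to an offset.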
backend/src/ee/services/audit-log-stream/audit-log-stream-dal.ts (new file, 11 lines)

@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TAuditLogStreamDALFactory = ReturnType<typeof auditLogStreamDALFactory>;

export const auditLogStreamDALFactory = (db: TDbClient) => {
  const orm = ormify(db, TableName.AuditLogStream);

  return orm;
};
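ormify appears to be Infisical's generic Knex ORM helper; the stream service that follows relies on the generated find, findById, create, updateById, and deleteById methods, so no bespoke queries are needed in this DAL.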
backend/src/ee/services/audit-log-stream/audit-log-stream-service.ts (new file, 233 lines)

@@ -0,0 +1,233 @@
import { ForbiddenError } from "@casl/ability";
import { RawAxiosRequestHeaders } from "axios";

import { SecretKeyEncoding } from "@app/db/schemas";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { validateLocalIps } from "@app/lib/validator";

import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TAuditLogStreamDALFactory } from "./audit-log-stream-dal";
import {
  LogStreamHeaders,
  TCreateAuditLogStreamDTO,
  TDeleteAuditLogStreamDTO,
  TGetDetailsAuditLogStreamDTO,
  TListAuditLogStreamDTO,
  TUpdateAuditLogStreamDTO
} from "./audit-log-stream-types";

type TAuditLogStreamServiceFactoryDep = {
  auditLogStreamDAL: TAuditLogStreamDALFactory;
  permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TAuditLogStreamServiceFactory = ReturnType<typeof auditLogStreamServiceFactory>;

export const auditLogStreamServiceFactory = ({
  auditLogStreamDAL,
  permissionService,
  licenseService
}: TAuditLogStreamServiceFactoryDep) => {
  const create = async ({
    url,
    actor,
    headers = [],
    actorId,
    actorOrgId,
    actorAuthMethod
  }: TCreateAuditLogStreamDTO) => {
    if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.auditLogStreams)
      throw new BadRequestError({
        message: "Failed to create audit log streams due to plan restriction. Upgrade plan to create group."
      });

    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);

    validateLocalIps(url);

    const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
    if (totalStreams.length >= plan.auditLogStreamLimit) {
      throw new BadRequestError({
        message:
          "Failed to create audit log streams due to plan limit reached. Kindly contact Infisical to add more streams."
      });
    }

    // testing connection first
    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
    if (headers.length)
      headers.forEach(({ key, value }) => {
        streamHeaders[key] = value;
      });
    await request
      .post(
        url,
        { ping: "ok" },
        {
          headers: streamHeaders,
          // request timeout
          timeout: AUDIT_LOG_STREAM_TIMEOUT,
          // connection timeout
          signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
        }
      )
      .catch((err) => {
        throw new Error(`Failed to connect with the source ${(err as Error)?.message}`);
      });
    const encryptedHeaders = headers ? infisicalSymmetricEncypt(JSON.stringify(headers)) : undefined;
    const logStream = await auditLogStreamDAL.create({
      orgId: actorOrgId,
      url,
      ...(encryptedHeaders
        ? {
            encryptedHeadersCiphertext: encryptedHeaders.ciphertext,
            encryptedHeadersIV: encryptedHeaders.iv,
            encryptedHeadersTag: encryptedHeaders.tag,
            encryptedHeadersAlgorithm: encryptedHeaders.algorithm,
            encryptedHeadersKeyEncoding: encryptedHeaders.encoding
          }
        : {})
    });
    return logStream;
  };

  const updateById = async ({
    id,
    url,
    actor,
    headers = [],
    actorId,
    actorOrgId,
    actorAuthMethod
  }: TUpdateAuditLogStreamDTO) => {
    if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

    const plan = await licenseService.getPlan(actorOrgId);
    if (!plan.auditLogStreams)
      throw new BadRequestError({
        message: "Failed to update audit log streams due to plan restriction. Upgrade plan to create group."
      });

    const logStream = await auditLogStreamDAL.findById(id);
    if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

    const { orgId } = logStream;
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);

    if (url) validateLocalIps(url);

    // testing connection first
    const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
    if (headers.length)
      headers.forEach(({ key, value }) => {
        streamHeaders[key] = value;
      });

    await request
      .post(
        url || logStream.url,
        { ping: "ok" },
        {
          headers: streamHeaders,
          // request timeout
          timeout: AUDIT_LOG_STREAM_TIMEOUT,
          // connection timeout
          signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
        }
      )
      .catch((err) => {
        throw new Error(`Failed to connect with the source ${(err as Error)?.message}`);
      });

    const encryptedHeaders = headers ? infisicalSymmetricEncypt(JSON.stringify(headers)) : undefined;
    const updatedLogStream = await auditLogStreamDAL.updateById(id, {
      url,
      ...(encryptedHeaders
        ? {
            encryptedHeadersCiphertext: encryptedHeaders.ciphertext,
            encryptedHeadersIV: encryptedHeaders.iv,
            encryptedHeadersTag: encryptedHeaders.tag,
            encryptedHeadersAlgorithm: encryptedHeaders.algorithm,
            encryptedHeadersKeyEncoding: encryptedHeaders.encoding
          }
        : {})
    });
    return updatedLogStream;
  };

  const deleteById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TDeleteAuditLogStreamDTO) => {
    if (!actorOrgId) throw new BadRequestError({ message: "Missing org id from token" });

    const logStream = await auditLogStreamDAL.findById(id);
    if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

    const { orgId } = logStream;
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Settings);

    const deletedLogStream = await auditLogStreamDAL.deleteById(id);
    return deletedLogStream;
  };

  const getById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TGetDetailsAuditLogStreamDTO) => {
    const logStream = await auditLogStreamDAL.findById(id);
    if (!logStream) throw new BadRequestError({ message: "Audit log stream not found" });

    const { orgId } = logStream;
    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);

    const headers =
      logStream?.encryptedHeadersCiphertext && logStream?.encryptedHeadersIV && logStream?.encryptedHeadersTag
        ? (JSON.parse(
            infisicalSymmetricDecrypt({
              tag: logStream.encryptedHeadersTag,
              iv: logStream.encryptedHeadersIV,
              ciphertext: logStream.encryptedHeadersCiphertext,
              keyEncoding: logStream.encryptedHeadersKeyEncoding as SecretKeyEncoding
            })
          ) as LogStreamHeaders[])
        : undefined;

    return { ...logStream, headers };
  };

  const list = async ({ actor, actorId, actorOrgId, actorAuthMethod }: TListAuditLogStreamDTO) => {
    const { permission } = await permissionService.getOrgPermission(
      actor,
      actorId,
      actorOrgId,
      actorAuthMethod,
      actorOrgId
    );
    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);

    const logStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
    return logStreams;
  };

  return {
    create,
    updateById,
    deleteById,
    getById,
    list
  };
};
backend/src/ee/services/audit-log-stream/audit-log-stream-types.ts (new file, 27 lines)

@@ -0,0 +1,27 @@
import { TOrgPermission } from "@app/lib/types";

export type LogStreamHeaders = {
  key: string;
  value: string;
};

export type TCreateAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
  url: string;
  headers?: LogStreamHeaders[];
};

export type TUpdateAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
  id: string;
  url?: string;
  headers?: LogStreamHeaders[];
};

export type TDeleteAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
  id: string;
};

export type TListAuditLogStreamDTO = Omit<TOrgPermission, "orgId">;

export type TGetDetailsAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
  id: string;
};
@@ -1,13 +1,21 @@
+ import { RawAxiosRequestHeaders } from "axios";
+
+ import { SecretKeyEncoding } from "@app/db/schemas";
+ import { request } from "@app/lib/config/request";
+ import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TProjectDALFactory } from "@app/services/project/project-dal";

+ import { TAuditLogStreamDALFactory } from "../audit-log-stream/audit-log-stream-dal";
+ import { LogStreamHeaders } from "../audit-log-stream/audit-log-stream-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { TAuditLogDALFactory } from "./audit-log-dal";
import { TCreateAuditLogDTO } from "./audit-log-types";

type TAuditLogQueueServiceFactoryDep = {
  auditLogDAL: TAuditLogDALFactory;
+ auditLogStreamDAL: Pick<TAuditLogStreamDALFactory, "find">;
  queueService: TQueueServiceFactory;
  projectDAL: Pick<TProjectDALFactory, "findById">;
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;

@@ -15,11 +23,15 @@ type TAuditLogQueueServiceFactoryDep = {

export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;

+ // keep this timeout 5s it must be fast because else the queue will take time to finish
+ // audit log is a crowded queue thus needs to be fast
+ export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
export const auditLogQueueServiceFactory = ({
  auditLogDAL,
  queueService,
  projectDAL,
- licenseService
+ licenseService,
+ auditLogStreamDAL
}: TAuditLogQueueServiceFactoryDep) => {
  const pushToLog = async (data: TCreateAuditLogDTO) => {
    await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {

@@ -47,7 +59,7 @@ export const auditLogQueueServiceFactory = ({
    // skip inserting if audit log retention is 0 meaning its not supported
    if (ttl === 0) return;

-   await auditLogDAL.create({
+   const auditLog = await auditLogDAL.create({
      actor: actor.type,
      actorMetadata: actor.metadata,
      userAgent,

@@ -59,6 +71,46 @@ export const auditLogQueueServiceFactory = ({
      eventMetadata: event.metadata,
      userAgentType
    });

+   const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
+   await Promise.allSettled(
+     logStreams.map(
+       async ({
+         url,
+         encryptedHeadersTag,
+         encryptedHeadersIV,
+         encryptedHeadersKeyEncoding,
+         encryptedHeadersCiphertext
+       }) => {
+         const streamHeaders =
+           encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
+             ? (JSON.parse(
+                 infisicalSymmetricDecrypt({
+                   keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
+                   iv: encryptedHeadersIV,
+                   tag: encryptedHeadersTag,
+                   ciphertext: encryptedHeadersCiphertext
+                 })
+               ) as LogStreamHeaders[])
+             : [];
+
+         const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
+
+         if (streamHeaders.length)
+           streamHeaders.forEach(({ key, value }) => {
+             headers[key] = value;
+           });
+
+         return request.post(url, auditLog, {
+           headers,
+           // request timeout
+           timeout: AUDIT_LOG_STREAM_TIMEOUT,
+           // connection timeout
+           signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
+         });
+       }
+     )
+   );
  });

  queueService.start(QueueName.AuditLogPrune, async () => {
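For completeness, a minimal sketch (not part of the PR) of a collector that could sit behind a stream URL. It checks the shared header configured on the stream and accepts both real audit-log posts and the { ping: "ok" } test post the service sends on create/update:

import http from "node:http";

http
  .createServer((req, res) => {
    if (req.headers.authorization !== "Bearer <collector-token>") {
      res.writeHead(401).end();
      return;
    }
    let body = "";
    req.on("data", (chunk) => {
      body += chunk;
    });
    req.on("end", () => {
      console.log("received:", body); // an audit log row, or {"ping":"ok"}
      res.writeHead(200, { "Content-Type": "application/json" }).end("{}");
    });
  })
  .listen(8080);

Replying quickly matters: the 5-second AUDIT_LOG_STREAM_TIMEOUT applies to every streamed event, and Promise.allSettled means one slow collector delays (but does not fail) the queue job.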
194
backend/src/ee/services/dynamic-secret/providers/aws-iam.ts
Normal file
194
backend/src/ee/services/dynamic-secret/providers/aws-iam.ts
Normal file
@ -0,0 +1,194 @@
|
||||
import {
|
||||
AddUserToGroupCommand,
|
||||
AttachUserPolicyCommand,
|
||||
CreateAccessKeyCommand,
|
||||
CreateUserCommand,
|
||||
DeleteAccessKeyCommand,
|
||||
DeleteUserCommand,
|
||||
DeleteUserPolicyCommand,
|
||||
DetachUserPolicyCommand,
|
||||
GetUserCommand,
|
||||
IAMClient,
|
||||
ListAccessKeysCommand,
|
||||
ListAttachedUserPoliciesCommand,
|
||||
ListGroupsForUserCommand,
|
||||
ListUserPoliciesCommand,
|
||||
PutUserPolicyCommand,
|
||||
RemoveUserFromGroupCommand
|
||||
} from "@aws-sdk/client-iam";
|
||||
import { z } from "zod";
|
||||
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
|
||||
|
||||
const generateUsername = () => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const AwsIamProvider = (): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretAwsIamSchema.parseAsync(inputs);
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
|
||||
const client = new IAMClient({
|
||||
region: providerInputs.region,
|
||||
credentials: {
|
||||
accessKeyId: providerInputs.accessKey,
|
||||
secretAccessKey: providerInputs.secretAccessKey
|
||||
}
|
||||
});
|
||||
|
||||
return client;
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const client = await getClient(providerInputs);
|
    const isConnected = await client.send(new GetUserCommand({})).then(() => true);
    return isConnected;
  };

  const create = async (inputs: unknown) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = generateUsername();
    const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
    const createUserRes = await client.send(
      new CreateUserCommand({
        Path: awsPath,
        PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
        Tags: [{ Key: "createdBy", Value: "infisical-dynamic-secret" }],
        UserName: username
      })
    );
    if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
    if (userGroups) {
      await Promise.all(
        userGroups
          .split(",")
          .filter(Boolean)
          .map((group) =>
            client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
          )
      );
    }
    if (policyArns) {
      await Promise.all(
        policyArns
          .split(",")
          .filter(Boolean)
          .map((policyArn) =>
            client.send(new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn }))
          )
      );
    }
    if (policyDocument) {
      await client.send(
        new PutUserPolicyCommand({
          UserName: createUserRes.User.UserName,
          PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
          PolicyDocument: policyDocument
        })
      );
    }

    const createAccessKeyRes = await client.send(
      new CreateAccessKeyCommand({
        UserName: createUserRes.User.UserName
      })
    );
    if (!createAccessKeyRes.AccessKey)
      throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });

    return {
      entityId: username,
      data: {
        ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
        SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
        USERNAME: username
      }
    };
  };

  const revoke = async (inputs: unknown, entityId: string) => {
    const providerInputs = await validateProviderInputs(inputs);
    const client = await getClient(providerInputs);

    const username = entityId;

    // remove user from groups
    const userGroups = await client.send(new ListGroupsForUserCommand({ UserName: username }));
    await Promise.all(
      (userGroups.Groups || []).map(({ GroupName }) =>
        client.send(
          new RemoveUserFromGroupCommand({
            GroupName,
            UserName: username
          })
        )
      )
    );

    // remove user access keys
    const userAccessKeys = await client.send(new ListAccessKeysCommand({ UserName: username }));
    await Promise.all(
      (userAccessKeys.AccessKeyMetadata || []).map(({ AccessKeyId }) =>
        client.send(
          new DeleteAccessKeyCommand({
            AccessKeyId,
            UserName: username
          })
        )
      )
    );

    // remove user inline policies
    const userInlinePolicies = await client.send(new ListUserPoliciesCommand({ UserName: username }));
    await Promise.all(
      (userInlinePolicies.PolicyNames || []).map((policyName) =>
        client.send(
          new DeleteUserPolicyCommand({
            PolicyName: policyName,
            UserName: username
          })
        )
      )
    );

    // remove user attached policies
    const userAttachedPolicies = await client.send(new ListAttachedUserPoliciesCommand({ UserName: username }));
    await Promise.all(
      (userAttachedPolicies.AttachedPolicies || []).map((policy) =>
        client.send(
          new DetachUserPolicyCommand({
            PolicyArn: policy.PolicyArn,
            UserName: username
          })
        )
      )
    );

    await client.send(new DeleteUserCommand({ UserName: username }));
    return { entityId: username };
  };

  const renew = async (_inputs: unknown, entityId: string) => {
    // do nothing
    const username = entityId;
    return { entityId: username };
  };

  return {
    validateProviderInputs,
    validateConnection,
    create,
    revoke,
    renew
  };
};
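For orientation, the lease lifecycle of the provider above can be exercised end to end. The snippet below is an illustrative sketch, not part of the change set: it assumes the factory is exported as AwsIamProvider (as the providers index imports it) and that inputs satisfies DynamicSecretAwsIamSchema; every credential value is a placeholder.

// Hypothetical driver for the AWS IAM provider (illustrative only).
import { AwsIamProvider } from "./aws-iam";

const provider = AwsIamProvider();

const inputs = {
  accessKey: "AKIA-PLACEHOLDER", // assumed operator-supplied IAM credentials
  secretAccessKey: "placeholder-secret",
  region: "us-east-1",
  policyArns: "arn:aws:iam::aws:policy/ReadOnlyAccess"
};

const demo = async () => {
  if (!(await provider.validateConnection(inputs))) throw new Error("cannot reach AWS IAM");
  const lease = await provider.create(inputs); // provisions a throwaway IAM user plus access key
  console.log(lease.data.ACCESS_KEY); // handed out as the dynamic secret
  await provider.revoke(inputs, lease.entityId); // detaches policies, deletes keys, removes the user
};

void demo();

Note that revoke strips group memberships, access keys, inline policies, and attached policies before issuing DeleteUserCommand; IAM refuses to delete a user that still owns any of these.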
@@ -1,8 +1,10 @@
+import { AwsIamProvider } from "./aws-iam";
 import { CassandraProvider } from "./cassandra";
 import { DynamicSecretProviders } from "./models";
 import { SqlDatabaseProvider } from "./sql-database";

 export const buildDynamicSecretProviders = () => ({
   [DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
-  [DynamicSecretProviders.Cassandra]: CassandraProvider()
+  [DynamicSecretProviders.Cassandra]: CassandraProvider(),
+  [DynamicSecretProviders.AwsIam]: AwsIamProvider()
 });
@@ -8,38 +8,51 @@ export enum SqlProviders {

 export const DynamicSecretSqlDBSchema = z.object({
   client: z.nativeEnum(SqlProviders),
-  host: z.string().toLowerCase(),
+  host: z.string().trim().toLowerCase(),
   port: z.number(),
-  database: z.string(),
-  username: z.string(),
-  password: z.string(),
-  creationStatement: z.string(),
-  revocationStatement: z.string(),
-  renewStatement: z.string().optional(),
+  database: z.string().trim(),
+  username: z.string().trim(),
+  password: z.string().trim(),
+  creationStatement: z.string().trim(),
+  revocationStatement: z.string().trim(),
+  renewStatement: z.string().trim().optional(),
   ca: z.string().optional()
 });

 export const DynamicSecretCassandraSchema = z.object({
-  host: z.string().toLowerCase(),
+  host: z.string().trim().toLowerCase(),
   port: z.number(),
-  localDataCenter: z.string().min(1),
-  keyspace: z.string().optional(),
-  username: z.string(),
-  password: z.string(),
-  creationStatement: z.string(),
-  revocationStatement: z.string(),
-  renewStatement: z.string().optional(),
+  localDataCenter: z.string().trim().min(1),
+  keyspace: z.string().trim().optional(),
+  username: z.string().trim(),
+  password: z.string().trim(),
+  creationStatement: z.string().trim(),
+  revocationStatement: z.string().trim(),
+  renewStatement: z.string().trim().optional(),
   ca: z.string().optional()
 });

+export const DynamicSecretAwsIamSchema = z.object({
+  accessKey: z.string().trim().min(1),
+  secretAccessKey: z.string().trim().min(1),
+  region: z.string().trim().min(1),
+  awsPath: z.string().trim().optional(),
+  permissionBoundaryPolicyArn: z.string().trim().optional(),
+  policyDocument: z.string().trim().optional(),
+  userGroups: z.string().trim().optional(),
+  policyArns: z.string().trim().optional()
+});
+
 export enum DynamicSecretProviders {
   SqlDatabase = "sql-database",
-  Cassandra = "cassandra"
+  Cassandra = "cassandra",
+  AwsIam = "aws-iam"
 }

 export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
   z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
-  z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema })
+  z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
+  z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema })
 ]);

 export type TDynamicProviderFns = {
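As a quick illustration of how the discriminated union above gates provider inputs, the sketch below (not part of the diff) parses an AWS IAM payload; the values are placeholders, and the .trim() calls strip stray whitespace during validation.

// Illustrative only: exercising the new AwsIam branch of the union.
const payload = {
  type: DynamicSecretProviders.AwsIam,
  inputs: {
    accessKey: "AKIA-PLACEHOLDER",
    secretAccessKey: "placeholder-secret",
    region: " us-east-1 " // surrounding whitespace is removed by .trim()
  }
};

const parsed = DynamicSecretProviderSchema.parse(payload);
// parsed.inputs.region === "us-east-1"; a payload with type "cassandra" is
// validated against DynamicSecretCassandraSchema instead.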
@@ -1,6 +1,6 @@
 import { Knex } from "knex";

-import { SecretKeyEncoding, TUsers } from "@app/db/schemas";
+import { SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
 import { decryptAsymmetric, encryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
 import { BadRequestError, ScimRequestError } from "@app/lib/errors";

@@ -188,9 +188,9 @@ export const addUsersToGroupByUserIds = async ({
   // check if all user(s) are part of the organization
   const existingUserOrgMemberships = await orgDAL.findMembership(
     {
-      orgId: group.orgId,
+      [`${TableName.OrgMembership}.orgId` as "orgId"]: group.orgId,
       $in: {
-        userId: userIds
+        [`${TableName.OrgMembership}.userId` as "userId"]: userIds
       }
     },
     { tx }
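The computed-key pattern introduced here, [`${TableName.OrgMembership}.orgId` as "orgId"], recurs throughout this change set. At runtime it produces a table-qualified column name for the query builder (avoiding ambiguous-column errors once findMembership joins other tables), while the as cast keeps the literal assignable to the DAL's typed filter object. A minimal sketch of the idea, with the table name assumed rather than taken from the schema:

// Sketch: why the qualified key is used (names assumed, not authoritative).
const filter = {
  ["org_memberships.orgId" as "orgId"]: "org_123" // key is table-qualified at runtime
};
// After a join such as knex("org_memberships").join("users", ...), a bare "orgId"
// could match a column on either table; the qualified key resolves it unambiguously,
// while the type of `filter` still reads as { orgId: string } to callers.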
@@ -1,7 +1,14 @@
 import { ForbiddenError } from "@casl/ability";
 import jwt from "jsonwebtoken";

-import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TLdapConfigsUpdate } from "@app/db/schemas";
+import {
+  OrgMembershipRole,
+  OrgMembershipStatus,
+  SecretKeyEncoding,
+  TableName,
+  TLdapConfigsUpdate,
+  TUsers
+} from "@app/db/schemas";
 import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
 import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
 import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
@@ -19,12 +26,15 @@ import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
 import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
 import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
+import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
 import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
+import { getServerCfg } from "@app/services/super-admin/super-admin-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";
 import { normalizeUsername } from "@app/services/user/user-fns";
 import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
+import { UserAliasType } from "@app/services/user-alias/user-alias-types";

 import { TLicenseServiceFactory } from "../license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
@@ -37,13 +47,16 @@ import {
   TGetLdapCfgDTO,
   TGetLdapGroupMapsDTO,
   TLdapLoginDTO,
+  TTestLdapConnectionDTO,
   TUpdateLdapCfgDTO
 } from "./ldap-config-types";
+import { testLDAPConfig } from "./ldap-fns";
 import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";

 type TLdapConfigServiceFactoryDep = {
   ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne">;
   ldapGroupMapDAL: Pick<TLdapGroupMapDALFactory, "find" | "create" | "delete" | "findLdapGroupMapsByLdapConfigId">;
+  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
   orgDAL: Pick<
     TOrgDALFactory,
     "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
@@ -73,6 +86,7 @@ export const ldapConfigServiceFactory = ({
   ldapConfigDAL,
   ldapGroupMapDAL,
   orgDAL,
+  orgMembershipDAL,
   orgBotDAL,
   groupDAL,
   groupProjectDAL,
@@ -96,6 +110,7 @@ export const ldapConfigServiceFactory = ({
     bindDN,
     bindPass,
     searchBase,
+    searchFilter,
     groupSearchBase,
     groupSearchFilter,
     caCert
@@ -173,6 +188,7 @@ export const ldapConfigServiceFactory = ({
       bindPassIV,
       bindPassTag,
       searchBase,
+      searchFilter,
       groupSearchBase,
       groupSearchFilter,
       encryptedCACert,
@@ -194,6 +210,7 @@ export const ldapConfigServiceFactory = ({
     bindDN,
     bindPass,
     searchBase,
+    searchFilter,
     groupSearchBase,
     groupSearchFilter,
     caCert
@@ -212,6 +229,7 @@ export const ldapConfigServiceFactory = ({
       isActive,
       url,
       searchBase,
+      searchFilter,
       groupSearchBase,
       groupSearchFilter
     };
@@ -315,6 +333,7 @@ export const ldapConfigServiceFactory = ({
       bindDN,
       bindPass,
       searchBase: ldapConfig.searchBase,
+      searchFilter: ldapConfig.searchFilter,
       groupSearchBase: ldapConfig.groupSearchBase,
       groupSearchFilter: ldapConfig.groupSearchFilter,
       caCert
@@ -350,7 +369,7 @@ export const ldapConfigServiceFactory = ({
         bindDN: ldapConfig.bindDN,
         bindCredentials: ldapConfig.bindPass,
         searchBase: ldapConfig.searchBase,
-        searchFilter: "(uid={{username}})",
+        searchFilter: ldapConfig.searchFilter || "(uid={{username}})",
         // searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
         ...(ldapConfig.caCert !== ""
           ? {
@@ -372,16 +391,17 @@ export const ldapConfigServiceFactory = ({
     username,
     firstName,
     lastName,
-    emails,
+    email,
     groups,
     orgId,
     relayState
   }: TLdapLoginDTO) => {
     const appCfg = getConfig();
+    const serverCfg = await getServerCfg();
     let userAlias = await userAliasDAL.findOne({
       externalId,
       orgId,
-      aliasType: AuthMethod.LDAP
+      aliasType: UserAliasType.LDAP
     });

     const organization = await orgDAL.findOrgById(orgId);
@@ -389,7 +409,13 @@ export const ldapConfigServiceFactory = ({

     if (userAlias) {
       await userDAL.transaction(async (tx) => {
-        const [orgMembership] = await orgDAL.findMembership({ userId: userAlias.userId }, { tx });
+        const [orgMembership] = await orgDAL.findMembership(
+          {
+            [`${TableName.OrgMembership}.userId` as "userId"]: userAlias.userId,
+            [`${TableName.OrgMembership}.orgId` as "id"]: orgId
+          },
+          { tx }
+        );
         if (!orgMembership) {
           await orgDAL.createMembership(
             {
@@ -412,40 +438,75 @@ export const ldapConfigServiceFactory = ({
       });
     } else {
       userAlias = await userDAL.transaction(async (tx) => {
-        const uniqueUsername = await normalizeUsername(username, userDAL);
-        const newUser = await userDAL.create(
-          {
-            username: uniqueUsername,
-            email: emails[0],
-            firstName,
-            lastName,
-            authMethods: [AuthMethod.LDAP],
-            isGhost: false
-          },
-          tx
-        );
+        let newUser: TUsers | undefined;
+        if (serverCfg.trustSamlEmails) {
+          newUser = await userDAL.findOne(
+            {
+              email,
+              isEmailVerified: true
+            },
+            tx
+          );
+        }
+
+        if (!newUser) {
+          const uniqueUsername = await normalizeUsername(username, userDAL);
+          newUser = await userDAL.create(
+            {
+              username: serverCfg.trustLdapEmails ? email : uniqueUsername,
+              email,
+              isEmailVerified: serverCfg.trustLdapEmails,
+              firstName,
+              lastName,
+              authMethods: [],
+              isGhost: false
+            },
+            tx
+          );
+        }
+
         const newUserAlias = await userAliasDAL.create(
           {
             userId: newUser.id,
             username,
-            aliasType: AuthMethod.LDAP,
+            aliasType: UserAliasType.LDAP,
             externalId,
-            emails,
+            emails: [email],
             orgId
           },
           tx
         );

-        await orgDAL.createMembership(
+        const [orgMembership] = await orgDAL.findMembership(
           {
-            userId: newUser.id,
-            orgId,
-            role: OrgMembershipRole.Member,
-            status: OrgMembershipStatus.Invited
+            [`${TableName.OrgMembership}.userId` as "userId"]: newUser.id,
+            [`${TableName.OrgMembership}.orgId` as "id"]: orgId
           },
-          tx
+          { tx }
         );
+
+        if (!orgMembership) {
+          await orgMembershipDAL.create(
+            {
+              userId: userAlias.userId,
+              inviteEmail: email,
+              orgId,
+              role: OrgMembershipRole.Member,
+              status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
+            },
+            tx
+          );
+          // Only update the membership to Accepted if the user account is already completed.
+        } else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) {
+          await orgDAL.updateMembershipById(
+            orgMembership.id,
+            {
+              status: OrgMembershipStatus.Accepted
+            },
+            tx
+          );
+        }
+
         return newUserAlias;
       });
     }
@@ -536,11 +597,14 @@ export const ldapConfigServiceFactory = ({
       authTokenType: AuthTokenType.PROVIDER_TOKEN,
       userId: user.id,
+      username: user.username,
+      ...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
       firstName,
       lastName,
       organizationName: organization.name,
       organizationId: organization.id,
       organizationSlug: organization.slug,
       authMethod: AuthMethod.LDAP,
+      authType: UserAliasType.LDAP,
       isUserCompleted,
       ...(relayState
         ? {
@@ -650,6 +714,23 @@ export const ldapConfigServiceFactory = ({
     return deletedGroupMap;
   };

+  const testLDAPConnection = async ({ actor, actorId, orgId, actorAuthMethod, actorOrgId }: TTestLdapConnectionDTO) => {
+    const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
+    ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
+
+    const plan = await licenseService.getPlan(orgId);
+    if (!plan.ldap)
+      throw new BadRequestError({
+        message: "Failed to test LDAP connection due to plan restriction. Upgrade plan to test the LDAP connection."
+      });
+
+    const ldapConfig = await getLdapCfg({
+      orgId
+    });
+
+    return testLDAPConfig(ldapConfig);
+  };
+
   return {
     createLdapCfg,
     updateLdapCfg,
@@ -660,6 +741,7 @@ export const ldapConfigServiceFactory = ({
     bootLdap,
     getLdapGroupMaps,
     createLdapGroupMap,
-    deleteLdapGroupMap
+    deleteLdapGroupMap,
+    testLDAPConnection
   };
 };
@@ -20,6 +20,7 @@ export type TCreateLdapCfgDTO = {
   bindDN: string;
   bindPass: string;
   searchBase: string;
+  searchFilter: string;
   groupSearchBase: string;
   groupSearchFilter: string;
   caCert: string;
@@ -33,6 +34,7 @@ export type TUpdateLdapCfgDTO = {
   bindDN: string;
   bindPass: string;
   searchBase: string;
+  searchFilter: string;
   groupSearchBase: string;
   groupSearchFilter: string;
   caCert: string;
@@ -49,7 +51,7 @@ export type TLdapLoginDTO = {
   username: string;
   firstName: string;
   lastName: string;
-  emails: string[];
+  email: string;
   orgId: string;
   groups?: {
     dn: string;
@@ -72,3 +74,7 @@ export type TDeleteLdapGroupMapDTO = {
   ldapConfigId: string;
   ldapGroupMapId: string;
 } & TOrgPermission;
+
+export type TTestLdapConnectionDTO = {
+  ldapConfigId: string;
+} & TOrgPermission;
@ -4,6 +4,65 @@ import { logger } from "@app/lib/logger";
|
||||
|
||||
import { TLDAPConfig } from "./ldap-config-types";
|
||||
|
||||
export const isValidLdapFilter = (filter: string) => {
|
||||
try {
|
||||
ldapjs.parseFilter(filter);
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error("Invalid LDAP filter");
|
||||
logger.error(error);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Test the LDAP configuration by attempting to bind to the LDAP server
|
||||
* @param ldapConfig - The LDAP configuration to test
|
||||
* @returns {Boolean} isConnected - Whether or not the connection was successful
|
||||
*/
|
||||
export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean> => {
|
||||
return new Promise((resolve) => {
|
||||
const ldapClient = ldapjs.createClient({
|
||||
url: ldapConfig.url,
|
||||
bindDN: ldapConfig.bindDN,
|
||||
bindCredentials: ldapConfig.bindPass,
|
||||
...(ldapConfig.caCert !== ""
|
||||
? {
|
||||
tlsOptions: {
|
||||
ca: [ldapConfig.caCert]
|
||||
}
|
||||
}
|
||||
: {})
|
||||
});
|
||||
|
||||
ldapClient.on("error", (err) => {
|
||||
logger.error("LDAP client error:", err);
|
||||
logger.error(err);
|
||||
resolve(false);
|
||||
});
|
||||
|
||||
ldapClient.bind(ldapConfig.bindDN, ldapConfig.bindPass, (err) => {
|
||||
if (err) {
|
||||
logger.error("Error binding to LDAP");
|
||||
logger.error(err);
|
||||
ldapClient.unbind();
|
||||
resolve(false);
|
||||
} else {
|
||||
logger.info("Successfully connected and bound to LDAP.");
|
||||
ldapClient.unbind();
|
||||
resolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Search for groups in the LDAP server
|
||||
* @param ldapConfig - The LDAP configuration to use
|
||||
* @param filter - The filter to use when searching for groups
|
||||
* @param base - The base to search from
|
||||
* @returns
|
||||
*/
|
||||
export const searchGroups = async (
|
||||
ldapConfig: TLDAPConfig,
|
||||
filter: string,
|
||||
@ -31,11 +90,7 @@ export const searchGroups = async (
|
||||
},
|
||||
(err, res) => {
|
||||
if (err) {
|
||||
ldapClient.unbind((unbindError) => {
|
||||
if (unbindError) {
|
||||
logger.error("Error unbinding LDAP client:", unbindError);
|
||||
}
|
||||
});
|
||||
ldapClient.unbind();
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
@ -51,19 +106,11 @@ export const searchGroups = async (
|
||||
groups.push({ dn, cn });
|
||||
});
|
||||
res.on("error", (error) => {
|
||||
ldapClient.unbind((unbindError) => {
|
||||
if (unbindError) {
|
||||
logger.error("Error unbinding LDAP client:", unbindError);
|
||||
}
|
||||
});
|
||||
ldapClient.unbind();
|
||||
reject(error);
|
||||
});
|
||||
res.on("end", () => {
|
||||
ldapClient.unbind((unbindError) => {
|
||||
if (unbindError) {
|
||||
logger.error("Error unbinding LDAP client:", unbindError);
|
||||
}
|
||||
});
|
||||
ldapClient.unbind();
|
||||
resolve(groups);
|
||||
});
|
||||
}
|
||||
|
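A hedged usage sketch for testLDAPConfig (illustrative; only the fields the function actually reads are filled in, hence the cast, and all values are placeholders):

// Illustrative only: probe an LDAP server with placeholder values.
const probe = async () => {
  const reachable = await testLDAPConfig({
    url: "ldaps://ldap.example.com:636",
    bindDN: "cn=admin,dc=example,dc=com",
    bindPass: "placeholder",
    caCert: ""
  } as TLDAPConfig);
  return reachable; // true if the bind succeeded; false on a client error or failed bind
};

Because the promise only ever resolves and never rejects, callers can branch on the boolean without a try/catch.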
@@ -24,6 +24,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
   customAlerts: false,
   auditLogs: false,
   auditLogsRetentionDays: 0,
+  auditLogStreams: false,
+  auditLogStreamLimit: 3,
   samlSSO: false,
   scim: false,
   ldap: false,
@@ -40,6 +40,8 @@ export type TFeatureSet = {
   customAlerts: false;
   auditLogs: false;
   auditLogsRetentionDays: 0;
+  auditLogStreams: false;
+  auditLogStreamLimit: 3;
   samlSSO: false;
   scim: false;
   ldap: false;
@@ -7,7 +7,8 @@ import {
   SecretKeyEncoding,
   TableName,
   TSamlConfigs,
-  TSamlConfigsUpdate
+  TSamlConfigsUpdate,
+  TUsers
 } from "@app/db/schemas";
 import { getConfig } from "@app/lib/config/env";
 import {
@@ -19,10 +20,18 @@ import {
   infisicalSymmetricEncypt
 } from "@app/lib/crypto/encryption";
 import { BadRequestError } from "@app/lib/errors";
-import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
+import { AuthTokenType } from "@app/services/auth/auth-type";
+import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
+import { TokenType } from "@app/services/auth-token/auth-token-types";
 import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
+import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
+import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
+import { getServerCfg } from "@app/services/super-admin/super-admin-service";
 import { TUserDALFactory } from "@app/services/user/user-dal";
+import { normalizeUsername } from "@app/services/user/user-fns";
+import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
+import { UserAliasType } from "@app/services/user-alias/user-alias-types";

 import { TLicenseServiceFactory } from "../license/license-service";
 import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
@@ -31,15 +40,19 @@ import { TSamlConfigDALFactory } from "./saml-config-dal";
 import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } from "./saml-config-types";

 type TSamlConfigServiceFactoryDep = {
-  samlConfigDAL: TSamlConfigDALFactory;
-  userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById">;
+  samlConfigDAL: Pick<TSamlConfigDALFactory, "create" | "findOne" | "update" | "findById">;
+  userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById" | "findById">;
+  userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
   orgDAL: Pick<
     TOrgDALFactory,
     "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
   >;
+  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
   orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
   permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+  tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
+  smtpService: Pick<TSmtpService, "sendMail">;
 };

 export type TSamlConfigServiceFactory = ReturnType<typeof samlConfigServiceFactory>;
@@ -48,9 +61,13 @@ export const samlConfigServiceFactory = ({
   samlConfigDAL,
   orgBotDAL,
   orgDAL,
+  orgMembershipDAL,
   userDAL,
+  userAliasDAL,
   permissionService,
-  licenseService
+  licenseService,
+  tokenService,
+  smtpService
 }: TSamlConfigServiceFactoryDep) => {
   const createSamlCfg = async ({
     cert,
@@ -305,7 +322,7 @@ export const samlConfigServiceFactory = ({
   };

   const samlLogin = async ({
-    username,
+    externalId,
     email,
     firstName,
     lastName,
@@ -314,37 +331,40 @@ export const samlConfigServiceFactory = ({
     relayState
   }: TSamlLoginDTO) => {
     const appCfg = getConfig();
-    let user = await userDAL.findOne({ username });
+    const serverCfg = await getServerCfg();
+    const userAlias = await userAliasDAL.findOne({
+      externalId,
+      orgId,
+      aliasType: UserAliasType.SAML
+    });

     const organization = await orgDAL.findOrgById(orgId);
     if (!organization) throw new BadRequestError({ message: "Org not found" });

     // TODO(dangtony98): remove this after aliases update
     if (authProvider === AuthMethod.KEYCLOAK_SAML && appCfg.LICENSE_SERVER_KEY) {
       throw new BadRequestError({ message: "Keycloak SAML is not yet available on Infisical Cloud" });
     }

-    if (user) {
-      await userDAL.transaction(async (tx) => {
+    let user: TUsers;
+    if (userAlias) {
+      user = await userDAL.transaction(async (tx) => {
+        const foundUser = await userDAL.findById(userAlias.userId, tx);
         const [orgMembership] = await orgDAL.findMembership(
           {
-            userId: user.id,
+            [`${TableName.OrgMembership}.userId` as "userId"]: foundUser.id,
+            [`${TableName.OrgMembership}.orgId` as "id"]: orgId
           },
           { tx }
         );
         if (!orgMembership) {
-          await orgDAL.createMembership(
+          await orgMembershipDAL.create(
             {
-              userId: user.id,
-              orgId,
+              userId: userAlias.userId,
               inviteEmail: email,
+              orgId,
               role: OrgMembershipRole.Member,
-              status: OrgMembershipStatus.Accepted
+              status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
             },
             tx
           );
-        } else if (orgMembership.status === OrgMembershipStatus.Invited) {
+          // Only update the membership to Accepted if the user account is already completed.
+        } else if (orgMembership.status === OrgMembershipStatus.Invited && foundUser.isAccepted) {
           await orgDAL.updateMembershipById(
             orgMembership.id,
             {
@@ -353,40 +373,97 @@ export const samlConfigServiceFactory = ({
             tx
           );
         }
+
+        return foundUser;
       });
     } else {
       user = await userDAL.transaction(async (tx) => {
-        const newUser = await userDAL.create(
+        let newUser: TUsers | undefined;
+        if (serverCfg.trustSamlEmails) {
+          newUser = await userDAL.findOne(
+            {
+              email,
+              isEmailVerified: true
+            },
+            tx
+          );
+        }
+
+        if (!newUser) {
+          const uniqueUsername = await normalizeUsername(`${firstName ?? ""}-${lastName ?? ""}`, userDAL);
+          newUser = await userDAL.create(
+            {
+              username: serverCfg.trustSamlEmails ? email : uniqueUsername,
+              email,
+              isEmailVerified: serverCfg.trustSamlEmails,
+              firstName,
+              lastName,
+              authMethods: [],
+              isGhost: false
+            },
+            tx
+          );
+        }
+
+        await userAliasDAL.create(
           {
-            username,
-            email,
-            firstName,
-            lastName,
-            authMethods: [AuthMethod.EMAIL],
-            isGhost: false
+            userId: newUser.id,
+            aliasType: UserAliasType.SAML,
+            externalId,
+            emails: email ? [email] : [],
+            orgId
           },
           tx
         );
-        await orgDAL.createMembership({
-          inviteEmail: email,
-          orgId,
-          role: OrgMembershipRole.Member,
-          status: OrgMembershipStatus.Invited
-        });
+
+        const [orgMembership] = await orgDAL.findMembership(
+          {
+            [`${TableName.OrgMembership}.userId` as "userId"]: newUser.id,
+            [`${TableName.OrgMembership}.orgId` as "id"]: orgId
+          },
+          { tx }
+        );
+
+        if (!orgMembership) {
+          await orgMembershipDAL.create(
+            {
+              userId: newUser.id,
+              inviteEmail: email,
+              orgId,
+              role: OrgMembershipRole.Member,
+              status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
+            },
+            tx
+          );
+          // Only update the membership to Accepted if the user account is already completed.
+        } else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) {
+          await orgDAL.updateMembershipById(
+            orgMembership.id,
+            {
+              status: OrgMembershipStatus.Accepted
+            },
+            tx
+          );
+        }
+
+        return newUser;
       });
     }

     const isUserCompleted = Boolean(user.isAccepted);
     const providerAuthToken = jwt.sign(
       {
         authTokenType: AuthTokenType.PROVIDER_TOKEN,
         userId: user.id,
         username: user.username,
         ...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
         firstName,
         lastName,
         organizationName: organization.name,
         organizationId: organization.id,
         organizationSlug: organization.slug,
         authMethod: authProvider,
+        authType: UserAliasType.SAML,
         isUserCompleted,
         ...(relayState
           ? {
@@ -402,6 +479,22 @@ export const samlConfigServiceFactory = ({

     await samlConfigDAL.update({ orgId }, { lastUsed: new Date() });

+    if (user.email && !user.isEmailVerified) {
+      const token = await tokenService.createTokenForUser({
+        type: TokenType.TOKEN_EMAIL_VERIFICATION,
+        userId: user.id
+      });
+
+      await smtpService.sendMail({
+        template: SmtpTemplates.EmailVerification,
+        subjectLine: "Infisical confirmation code",
+        recipients: [user.email],
+        substitutions: {
+          code: token
+        }
+      });
+    }
+
     return { isUserCompleted, providerAuthToken };
   };
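The new-user path above pivots on the trustSamlEmails server setting. The helper below is an illustrative condensation, not code from the diff; the function name and types are hypothetical.

// Hypothetical condensation of the provisioning rule above (illustrative only).
type ProvisionPlan = { username: string; isEmailVerified: boolean };

const planSamlProvision = (trustSamlEmails: boolean, email: string, fallbackUsername: string): ProvisionPlan =>
  trustSamlEmails
    ? { username: email, isEmailVerified: true } // trusted IdP: the asserted email doubles as the username
    : { username: fallbackUsername, isEmailVerified: false }; // untrusted: unique username, verify by email later

// planSamlProvision(true, "jane@acme.com", "jane-doe-x1")
//   -> { username: "jane@acme.com", isEmailVerified: true }

The unverified branch is what feeds the confirmation-code email sent at the end of samlLogin.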
@@ -45,8 +45,8 @@ export type TGetSamlCfgDTO =
 };

 export type TSamlLoginDTO = {
-  username: string;
-  email?: string;
+  externalId: string;
+  email: string;
   firstName: string;
   lastName?: string;
   authProvider: string;
@ -2,31 +2,31 @@ import { TListScimGroups, TListScimUsers, TScimGroup, TScimUser } from "./scim-t
|
||||
|
||||
export const buildScimUserList = ({
|
||||
scimUsers,
|
||||
offset,
|
||||
startIndex,
|
||||
limit
|
||||
}: {
|
||||
scimUsers: TScimUser[];
|
||||
offset: number;
|
||||
startIndex: number;
|
||||
limit: number;
|
||||
}): TListScimUsers => {
|
||||
return {
|
||||
Resources: scimUsers,
|
||||
itemsPerPage: limit,
|
||||
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
|
||||
startIndex: offset,
|
||||
startIndex,
|
||||
totalResults: scimUsers.length
|
||||
};
|
||||
};
|
||||
|
||||
export const buildScimUser = ({
|
||||
userId,
|
||||
orgMembershipId,
|
||||
username,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
active
|
||||
}: {
|
||||
userId: string;
|
||||
orgMembershipId: string;
|
||||
username: string;
|
||||
email?: string | null;
|
||||
firstName: string;
|
||||
@ -35,7 +35,7 @@ export const buildScimUser = ({
|
||||
}): TScimUser => {
|
||||
const scimUser = {
|
||||
schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
|
||||
id: userId,
|
||||
id: orgMembershipId,
|
||||
userName: username,
|
||||
displayName: `${firstName} ${lastName}`,
|
||||
name: {
|
||||
@ -65,18 +65,18 @@ export const buildScimUser = ({
|
||||
|
||||
export const buildScimGroupList = ({
|
||||
scimGroups,
|
||||
offset,
|
||||
startIndex,
|
||||
limit
|
||||
}: {
|
||||
scimGroups: TScimGroup[];
|
||||
offset: number;
|
||||
startIndex: number;
|
||||
limit: number;
|
||||
}): TListScimGroups => {
|
||||
return {
|
||||
Resources: scimGroups,
|
||||
itemsPerPage: limit,
|
||||
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
|
||||
startIndex: offset,
|
||||
startIndex,
|
||||
totalResults: scimGroups.length
|
||||
};
|
||||
};
|
||||
|
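The offset to startIndex rename matters because SCIM (RFC 7644) pagination is 1-based: startIndex names the position of the first returned resource, so a 0-based database offset must subtract one. A minimal sketch of the mapping (illustrative; nothing assumed beyond the arithmetic):

// SCIM startIndex is 1-based; DAL offsets are 0-based.
const toDbPage = (startIndex: number, limit: number) => ({
  offset: Math.max(startIndex, 1) - 1, // guards against clients sending 0
  limit
});

// toDbPage(1, 100) -> { offset: 0, limit: 100 }
// toDbPage(101, 100) -> { offset: 100, limit: 100 }

The service code applies startIndex - 1 directly; the Math.max guard here is an extra assumption for robustness.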
@ -2,7 +2,7 @@ import { ForbiddenError } from "@casl/ability";
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipRole, OrgMembershipStatus, TableName, TGroups } from "@app/db/schemas";
|
||||
import { OrgMembershipRole, OrgMembershipStatus, TableName, TGroups, TOrgMemberships, TUsers } from "@app/db/schemas";
|
||||
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
|
||||
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
|
||||
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
|
||||
@ -11,16 +11,21 @@ import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { TOrgPermission } from "@app/lib/types";
|
||||
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { AuthTokenType } from "@app/services/auth/auth-type";
|
||||
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
|
||||
import { TOrgDALFactory } from "@app/services/org/org-dal";
|
||||
import { deleteOrgMembership } from "@app/services/org/org-fns";
|
||||
import { deleteOrgMembershipFn } from "@app/services/org/org-fns";
|
||||
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
|
||||
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
|
||||
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
|
||||
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
|
||||
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
|
||||
import { TUserDALFactory } from "@app/services/user/user-dal";
|
||||
import { normalizeUsername } from "@app/services/user/user-fns";
|
||||
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
|
||||
import { UserAliasType } from "@app/services/user-alias/user-alias-types";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
@ -47,24 +52,32 @@ import {
|
||||
|
||||
type TScimServiceFactoryDep = {
|
||||
scimDAL: Pick<TScimDALFactory, "create" | "find" | "findById" | "deleteById">;
|
||||
userDAL: Pick<TUserDALFactory, "find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch">;
|
||||
userDAL: Pick<
|
||||
TUserDALFactory,
|
||||
"find" | "findOne" | "create" | "transaction" | "findUserEncKeyByUserIdsBatch" | "findById"
|
||||
>;
|
||||
userAliasDAL: Pick<TUserAliasDALFactory, "findOne" | "create" | "delete">;
|
||||
orgDAL: Pick<
|
||||
TOrgDALFactory,
|
||||
"createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction"
|
||||
"createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction" | "updateMembershipById"
|
||||
>;
|
||||
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "findOne" | "create" | "updateById">;
|
||||
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete">;
|
||||
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete" | "findProjectMembershipsByUserId">;
|
||||
groupDAL: Pick<
|
||||
TGroupDALFactory,
|
||||
"create" | "findOne" | "findAllGroupMembers" | "update" | "delete" | "findGroups" | "transaction"
|
||||
>;
|
||||
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
|
||||
userGroupMembershipDAL: TUserGroupMembershipDALFactory; // TODO: Pick
|
||||
userGroupMembershipDAL: Pick<
|
||||
TUserGroupMembershipDALFactory,
|
||||
"find" | "transaction" | "insertMany" | "filterProjectsByUserMembership" | "delete"
|
||||
>;
|
||||
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
|
||||
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
smtpService: TSmtpService;
|
||||
smtpService: Pick<TSmtpService, "sendMail">;
|
||||
};
|
||||
|
||||
export type TScimServiceFactory = ReturnType<typeof scimServiceFactory>;
|
||||
@ -73,7 +86,9 @@ export const scimServiceFactory = ({
|
||||
licenseService,
|
||||
scimDAL,
|
||||
userDAL,
|
||||
userAliasDAL,
|
||||
orgDAL,
|
||||
orgMembershipDAL,
|
||||
projectDAL,
|
||||
projectMembershipDAL,
|
||||
groupDAL,
|
||||
@ -160,7 +175,7 @@ export const scimServiceFactory = ({
|
||||
};
|
||||
|
||||
// SCIM server endpoints
|
||||
const listScimUsers = async ({ offset, limit, filter, orgId }: TListScimUsersDTO): Promise<TListScimUsers> => {
|
||||
const listScimUsers = async ({ startIndex, limit, filter, orgId }: TListScimUsersDTO): Promise<TListScimUsers> => {
|
||||
const org = await orgDAL.findById(orgId);
|
||||
|
||||
if (!org.scimEnabled)
|
||||
@ -178,11 +193,11 @@ export const scimServiceFactory = ({
|
||||
attributeName = "email";
|
||||
}
|
||||
|
||||
return { [attributeName]: parsedValue };
|
||||
return { [attributeName]: parsedValue.replace(/"/g, "") };
|
||||
};
|
||||
|
||||
const findOpts = {
|
||||
...(offset && { offset }),
|
||||
...(startIndex && { offset: startIndex - 1 }),
|
||||
...(limit && { limit })
|
||||
};
|
||||
|
||||
@ -194,10 +209,10 @@ export const scimServiceFactory = ({
|
||||
findOpts
|
||||
);
|
||||
|
||||
const scimUsers = users.map(({ userId, username, firstName, lastName, email }) =>
|
||||
const scimUsers = users.map(({ id, externalId, username, firstName, lastName, email }) =>
|
||||
buildScimUser({
|
||||
userId: userId ?? "",
|
||||
username,
|
||||
orgMembershipId: id ?? "",
|
||||
username: externalId ?? username,
|
||||
firstName: firstName ?? "",
|
||||
lastName: lastName ?? "",
|
||||
email,
|
||||
@ -207,16 +222,16 @@ export const scimServiceFactory = ({
|
||||
|
||||
return buildScimUserList({
|
||||
scimUsers,
|
||||
offset,
|
||||
startIndex,
|
||||
limit
|
||||
});
|
||||
};
|
||||
|
||||
const getScimUser = async ({ userId, orgId }: TGetScimUserDTO) => {
|
||||
const getScimUser = async ({ orgMembershipId, orgId }: TGetScimUserDTO) => {
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
userId,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
|
||||
[`${TableName.OrgMembership}.orgId` as "orgId"]: orgId
|
||||
})
|
||||
.catch(() => {
|
||||
throw new ScimRequestError({
|
||||
@ -238,8 +253,8 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
|
||||
return buildScimUser({
|
||||
userId: membership.userId as string,
|
||||
username: membership.username,
|
||||
orgMembershipId: membership.id,
|
||||
username: membership.externalId ?? membership.username,
|
||||
email: membership.email ?? "",
|
||||
firstName: membership.firstName as string,
|
||||
lastName: membership.lastName as string,
|
||||
@ -247,7 +262,9 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const createScimUser = async ({ username, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
|
||||
const createScimUser = async ({ externalId, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
|
||||
if (!email) throw new ScimRequestError({ detail: "Invalid request. Missing email.", status: 400 });
|
||||
|
||||
const org = await orgDAL.findById(orgId);
|
||||
|
||||
if (!org)
|
||||
@ -262,67 +279,121 @@ export const scimServiceFactory = ({
|
||||
status: 403
|
||||
});
|
||||
|
||||
let user = await userDAL.findOne({
|
||||
username
|
||||
const appCfg = getConfig();
|
||||
const serverCfg = await getServerCfg();
|
||||
|
||||
const userAlias = await userAliasDAL.findOne({
|
||||
externalId,
|
||||
orgId,
|
||||
aliasType: UserAliasType.SAML
|
||||
});
|
||||
|
||||
if (user) {
|
||||
await userDAL.transaction(async (tx) => {
|
||||
const [orgMembership] = await orgDAL.findMembership(
|
||||
const { user: createdUser, orgMembership: createdOrgMembership } = await userDAL.transaction(async (tx) => {
|
||||
let user: TUsers | undefined;
|
||||
let orgMembership: TOrgMemberships;
|
||||
if (userAlias) {
|
||||
user = await userDAL.findById(userAlias.userId, tx);
|
||||
orgMembership = await orgMembershipDAL.findOne(
|
||||
{
|
||||
userId: user.id,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
orgId
|
||||
},
|
||||
{ tx }
|
||||
tx
|
||||
);
|
||||
if (orgMembership)
|
||||
throw new ScimRequestError({
|
||||
detail: "User already exists in the database",
|
||||
status: 409
|
||||
});
|
||||
|
||||
if (!orgMembership) {
|
||||
await orgDAL.createMembership(
|
||||
orgMembership = await orgMembershipDAL.create(
|
||||
{
|
||||
userId: user.id,
|
||||
orgId,
|
||||
userId: userAlias.userId,
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
role: OrgMembershipRole.Member,
|
||||
status: OrgMembershipStatus.Invited
|
||||
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
|
||||
},
|
||||
tx
|
||||
);
|
||||
} else if (orgMembership.status === OrgMembershipStatus.Invited && user.isAccepted) {
|
||||
orgMembership = await orgMembershipDAL.updateById(
|
||||
orgMembership.id,
|
||||
{
|
||||
status: OrgMembershipStatus.Accepted
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
user = await userDAL.transaction(async (tx) => {
|
||||
const newUser = await userDAL.create(
|
||||
} else {
|
||||
if (serverCfg.trustSamlEmails) {
|
||||
user = await userDAL.findOne(
|
||||
{
|
||||
email,
|
||||
isEmailVerified: true
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
const uniqueUsername = await normalizeUsername(`${firstName}-${lastName}`, userDAL);
|
||||
user = await userDAL.create(
|
||||
{
|
||||
username: serverCfg.trustSamlEmails ? email : uniqueUsername,
|
||||
email,
|
||||
isEmailVerified: serverCfg.trustSamlEmails,
|
||||
firstName,
|
||||
lastName,
|
||||
authMethods: [],
|
||||
isGhost: false
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
await userAliasDAL.create(
|
||||
{
|
||||
username,
|
||||
email,
|
||||
firstName,
|
||||
lastName,
|
||||
authMethods: [AuthMethod.EMAIL],
|
||||
isGhost: false
|
||||
userId: user.id,
|
||||
aliasType: UserAliasType.SAML,
|
||||
externalId,
|
||||
emails: email ? [email] : [],
|
||||
orgId
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
await orgDAL.createMembership(
|
||||
const [foundOrgMembership] = await orgDAL.findMembership(
|
||||
{
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
userId: newUser.id,
|
||||
role: OrgMembershipRole.Member,
|
||||
status: OrgMembershipStatus.Invited
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: user.id,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
},
|
||||
tx
|
||||
{ tx }
|
||||
);
|
||||
return newUser;
|
||||
});
|
||||
}
|
||||
|
||||
const appCfg = getConfig();
|
||||
orgMembership = foundOrgMembership;
|
||||
|
||||
if (!orgMembership) {
|
||||
orgMembership = await orgMembershipDAL.create(
|
||||
{
|
||||
userId: user.id,
|
||||
inviteEmail: email,
|
||||
orgId,
|
||||
role: OrgMembershipRole.Member,
|
||||
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
|
||||
},
|
||||
tx
|
||||
);
|
||||
// Only update the membership to Accepted if the user account is already completed.
|
||||
} else if (orgMembership.status === OrgMembershipStatus.Invited && user.isAccepted) {
|
||||
orgMembership = await orgDAL.updateMembershipById(
|
||||
orgMembership.id,
|
||||
{
|
||||
status: OrgMembershipStatus.Accepted
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return { user, orgMembership };
|
||||
});
|
||||
|
||||
if (email) {
|
||||
await smtpService.sendMail({
|
||||
@ -337,20 +408,20 @@ export const scimServiceFactory = ({
|
||||
}
|
||||
|
||||
return buildScimUser({
|
||||
userId: user.id,
|
||||
username: user.username,
|
||||
firstName: user.firstName as string,
|
||||
lastName: user.lastName as string,
|
||||
email: user.email ?? "",
|
||||
orgMembershipId: createdOrgMembership.id,
|
||||
username: externalId,
|
||||
firstName: createdUser.firstName as string,
|
||||
lastName: createdUser.lastName as string,
|
||||
email: createdUser.email ?? "",
|
||||
active: true
|
||||
});
|
||||
};
|
||||
|
||||
const updateScimUser = async ({ userId, orgId, operations }: TUpdateScimUserDTO) => {
|
||||
const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
userId,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
|
||||
[`${TableName.OrgMembership}.orgId` as "orgId"]: orgId
|
||||
})
|
||||
.catch(() => {
|
||||
throw new ScimRequestError({
|
||||
@ -386,18 +457,20 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
|
||||
if (!active) {
|
||||
await deleteOrgMembership({
|
||||
await deleteOrgMembershipFn({
|
||||
orgMembershipId: membership.id,
|
||||
orgId: membership.orgId,
|
||||
orgDAL,
|
||||
projectDAL,
|
||||
projectMembershipDAL
|
||||
projectMembershipDAL,
|
||||
projectKeyDAL,
|
||||
userAliasDAL,
|
||||
licenseService
|
||||
});
|
||||
}
|
||||
|
||||
return buildScimUser({
|
||||
userId: membership.userId as string,
|
||||
username: membership.username,
|
||||
orgMembershipId: membership.id,
|
||||
username: membership.externalId ?? membership.username,
|
||||
email: membership.email,
|
||||
firstName: membership.firstName as string,
|
||||
lastName: membership.lastName as string,
|
||||
@ -405,11 +478,11 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const replaceScimUser = async ({ userId, active, orgId }: TReplaceScimUserDTO) => {
|
||||
const replaceScimUser = async ({ orgMembershipId, active, orgId }: TReplaceScimUserDTO) => {
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
userId,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
|
||||
[`${TableName.OrgMembership}.orgId` as "orgId"]: orgId
|
||||
})
|
||||
.catch(() => {
|
||||
throw new ScimRequestError({
|
||||
@ -431,19 +504,20 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
|
||||
if (!active) {
|
||||
// tx
|
||||
await deleteOrgMembership({
|
||||
await deleteOrgMembershipFn({
|
||||
orgMembershipId: membership.id,
|
||||
orgId: membership.orgId,
|
||||
orgDAL,
|
||||
projectDAL,
|
||||
projectMembershipDAL
|
||||
projectMembershipDAL,
|
||||
projectKeyDAL,
|
||||
userAliasDAL,
|
||||
licenseService
|
||||
});
|
||||
}
|
||||
|
||||
return buildScimUser({
|
||||
userId: membership.userId as string,
|
||||
username: membership.username,
|
||||
orgMembershipId: membership.id,
|
||||
username: membership.externalId ?? membership.username,
|
||||
email: membership.email,
|
||||
firstName: membership.firstName as string,
|
||||
lastName: membership.lastName as string,
|
||||
@ -451,18 +525,11 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const deleteScimUser = async ({ userId, orgId }: TDeleteScimUserDTO) => {
|
||||
const [membership] = await orgDAL
|
||||
.findMembership({
|
||||
userId,
|
||||
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
|
||||
})
|
||||
.catch(() => {
|
||||
throw new ScimRequestError({
|
||||
detail: "User not found",
|
||||
status: 404
|
||||
});
|
||||
});
|
||||
const deleteScimUser = async ({ orgMembershipId, orgId }: TDeleteScimUserDTO) => {
|
||||
const [membership] = await orgDAL.findMembership({
|
||||
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
|
||||
[`${TableName.OrgMembership}.orgId` as "orgId"]: orgId
|
||||
});
|
||||
|
||||
if (!membership)
|
||||
throw new ScimRequestError({
|
||||
@ -477,18 +544,20 @@ export const scimServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
await deleteOrgMembership({
|
||||
await deleteOrgMembershipFn({
|
||||
orgMembershipId: membership.id,
|
||||
orgId: membership.orgId,
|
||||
orgDAL,
|
||||
projectDAL,
|
||||
projectMembershipDAL
|
||||
projectMembershipDAL,
|
||||
projectKeyDAL,
|
||||
userAliasDAL,
|
||||
licenseService
|
||||
});
|
||||
|
||||
return {}; // intentionally return empty object upon success
|
||||
};
|
||||
|
||||
const listScimGroups = async ({ orgId, offset, limit }: TListScimGroupsDTO) => {
|
||||
const listScimGroups = async ({ orgId, startIndex, limit }: TListScimGroupsDTO) => {
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.groups)
|
||||
throw new BadRequestError({
|
||||
@ -509,21 +578,27 @@ export const scimServiceFactory = ({
|
||||
status: 403
|
||||
});
|
||||
|
||||
const groups = await groupDAL.findGroups({
|
||||
orgId
|
||||
});
|
||||
const groups = await groupDAL.findGroups(
|
||||
{
|
||||
orgId
|
||||
},
|
||||
{
|
||||
offset: startIndex - 1,
|
||||
limit
|
||||
}
|
||||
);
|
||||
|
||||
const scimGroups = groups.map((group) =>
|
||||
buildScimGroup({
|
||||
groupId: group.id,
|
||||
name: group.name,
|
||||
members: []
|
||||
members: [] // does this need to be populated?
|
||||
})
|
||||
);
|
||||
|
||||
return buildScimGroupList({
|
||||
scimGroups,
|
||||
offset,
|
||||
startIndex,
|
||||
limit
|
||||
});
|
||||
};
|
||||
@ -562,9 +637,15 @@ export const scimServiceFactory = ({
|
||||
);
|
||||
|
||||
if (members && members.length) {
|
||||
const orgMemberships = await orgMembershipDAL.find({
|
||||
$in: {
|
||||
id: members.map((member) => member.value)
|
||||
}
|
||||
});
|
||||
|
||||
const newMembers = await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: members.map((member) => member.value),
|
||||
userIds: orgMemberships.map((membership) => membership.userId as string),
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
@ -581,12 +662,19 @@ export const scimServiceFactory = ({
|
||||
return { group, newMembers: [] };
|
||||
});
|
||||
|
||||
const orgMemberships = await orgDAL.findMembership({
|
||||
[`${TableName.OrgMembership}.orgId` as "orgId"]: orgId,
|
||||
$in: {
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: newGroup.newMembers.map((member) => member.id)
|
||||
}
|
||||
});
|
||||
|
||||
return buildScimGroup({
|
||||
groupId: newGroup.group.id,
|
||||
name: newGroup.group.name,
|
||||
members: newGroup.newMembers.map((member) => ({
|
||||
value: member.id,
|
||||
display: `${member.firstName} ${member.lastName}`
|
||||
members: orgMemberships.map(({ id, firstName, lastName }) => ({
|
||||
value: id,
|
||||
display: `${firstName} ${lastName}`
|
||||
}))
|
||||
});
|
||||
};
|
||||
@ -615,15 +703,22 @@ export const scimServiceFactory = ({
|
||||
groupId: group.id
|
||||
});
|
||||
|
||||
const orgMemberships = await orgDAL.findMembership({
|
||||
[`${TableName.OrgMembership}.orgId` as "orgId"]: orgId,
|
||||
$in: {
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: users
|
||||
.filter((user) => user.isPartOfGroup)
|
||||
.map((user) => user.id)
|
||||
}
|
||||
});
|
||||
|
||||
return buildScimGroup({
|
||||
groupId: group.id,
|
||||
name: group.name,
|
||||
members: users
|
||||
.filter((user) => user.isPartOfGroup)
|
||||
.map((user) => ({
|
||||
value: user.id,
|
||||
display: `${user.firstName} ${user.lastName}`
|
||||
}))
|
||||
members: orgMemberships.map(({ id, firstName, lastName }) => ({
|
||||
value: id,
|
||||
display: `${firstName} ${lastName}`
|
||||
}))
|
||||
});
|
||||
};
|
||||
|
||||
@ -667,7 +762,13 @@ export const scimServiceFactory = ({
|
||||
}
|
||||
|
||||
if (members) {
|
||||
const membersIdsSet = new Set(members.map((member) => member.value));
|
||||
const orgMemberships = await orgMembershipDAL.find({
|
||||
$in: {
|
||||
id: members.map((member) => member.value)
|
||||
}
|
||||
});
|
||||
|
||||
const membersIdsSet = new Set(orgMemberships.map((orgMembership) => orgMembership.userId));
|
||||
|
||||
const directMemberUserIds = (
|
||||
await userGroupMembershipDAL.find({
|
||||
@ -686,13 +787,13 @@ export const scimServiceFactory = ({
|
||||
const allMembersUserIds = directMemberUserIds.concat(pendingGroupAdditionsUserIds);
|
||||
const allMembersUserIdsSet = new Set(allMembersUserIds);
|
||||
|
||||
const toAddUserIds = members.filter((member) => !allMembersUserIdsSet.has(member.value));
|
||||
const toAddUserIds = orgMemberships.filter((member) => !allMembersUserIdsSet.has(member.userId as string));
|
||||
const toRemoveUserIds = allMembersUserIds.filter((userId) => !membersIdsSet.has(userId));
|
||||
|
||||
if (toAddUserIds.length) {
|
||||
await addUsersToGroupByUserIds({
|
||||
group,
|
||||
userIds: toAddUserIds.map((member) => member.value),
|
||||
userIds: toAddUserIds.map((member) => member.userId as string),
|
||||
userDAL,
|
||||
userGroupMembershipDAL,
|
||||
orgDAL,
|
||||
|
@@ -12,7 +12,7 @@ export type TDeleteScimTokenDTO = {
 // SCIM server endpoint types

 export type TListScimUsersDTO = {
-  offset: number;
+  startIndex: number;
   limit: number;
   filter?: string;
   orgId: string;
@@ -27,12 +27,12 @@ export type TListScimUsers = {
 };

 export type TGetScimUserDTO = {
-  userId: string;
+  orgMembershipId: string;
   orgId: string;
 };

 export type TCreateScimUserDTO = {
-  username: string;
+  externalId: string;
   email?: string;
   firstName: string;
   lastName: string;
@@ -40,7 +40,7 @@ export type TCreateScimUserDTO = {
 };

 export type TUpdateScimUserDTO = {
-  userId: string;
+  orgMembershipId: string;
   orgId: string;
   operations: {
     op: string;
@@ -54,18 +54,18 @@ export type TUpdateScimUserDTO = {
 };

 export type TReplaceScimUserDTO = {
-  userId: string;
+  orgMembershipId: string;
   active: boolean;
   orgId: string;
 };

 export type TDeleteScimUserDTO = {
-  userId: string;
+  orgMembershipId: string;
   orgId: string;
 };

 export type TListScimGroupsDTO = {
-  offset: number;
+  startIndex: number;
   limit: number;
   orgId: string;
 };
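For reference, a SCIM PATCH request that would populate TUpdateScimUserDTO.operations looks like the following; the shape follows RFC 7644, and the path and id are placeholders.

// Illustrative SCIM PATCH body: deactivating a user via its org membership id.
// PATCH /scim/Users/<orgMembershipId>   (path placeholder)
const patchBody = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [{ op: "replace", value: { active: false } }]
};

An active: false operation is what routes updateScimUser into the deleteOrgMembershipFn branch shown earlier.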
@@ -272,6 +272,7 @@ export const SECRETS = {

 export const RAW_SECRETS = {
   LIST: {
+    expand: "Whether or not to expand secret references",
     recursive:
       "Whether or not to fetch all secrets from the specified base path, and all of its subdirectories. Note, the max depth is 20 deep.",
     workspaceId: "The ID of the project to list secrets from.",
@@ -614,3 +615,29 @@ export const INTEGRATION = {
     integrationId: "The ID of the integration object."
   }
 };
+
+export const AUDIT_LOG_STREAMS = {
+  CREATE: {
+    url: "The HTTP URL to push logs to.",
+    headers: {
+      desc: "The HTTP headers attached for the external provider requests.",
+      key: "The HTTP header key name.",
+      value: "The HTTP header value."
+    }
+  },
+  UPDATE: {
+    id: "The ID of the audit log stream to update.",
+    url: "The HTTP URL to push logs to.",
+    headers: {
+      desc: "The HTTP headers attached for the external provider requests.",
+      key: "The HTTP header key name.",
+      value: "The HTTP header value."
+    }
+  },
+  DELETE: {
+    id: "The ID of the audit log stream to delete."
+  },
+  GET_BY_ID: {
+    id: "The ID of the audit log stream to get details."
+  }
+};
@@ -119,6 +119,7 @@ const envSchema = z
   })
   .transform((data) => ({
     ...data,
+    isCloud: Boolean(data.LICENSE_SERVER_KEY),
     isSmtpConfigured: Boolean(data.SMTP_HOST),
     isRedisConfigured: Boolean(data.REDIS_URL),
     isDevelopmentMode: data.NODE_ENV === "development",
@@ -17,7 +17,7 @@ export type TOrgPermission = {
   actorId: string;
   orgId: string;
   actorAuthMethod: ActorAuthMethod;
-  actorOrgId: string | undefined;
+  actorOrgId: string;
 };

 export type TProjectPermission = {
@ -1 +1,2 @@
|
||||
export { isDisposableEmail } from "./validate-email";
|
||||
export { validateLocalIps } from "./validate-url";
|
||||
|
18  backend/src/lib/validator/validate-url.ts  Normal file
@@ -0,0 +1,18 @@
+import { getConfig } from "../config/env";
+import { BadRequestError } from "../errors";
+
+export const validateLocalIps = (url: string) => {
+  const validUrl = new URL(url);
+  const appCfg = getConfig();
+  // on cloud local ips are not allowed
+  if (
+    appCfg.isCloud &&
+    (validUrl.host === "host.docker.internal" ||
+      validUrl.host.match(/^10\.\d+\.\d+\.\d+/) ||
+      validUrl.host.match(/^192\.168\.\d+\.\d+/))
+  )
+    throw new BadRequestError({ message: "Local IPs not allowed as URL" });
+
+  if (validUrl.host === "localhost" || validUrl.host === "127.0.0.1")
+    throw new BadRequestError({ message: "Localhost not allowed" });
+};
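A quick sketch of how this new guard behaves; the URLs below are illustrative. Note that as written the checks compare URL#host, which includes any port, so only portless localhost URLs hit the exact-string cases:

    import { validateLocalIps } from "./validate-url";

    // Cloud and self-hosted alike: bare localhost is always rejected.
    // validateLocalIps("http://localhost/webhook");     // throws BadRequestError

    // Cloud only (appCfg.isCloud): private-range hosts are rejected.
    // validateLocalIps("http://192.168.1.10/webhook");  // throws BadRequestError on cloud

    validateLocalIps("https://logs.example.com/ingest"); // passes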
@@ -36,7 +36,7 @@ export const writeLimit: RateLimitOptions = {
 export const secretsLimit: RateLimitOptions = {
   // secrets, folders, secret imports
   timeWindow: 60 * 1000,
-  max: 600,
+  max: 1000,
   keyGenerator: (req) => req.realIp
 };
@@ -5,6 +5,8 @@ import { registerV1EERoutes } from "@app/ee/routes/v1";
 import { auditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal";
 import { auditLogQueueServiceFactory } from "@app/ee/services/audit-log/audit-log-queue";
 import { auditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
+import { auditLogStreamDALFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-dal";
+import { auditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
 import { dynamicSecretDALFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-dal";
 import { dynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
 import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/providers";
@@ -86,6 +88,7 @@ import { orgDALFactory } from "@app/services/org/org-dal";
 import { orgRoleDALFactory } from "@app/services/org/org-role-dal";
 import { orgRoleServiceFactory } from "@app/services/org/org-role-service";
 import { orgServiceFactory } from "@app/services/org/org-service";
+import { orgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
 import { projectDALFactory } from "@app/services/project/project-dal";
 import { projectQueueFactory } from "@app/services/project/project-queue";
 import { projectServiceFactory } from "@app/services/project/project-service";
@@ -153,6 +156,7 @@ export const registerRoutes = async (
   const authDAL = authDALFactory(db);
   const authTokenDAL = tokenDALFactory(db);
   const orgDAL = orgDALFactory(db);
+  const orgMembershipDAL = orgMembershipDALFactory(db);
   const orgBotDAL = orgBotDALFactory(db);
   const incidentContactDAL = incidentContactDALFactory(db);
   const orgRoleDAL = orgRoleDALFactory(db);
@@ -193,6 +197,7 @@ export const registerRoutes = async (
   const identityUaClientSecretDAL = identityUaClientSecretDALFactory(db);

   const auditLogDAL = auditLogDALFactory(db);
+  const auditLogStreamDAL = auditLogStreamDALFactory(db);
   const trustedIpDAL = trustedIpDALFactory(db);
   const telemetryDAL = telemetryDALFactory(db);

@@ -243,9 +248,15 @@ export const registerRoutes = async (
     auditLogDAL,
     queueService,
     projectDAL,
-    licenseService
+    licenseService,
+    auditLogStreamDAL
   });
   const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
+  const auditLogStreamService = auditLogStreamServiceFactory({
+    licenseService,
+    permissionService,
+    auditLogStreamDAL
+  });
   const sapService = secretApprovalPolicyServiceFactory({
     projectMembershipDAL,
     projectEnvDAL,
@@ -253,13 +264,18 @@ export const registerRoutes = async (
     permissionService,
     secretApprovalPolicyDAL
   });
+  const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL });
   const samlService = samlConfigServiceFactory({
     permissionService,
     orgBotDAL,
     orgDAL,
+    orgMembershipDAL,
     userDAL,
     userAliasDAL,
     samlConfigDAL,
-    licenseService
+    licenseService,
+    tokenService,
+    smtpService
   });
   const groupService = groupServiceFactory({
     userDAL,
@@ -288,7 +304,9 @@ export const registerRoutes = async (
     licenseService,
     scimDAL,
     userDAL,
+    userAliasDAL,
     orgDAL,
+    orgMembershipDAL,
     projectDAL,
     projectMembershipDAL,
     groupDAL,
@@ -304,6 +322,7 @@ export const registerRoutes = async (
     ldapConfigDAL,
     ldapGroupMapDAL,
     orgDAL,
+    orgMembershipDAL,
     orgBotDAL,
     groupDAL,
     groupProjectDAL,
@@ -327,8 +346,13 @@ export const registerRoutes = async (
     queueService
   });

-  const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL });
-  const userService = userServiceFactory({ userDAL });
+  const userService = userServiceFactory({
+    userDAL,
+    userAliasDAL,
+    orgMembershipDAL,
+    tokenService,
+    smtpService
+  });
   const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, tokenDAL: authTokenDAL });
   const passwordService = authPaswordServiceFactory({
     tokenService,
@@ -337,6 +361,7 @@ export const registerRoutes = async (
     userDAL
   });
   const orgService = orgServiceFactory({
+    userAliasDAL,
     licenseService,
     samlConfigDAL,
     orgRoleDAL,
@@ -715,6 +740,7 @@ export const registerRoutes = async (
     saml: samlService,
     ldap: ldapService,
     auditLog: auditLogService,
+    auditLogStream: auditLogStreamService,
     secretScanning: secretScanningService,
     license: licenseService,
     trustedIp: trustedIpService,
@@ -69,3 +69,10 @@ export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
   keyEncoding: true,
   algorithm: true
 });
+
+export const SanitizedAuditLogStreamSchema = z.object({
+  id: z.string(),
+  url: z.string(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
@@ -42,7 +42,9 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
     schema: {
       body: z.object({
         allowSignUp: z.boolean().optional(),
-        allowedSignUpDomain: z.string().optional().nullable()
+        allowedSignUpDomain: z.string().optional().nullable(),
+        trustSamlEmails: z.boolean().optional(),
+        trustLdapEmails: z.boolean().optional()
       }),
       response: {
         200: z.object({
@@ -1,3 +1,4 @@
+import slugify from "@sindresorhus/slugify";
 import { z } from "zod";

 import { ProjectEnvironmentsSchema } from "@app/db/schemas";
@@ -26,7 +27,13 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
       }),
       body: z.object({
         name: z.string().trim().describe(ENVIRONMENTS.CREATE.name),
-        slug: z.string().trim().describe(ENVIRONMENTS.CREATE.slug)
+        slug: z
+          .string()
+          .trim()
+          .refine((v) => slugify(v) === v, {
+            message: "Slug must be a valid slug"
+          })
+          .describe(ENVIRONMENTS.CREATE.slug)
       }),
       response: {
         200: z.object({
@@ -84,7 +91,14 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
         id: z.string().trim().describe(ENVIRONMENTS.UPDATE.id)
       }),
       body: z.object({
-        slug: z.string().trim().optional().describe(ENVIRONMENTS.UPDATE.slug),
+        slug: z
+          .string()
+          .trim()
+          .optional()
+          .refine((v) => !v || slugify(v) === v, {
+            message: "Slug must be a valid slug"
+          })
+          .describe(ENVIRONMENTS.UPDATE.slug),
         name: z.string().trim().optional().describe(ENVIRONMENTS.UPDATE.name),
         position: z.number().optional().describe(ENVIRONMENTS.UPDATE.position)
       }),
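The refine above only accepts input that is already in slug form; a minimal sketch of the acceptance rule under the same @sindresorhus/slugify dependency:

    import slugify from "@sindresorhus/slugify";

    const isValidSlug = (v: string) => slugify(v) === v;

    isValidSlug("staging-1"); // true  -- already slugified, unchanged by slugify
    isValidSlug("Staging 1"); // false -- slugify would produce "staging-1"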
@@ -76,6 +76,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
           .object({
             id: z.string(),
             name: z.string(),
+            slug: z.string(),
             organization: z.string(),
             environments: z
               .object({
@@ -2,11 +2,52 @@ import { z } from "zod";

 import { AuthTokenSessionsSchema, OrganizationsSchema, UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas";
 import { ApiKeysSchema } from "@app/db/schemas/api-keys";
-import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
+import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMethod, AuthMode } from "@app/services/auth/auth-type";

 export const registerUserRouter = async (server: FastifyZodProvider) => {
+  server.route({
+    method: "POST",
+    url: "/me/emails/code",
+    config: {
+      rateLimit: authRateLimit
+    },
+    schema: {
+      body: z.object({
+        username: z.string().trim()
+      }),
+      response: {
+        200: z.object({})
+      }
+    },
+    handler: async (req) => {
+      await server.services.user.sendEmailVerificationCode(req.body.username);
+      return {};
+    }
+  });
+
+  server.route({
+    method: "POST",
+    url: "/me/emails/verify",
+    config: {
+      rateLimit: authRateLimit
+    },
+    schema: {
+      body: z.object({
+        username: z.string().trim(),
+        code: z.string().trim()
+      }),
+      response: {
+        200: z.object({})
+      }
+    },
+    handler: async (req) => {
+      await server.services.user.verifyEmailVerificationCode(req.body.username, req.body.code);
+      return {};
+    }
+  });
+
   server.route({
     method: "PATCH",
     url: "/me/mfa",
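For orientation, a hypothetical client-side walkthrough of the two new routes; the /api/v1/user prefix is an assumption, since the router's mount point is outside this diff:

    // Assumed base path -- not confirmed by this diff.
    const BASE = "https://app.infisical.com/api/v1/user";

    // 1) Ask the server to email a verification code.
    await fetch(`${BASE}/me/emails/code`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ username: "jane@example.com" })
    });

    // 2) Submit the 6-digit code received by email.
    await fetch(`${BASE}/me/emails/verify`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ username: "jane@example.com", code: "123456" })
    });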
@@ -166,6 +166,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
         workspaceSlug: z.string().trim().optional().describe(RAW_SECRETS.LIST.workspaceSlug),
         environment: z.string().trim().optional().describe(RAW_SECRETS.LIST.environment),
         secretPath: z.string().trim().default("/").transform(removeTrailingSlash).describe(RAW_SECRETS.LIST.secretPath),
+        expandSecretReferences: z
+          .enum(["true", "false"])
+          .default("false")
+          .transform((value) => value === "true")
+          .describe(RAW_SECRETS.LIST.expand),
         recursive: z
           .enum(["true", "false"])
           .default("false")
@@ -233,6 +238,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
       actor: req.permission.type,
       actorOrgId: req.permission.orgId,
       environment,
+      expandSecretReferences: req.query.expandSecretReferences,
       actorAuthMethod: req.permission.authMethod,
       projectId: workspaceId,
       path: secretPath,
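Query-string values always arrive as strings, which is why the schema uses an enum plus transform instead of z.boolean(); the pattern in isolation:

    import { z } from "zod";

    const boolQuery = z
      .enum(["true", "false"])
      .default("false")
      .transform((value) => value === "true");

    boolQuery.parse("true");    // true (a real boolean)
    boolQuery.parse(undefined); // false -- the default applies before the transform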
@@ -27,10 +27,17 @@ export const getTokenConfig = (tokenType: TokenType) => {
       const expiresAt = new Date(new Date().getTime() + 86400000);
       return { token, expiresAt };
     }
+    case TokenType.TOKEN_EMAIL_VERIFICATION: {
+      // generate random 6-digit code
+      const token = String(crypto.randomInt(10 ** 5, 10 ** 6 - 1));
+      const triesLeft = 3;
+      const expiresAt = new Date(new Date().getTime() + 86400000);
+      return { token, triesLeft, expiresAt };
+    }
     case TokenType.TOKEN_EMAIL_MFA: {
       // generate random 6-digit code
       const token = String(crypto.randomInt(10 ** 5, 10 ** 6 - 1));
-      const triesLeft = 5;
+      const triesLeft = 3;
       const expiresAt = new Date(new Date().getTime() + 300000);
       return { token, triesLeft, expiresAt };
     }
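One detail worth keeping in mind about both 6-digit cases above: Node's crypto.randomInt(min, max) treats max as exclusive, so the expression as written spans 100000 through 999998:

    import crypto from "crypto";

    const code = crypto.randomInt(10 ** 5, 10 ** 6 - 1); // 100000 <= code <= 999998
    String(code).length; // always 6 for this range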
@@ -1,5 +1,6 @@
 export enum TokenType {
   TOKEN_EMAIL_CONFIRMATION = "emailConfirmation",
+  TOKEN_EMAIL_VERIFICATION = "emailVerification", // unverified -> verified
   TOKEN_EMAIL_MFA = "emailMfa",
   TOKEN_EMAIL_ORG_INVITATION = "organizationInvitation",
   TOKEN_EMAIL_PASSWORD_RESET = "passwordReset"
@@ -361,6 +361,7 @@ export const authLoginServiceFactory = ({
     user = await userDAL.create({
       username: email,
       email,
+      isEmailVerified: true,
       firstName,
       lastName,
       authMethods: [authMethod],
@@ -374,6 +375,8 @@ export const authLoginServiceFactory = ({
       authTokenType: AuthTokenType.PROVIDER_TOKEN,
       userId: user.id,
       username: user.username,
+      email: user.email,
+      isEmailVerified: user.isEmailVerified,
       firstName: user.firstName,
       lastName: user.lastName,
       authMethod,
|
@ -1,9 +1,10 @@
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
import { OrgMembershipStatus } from "@app/db/schemas";
|
||||
import { OrgMembershipStatus, TableName } from "@app/db/schemas";
|
||||
import { convertPendingGroupAdditionsToGroupMemberships } from "@app/ee/services/group/group-fns";
|
||||
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { isDisposableEmail } from "@app/lib/validator";
|
||||
@ -79,9 +80,9 @@ export const authSignupServiceFactory = ({
|
||||
});
|
||||
|
||||
await smtpService.sendMail({
|
||||
template: SmtpTemplates.EmailVerification,
|
||||
template: SmtpTemplates.SignupEmailVerification,
|
||||
subjectLine: "Infisical confirmation code",
|
||||
recipients: [email],
|
||||
recipients: [user.email as string],
|
||||
substitutions: {
|
||||
code: token
|
||||
}
|
||||
@ -101,6 +102,8 @@ export const authSignupServiceFactory = ({
|
||||
code
|
||||
});
|
||||
|
||||
await userDAL.updateById(user.id, { isEmailVerified: true });
|
||||
|
||||
// generate jwt token this is a temporary token
|
||||
const jwtToken = jwt.sign(
|
||||
{
|
||||
@ -139,9 +142,11 @@ export const authSignupServiceFactory = ({
|
||||
throw new Error("Failed to complete account for complete user");
|
||||
}
|
||||
|
||||
let organizationId;
|
||||
let organizationId: string | null = null;
|
||||
let authMethod: AuthMethod | null = null;
|
||||
if (providerAuthToken) {
|
||||
const { orgId } = validateProviderAuthToken(providerAuthToken, user.username);
|
||||
const { orgId, authMethod: userAuthMethod } = validateProviderAuthToken(providerAuthToken, user.username);
|
||||
authMethod = userAuthMethod;
|
||||
organizationId = orgId;
|
||||
} else {
|
||||
validateSignUpAuthorization(authorization, user.id);
|
||||
@ -165,6 +170,25 @@ export const authSignupServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
// If it's SAML Auth and the organization ID is present, we should check if the user has a pending invite for this org, and accept it
|
||||
if ((isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && organizationId) {
|
||||
const [pendingOrgMembership] = await orgDAL.findMembership({
|
||||
[`${TableName.OrgMembership}.userId` as "userId"]: user.id,
|
||||
status: OrgMembershipStatus.Invited,
|
||||
[`${TableName.OrgMembership}.orgId` as "orgId"]: organizationId
|
||||
});
|
||||
|
||||
if (pendingOrgMembership) {
|
||||
await orgDAL.updateMembershipById(
|
||||
pendingOrgMembership.id,
|
||||
{
|
||||
status: OrgMembershipStatus.Accepted
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return { info: us, key: userEncKey };
|
||||
});
|
||||
|
||||
|
@@ -566,20 +566,32 @@ export const integrationAuthServiceFactory = ({
       }
     });
     const kms = new AWS.KMS();

     const aliases = await kms.listAliases({}).promise();
-    const keys = await kms.listKeys({}).promise();
-    const response = keys
-      .Keys!.map((key) => {
-        const keyAlias = aliases.Aliases!.find((alias) => key.KeyId === alias.TargetKeyId);
-        if (!keyAlias?.AliasName?.includes("alias/aws/") || keyAlias?.AliasName?.includes("alias/aws/secretsmanager")) {
-          return { id: String(key.KeyId), alias: String(keyAlias?.AliasName || key.KeyId) };
-        }
-        return { id: "null", alias: "null" };
-      })
-      .filter((elem) => elem.id !== "null");
-
-    return response;
+    const keyAliases = aliases.Aliases!.filter((alias) => {
+      if (!alias.TargetKeyId) return false;
+
+      if (integrationAuth.integration === Integrations.AWS_PARAMETER_STORE && alias.AliasName === "alias/aws/ssm")
+        return true;
+
+      if (
+        integrationAuth.integration === Integrations.AWS_SECRET_MANAGER &&
+        alias.AliasName === "alias/aws/secretsmanager"
+      )
+        return true;
+
+      if (alias.AliasName?.includes("alias/aws/")) return false;
+      return alias.TargetKeyId;
+    });
+
+    const keysWithAliases = keyAliases.map((alias) => {
+      return {
+        id: alias.TargetKeyId!,
+        alias: alias.AliasName!
+      };
+    });
+
+    return keysWithAliases;
   };

   const getQoveryProjects = async ({
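Read as a predicate, the rewritten filter keeps every customer-managed alias plus exactly one AWS-managed default: the one belonging to the integration being configured. A condensed sketch (the integration labels below are illustrative stand-ins for the Integrations enum values):

    // Check order matters: the matching service default wins, other
    // "alias/aws/..." defaults are dropped, customer aliases pass through.
    const keep = (aliasName: string, integration: "parameter-store" | "secret-manager") => {
      if (integration === "parameter-store" && aliasName === "alias/aws/ssm") return true;
      if (integration === "secret-manager" && aliasName === "alias/aws/secretsmanager") return true;
      if (aliasName.includes("alias/aws/")) return false;
      return true; // customer-managed alias
    };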
@@ -458,7 +458,7 @@ const syncSecretsAWSParameterStore = async ({
   });
   ssm.config.update(config);

-  const metadata = z.record(z.any()).parse(integration.metadata);
+  const metadata = z.record(z.any()).parse(integration.metadata || {});

   const params = {
     Path: integration.path as string,
@@ -477,24 +477,29 @@ const syncSecretsAWSParameterStore = async ({
     }),
     {} as Record<string, AWS.SSM.Parameter>
   );

   // Identify secrets to create
   await Promise.all(
     Object.keys(secrets).map(async (key) => {
       if (!(key in awsParameterStoreSecretsObj)) {
         // case: secret does not exist in AWS parameter store
         // -> create secret
-        await ssm
-          .putParameter({
-            Name: `${integration.path}${key}`,
-            Type: "SecureString",
-            Value: secrets[key].value,
-            // Overwrite: true,
-            Tags: metadata.secretAWSTag
-              ? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value }))
-              : []
-          })
-          .promise();
+        if (secrets[key].value) {
+          await ssm
+            .putParameter({
+              Name: `${integration.path}${key}`,
+              Type: "SecureString",
+              Value: secrets[key].value,
+              ...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }),
+              // Overwrite: true,
+              Tags: metadata.secretAWSTag
+                ? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
+                    Key: tag.key,
+                    Value: tag.value
+                  }))
+                : []
+            })
+            .promise();
+        }
         // case: secret exists in AWS parameter store
       } else if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
         // case: secret value doesn't match one in AWS parameter store
@@ -544,7 +549,7 @@ const syncSecretsAWSSecretManager = async ({
 }) => {
   let secretsManager;
   const secKeyVal = getSecretKeyValuePair(secrets);
-  const metadata = z.record(z.any()).parse(integration.metadata);
+  const metadata = z.record(z.any()).parse(integration.metadata || {});
   try {
     if (!accessId) return;

@@ -567,7 +572,6 @@ const syncSecretsAWSSecretManager = async ({
     if (awsSecretManagerSecret?.SecretString) {
       awsSecretManagerSecretObj = JSON.parse(awsSecretManagerSecret.SecretString);
     }
-
     if (!isEqual(awsSecretManagerSecretObj, secKeyVal)) {
       await secretsManager.send(
         new UpdateSecretCommand({
@@ -582,7 +586,7 @@ const syncSecretsAWSSecretManager = async ({
       new CreateSecretCommand({
         Name: integration.app as string,
         SecretString: JSON.stringify(secKeyVal),
-        KmsKeyId: metadata.kmsKeyId ? metadata.kmsKeyId : null,
+        ...(metadata.kmsKeyId && { KmsKeyId: metadata.kmsKeyId }),
        Tags: metadata.secretAWSTag
           ? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value }))
           : []
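Both hunks above switch to the conditional-spread idiom, which omits the key entirely rather than sending an explicit null; in isolation:

    const metadata: { kmsKeyId?: string } = {};

    const params = {
      Name: "example",
      ...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId })
    };
    // With kmsKeyId unset, params has no KeyId property at all,
    // so AWS falls back to the service's default encryption key.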
13  backend/src/services/org-membership/org-membership-dal.ts  Normal file
@@ -0,0 +1,13 @@
+import { TDbClient } from "@app/db";
+import { TableName } from "@app/db/schemas";
+import { ormify } from "@app/lib/knex";
+
+export type TOrgMembershipDALFactory = ReturnType<typeof orgMembershipDALFactory>;
+
+export const orgMembershipDALFactory = (db: TDbClient) => {
+  const orgMembershipOrm = ormify(db, TableName.OrgMembership);
+
+  return {
+    ...orgMembershipOrm
+  };
+};
@@ -262,13 +262,19 @@ export const orgDALFactory = (db: TDbClient) => {
       .where(buildFindFilter(filter))
       .join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
       .join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
+      .leftJoin(TableName.UserAliases, function joinUserAlias() {
+        this.on(`${TableName.UserAliases}.userId`, "=", `${TableName.OrgMembership}.userId`)
+          .andOn(`${TableName.UserAliases}.orgId`, "=", `${TableName.OrgMembership}.orgId`)
+          .andOn(`${TableName.UserAliases}.aliasType`, "=", (tx || db).raw("?", ["saml"]));
+      })
       .select(
         selectAllTableCols(TableName.OrgMembership),
         db.ref("email").withSchema(TableName.Users),
         db.ref("username").withSchema(TableName.Users),
         db.ref("firstName").withSchema(TableName.Users),
         db.ref("lastName").withSchema(TableName.Users),
-        db.ref("scimEnabled").withSchema(TableName.Organization)
+        db.ref("scimEnabled").withSchema(TableName.Organization),
+        db.ref("externalId").withSchema(TableName.UserAliases)
       )
       .where({ isGhost: false });
@@ -1,41 +1,78 @@
+import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
 import { TOrgDALFactory } from "@app/services/org/org-dal";
-import { TProjectDALFactory } from "@app/services/project/project-dal";
+import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
 import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
+import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

 type TDeleteOrgMembership = {
   orgMembershipId: string;
   orgId: string;
   orgDAL: Pick<TOrgDALFactory, "findMembership" | "deleteMembershipById" | "transaction">;
-  projectDAL: Pick<TProjectDALFactory, "find">;
-  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete">;
+  projectMembershipDAL: Pick<TProjectMembershipDALFactory, "delete" | "findProjectMembershipsByUserId">;
+  projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete">;
+  userAliasDAL: Pick<TUserAliasDALFactory, "delete">;
+  licenseService: Pick<TLicenseServiceFactory, "updateSubscriptionOrgMemberCount">;
 };

-export const deleteOrgMembership = async ({
+export const deleteOrgMembershipFn = async ({
   orgMembershipId,
   orgId,
   orgDAL,
-  projectDAL,
-  projectMembershipDAL
+  projectMembershipDAL,
+  projectKeyDAL,
+  userAliasDAL,
+  licenseService
 }: TDeleteOrgMembership) => {
-  const membership = await orgDAL.transaction(async (tx) => {
-    // delete org membership
+  const deletedMembership = await orgDAL.transaction(async (tx) => {
     const orgMembership = await orgDAL.deleteMembershipById(orgMembershipId, orgId, tx);

-    const projects = await projectDAL.find({ orgId }, { tx });
+    if (!orgMembership.userId) {
+      await licenseService.updateSubscriptionOrgMemberCount(orgId);
+      return orgMembership;
+    }

-    // delete associated project memberships
-    await projectMembershipDAL.delete(
+    await userAliasDAL.delete(
       {
-        $in: {
-          projectId: projects.map((project) => project.id)
-        },
-        userId: orgMembership.userId as string
+        userId: orgMembership.userId,
+        orgId
       },
       tx
     );

+    // Get all the project memberships of the user in the organization
+    const projectMemberships = await projectMembershipDAL.findProjectMembershipsByUserId(orgId, orgMembership.userId);
+
+    // Delete all the project memberships of the user in the organization
+    await projectMembershipDAL.delete(
+      {
+        $in: {
+          id: projectMemberships.map((membership) => membership.id)
+        }
+      },
+      tx
+    );
+
+    // Get all the project keys of the user in the organization
+    const projectKeys = await projectKeyDAL.find({
+      $in: {
+        projectId: projectMemberships.map((membership) => membership.projectId)
+      },
+      receiverId: orgMembership.userId
+    });
+
+    // Delete all the project keys of the user in the organization
+    await projectKeyDAL.delete(
+      {
+        $in: {
+          id: projectKeys.map((key) => key.id)
+        }
+      },
+      tx
+    );
+
+    await licenseService.updateSubscriptionOrgMemberCount(orgId);
     return orgMembership;
   });

-  return membership;
+  return deletedMembership;
 };
@@ -4,7 +4,7 @@ import crypto from "crypto";
 import jwt from "jsonwebtoken";
 import { Knex } from "knex";

-import { OrgMembershipRole, OrgMembershipStatus } from "@app/db/schemas";
+import { OrgMembershipRole, OrgMembershipStatus, TableName } from "@app/db/schemas";
 import { TProjects } from "@app/db/schemas/projects";
 import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
 import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@@ -18,6 +18,7 @@ import { generateUserSrpKeys } from "@app/lib/crypto/srp";
 import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
 import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { isDisposableEmail } from "@app/lib/validator";
+import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

 import { ActorAuthMethod, ActorType, AuthMethod, AuthTokenType } from "../auth/auth-type";
 import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
@@ -30,6 +31,7 @@ import { TUserDALFactory } from "../user/user-dal";
 import { TIncidentContactsDALFactory } from "./incident-contacts-dal";
 import { TOrgBotDALFactory } from "./org-bot-dal";
 import { TOrgDALFactory } from "./org-dal";
+import { deleteOrgMembershipFn } from "./org-fns";
 import { TOrgRoleDALFactory } from "./org-role-dal";
 import {
   TDeleteOrgMembershipDTO,
@@ -43,6 +45,7 @@ import {
 } from "./org-types";

 type TOrgServiceFactoryDep = {
+  userAliasDAL: Pick<TUserAliasDALFactory, "delete">;
   orgDAL: TOrgDALFactory;
   orgBotDAL: TOrgBotDALFactory;
   orgRoleDAL: TOrgRoleDALFactory;
@@ -65,6 +68,7 @@ type TOrgServiceFactoryDep = {
 export type TOrgServiceFactory = ReturnType<typeof orgServiceFactory>;

 export const orgServiceFactory = ({
+  userAliasDAL,
   orgDAL,
   userDAL,
   groupDAL,
@@ -427,7 +431,13 @@ export const orgServiceFactory = ({
     if (inviteeUser) {
       // if user already exist means its already part of infisical
       // Thus the signup flow is not needed anymore
-      const [inviteeMembership] = await orgDAL.findMembership({ orgId, userId: inviteeUser.id }, { tx });
+      const [inviteeMembership] = await orgDAL.findMembership(
+        {
+          [`${TableName.OrgMembership}.orgId` as "orgId"]: orgId,
+          [`${TableName.OrgMembership}.userId` as "userId"]: inviteeUser.id
+        },
+        { tx }
+      );
       if (inviteeMembership && inviteeMembership.status === OrgMembershipStatus.Accepted) {
         throw new BadRequestError({
           message: "Failed to invite an existing member of org",
@@ -519,9 +529,9 @@ export const orgServiceFactory = ({
       throw new BadRequestError({ message: "Invalid request", name: "Verify user to org" });
     }
     const [orgMembership] = await orgDAL.findMembership({
-      userId: user.id,
+      [`${TableName.OrgMembership}.userId` as "userId"]: user.id,
       status: OrgMembershipStatus.Invited,
-      orgId
+      [`${TableName.OrgMembership}.orgId` as "orgId"]: orgId
     });
     if (!orgMembership)
       throw new BadRequestError({
@@ -572,47 +582,14 @@ export const orgServiceFactory = ({
     const { permission } = await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId);
     ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Member);

-    const deletedMembership = await orgDAL.transaction(async (tx) => {
-      const orgMembership = await orgDAL.deleteMembershipById(membershipId, orgId, tx);
-
-      if (!orgMembership.userId) {
-        await licenseService.updateSubscriptionOrgMemberCount(orgId);
-        return orgMembership;
-      }
-
-      // Get all the project memberships of the user in the organization
-      const projectMemberships = await projectMembershipDAL.findProjectMembershipsByUserId(orgId, orgMembership.userId);
-
-      // Delete all the project memberships of the user in the organization
-      await projectMembershipDAL.delete(
-        {
-          $in: {
-            id: projectMemberships.map((membership) => membership.id)
-          }
-        },
-        tx
-      );
-
-      // Get all the project keys of the user in the organization
-      const projectKeys = await projectKeyDAL.find({
-        $in: {
-          projectId: projectMemberships.map((membership) => membership.projectId)
-        },
-        receiverId: orgMembership.userId
-      });
-
-      // Delete all the project keys of the user in the organization
-      await projectKeyDAL.delete(
-        {
-          $in: {
-            id: projectKeys.map((key) => key.id)
-          }
-        },
-        tx
-      );
-
-      await licenseService.updateSubscriptionOrgMemberCount(orgId);
-      return orgMembership;
+    const deletedMembership = await deleteOrgMembershipFn({
+      orgMembershipId: membershipId,
+      orgId,
+      orgDAL,
+      projectMembershipDAL,
+      projectKeyDAL,
+      userAliasDAL,
+      licenseService
     });

     return deletedMembership;
@@ -110,7 +110,7 @@ export const projectMembershipServiceFactory = ({
   );
   ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Member);
   const orgMembers = await orgDAL.findMembership({
-    orgId: project.orgId,
+    [`${TableName.OrgMembership}.orgId` as "orgId"]: project.orgId,
     $in: {
       [`${TableName.OrgMembership}.id` as "id"]: members.map(({ orgMembershipId }) => orgMembershipId)
     }
@@ -119,7 +119,7 @@ export const projectMembershipServiceFactory = ({

   const existingMembers = await projectMembershipDAL.find({
     projectId,
-    $in: { userId: orgMembers.map(({ userId }) => userId).filter(Boolean) as string[] }
+    $in: { userId: orgMembers.map(({ userId }) => userId).filter(Boolean) }
   });
   if (existingMembers.length) throw new BadRequestError({ message: "Some users are already part of project" });

@@ -134,7 +134,7 @@ export const projectMembershipServiceFactory = ({
   const projectMemberships = await projectMembershipDAL.insertMany(
     orgMembers.map(({ userId }) => ({
       projectId,
-      userId: userId as string
+      userId
     })),
     tx
   );
@@ -145,12 +145,12 @@ export const projectMembershipServiceFactory = ({
   const encKeyGroupByOrgMembId = groupBy(members, (i) => i.orgMembershipId);
   await projectKeyDAL.insertMany(
     orgMembers
-      .filter(({ userId }) => !userIdsToExcludeForProjectKeyAddition.has(userId as string))
+      .filter(({ userId }) => !userIdsToExcludeForProjectKeyAddition.has(userId))
       .map(({ userId, id }) => ({
         encryptedKey: encKeyGroupByOrgMembId[id][0].workspaceEncryptedKey,
         nonce: encKeyGroupByOrgMembId[id][0].workspaceEncryptedNonce,
         senderId: actorId,
-        receiverId: userId as string,
+        receiverId: userId,
         projectId
       })),
     tx
@@ -8,6 +8,7 @@ import {
   SecretKeyEncoding,
   SecretsSchema,
   SecretVersionsSchema,
+  TableName,
   TIntegrationAuths,
   TSecretApprovalRequestsSecrets,
   TSecrets,
@@ -273,7 +274,10 @@ export const projectQueueFactory = ({

       for (const key of existingProjectKeys) {
         const user = await userDAL.findUserEncKeyByUserId(key.receiverId);
-        const [orgMembership] = await orgDAL.findMembership({ userId: key.receiverId, orgId: project.orgId });
+        const [orgMembership] = await orgDAL.findMembership({
+          [`${TableName.OrgMembership}.userId` as "userId"]: key.receiverId,
+          [`${TableName.OrgMembership}.orgId` as "orgId"]: project.orgId
+        });

         if (!user) {
           throw new Error(`User with ID ${key.receiverId} was not found during upgrade.`);
@@ -1,33 +1,66 @@
-import { SecretType, TSecretImports } from "@app/db/schemas";
+import { SecretType, TSecretImports, TSecrets } from "@app/db/schemas";
 import { groupBy } from "@app/lib/fn";

 import { TSecretDALFactory } from "../secret/secret-dal";
 import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
+import { TSecretImportDALFactory } from "./secret-import-dal";

+type TSecretImportSecrets = {
+  secretPath: string;
+  environment: string;
+  environmentInfo: {
+    id: string;
+    slug: string;
+    name: string;
+  };
+  folderId: string | undefined;
+  importFolderId: string;
+  secrets: (TSecrets & { workspace: string; environment: string; _id: string })[];
+};
+
+const LEVEL_BREAK = 10;
+const getImportUniqKey = (envSlug: string, path: string) => `${envSlug}=${path}`;
 export const fnSecretsFromImports = async ({
-  allowedImports,
+  allowedImports: possibleCyclicImports,
   folderDAL,
-  secretDAL
+  secretDAL,
+  secretImportDAL,
+  depth = 0,
+  cyclicDetector = new Set()
 }: {
   allowedImports: (Omit<TSecretImports, "importEnv"> & {
     importEnv: { id: string; slug: string; name: string };
   })[];
   folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">;
   secretDAL: Pick<TSecretDALFactory, "find">;
+  secretImportDAL: Pick<TSecretImportDALFactory, "findByFolderIds">;
+  depth?: number;
+  cyclicDetector?: Set<string>;
 }) => {
-  const importedFolders = await folderDAL.findByManySecretPath(
-    allowedImports.map(({ importEnv, importPath }) => ({
-      envId: importEnv.id,
-      secretPath: importPath
-    }))
+  // avoid going more than a depth
+  if (depth >= LEVEL_BREAK) return [];
+
+  const allowedImports = possibleCyclicImports.filter(
+    ({ importPath, importEnv }) => !cyclicDetector.has(getImportUniqKey(importEnv.slug, importPath))
   );
-  const folderIds = importedFolders.map((el) => el?.id).filter(Boolean) as string[];
-  if (!folderIds.length) {
+
+  const importedFolders = (
+    await folderDAL.findByManySecretPath(
+      allowedImports.map(({ importEnv, importPath }) => ({
+        envId: importEnv.id,
+        secretPath: importPath
+      }))
+    )
+  ).filter(Boolean); // remove undefined ones
+  if (!importedFolders.length) {
     return [];
   }

+  const importedFolderIds = importedFolders.map((el) => el?.id) as string[];
+  const importedFolderGroupBySourceImport = groupBy(importedFolders, (i) => `${i?.envId}-${i?.path}`);
   const importedSecrets = await secretDAL.find(
     {
-      $in: { folderId: folderIds },
+      $in: { folderId: importedFolderIds },
       type: SecretType.Shared
     },
     {
@@ -35,18 +68,50 @@ export const fnSecretsFromImports = async ({
     }
   );

-  const importedSecsGroupByFolderId = groupBy(importedSecrets, (i) => i.folderId);
-  return allowedImports.map(({ importPath, importEnv }, i) => ({
-    secretPath: importPath,
-    environment: importEnv.slug,
-    environmentInfo: importEnv,
-    folderId: importedFolders?.[i]?.id,
-    // this will ensure for cases when secrets are empty. Could be due to missing folder for a path or when emtpy secrets inside a given path
-    secrets: (importedSecsGroupByFolderId?.[importedFolders?.[i]?.id as string] || []).map((item) => ({
-      ...item,
-      environment: importEnv.slug,
-      workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend.
-      _id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend.
-    }))
-  }));
+  const importedSecretsGroupByFolderId = groupBy(importedSecrets, (i) => i.folderId);
+
+  allowedImports.forEach(({ importPath, importEnv }) => {
+    cyclicDetector.add(getImportUniqKey(importEnv.slug, importPath));
+  });
+  // now we need to check recursively deeper imports made inside other imports
+  // we go level wise meaning we take all imports of a tree level and then go deeper ones level by level
+  const deeperImports = await secretImportDAL.findByFolderIds(importedFolderIds);
+  let secretsFromDeeperImports: TSecretImportSecrets[] = [];
+  if (deeperImports.length) {
+    secretsFromDeeperImports = await fnSecretsFromImports({
+      allowedImports: deeperImports,
+      secretImportDAL,
+      folderDAL,
+      secretDAL,
+      depth: depth + 1,
+      cyclicDetector
+    });
+  }
+  const secretsFromdeeperImportGroupedByFolderId = groupBy(secretsFromDeeperImports, (i) => i.importFolderId);
+
+  const secrets = allowedImports.map(({ importPath, importEnv, id, folderId }, i) => {
+    const sourceImportFolder = importedFolderGroupBySourceImport[`${importEnv.id}-${importPath}`][0];
+    const folderDeeperImportSecrets =
+      secretsFromdeeperImportGroupedByFolderId?.[sourceImportFolder?.id || ""]?.[0]?.secrets || [];
+
+    return {
+      secretPath: importPath,
+      environment: importEnv.slug,
+      environmentInfo: importEnv,
+      folderId: importedFolders?.[i]?.id,
+      id,
+      importFolderId: folderId,
+      // this will ensure for cases when secrets are empty. Could be due to missing folder for a path or when emtpy secrets inside a given path
+      secrets: (importedSecretsGroupByFolderId?.[importedFolders?.[i]?.id as string] || [])
+        .map((item) => ({
+          ...item,
+          environment: importEnv.slug,
+          workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend.
+          _id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend.
+        }))
+        .concat(folderDeeperImportSecrets)
+    };
+  });
+
+  return secrets;
 };
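The cyclicDetector set and the LEVEL_BREAK cap are what make the recursion above safe; a toy illustration independent of the DAL layer (the environment slugs and paths are made up):

    // Every visited import is keyed as "envSlug=path"; a repeat visit is filtered out.
    const cyclicDetector = new Set<string>();
    const key = (env: string, path: string) => `${env}=${path}`;

    cyclicDetector.add(key("dev", "/app"));  // level 0: /app imports /base
    cyclicDetector.has(key("dev", "/app"));  // true -> a deeper import pointing back at /app is skipped
    // Non-repeating chains still terminate once depth reaches LEVEL_BREAK (10).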
@@ -290,7 +290,7 @@ export const secretImportServiceFactory = ({
       })
     )
   );
-  return fnSecretsFromImports({ allowedImports, folderDAL, secretDAL });
+  return fnSecretsFromImports({ allowedImports, folderDAL, secretDAL, secretImportDAL });
 };

 return {
@@ -318,7 +318,7 @@ export const secretQueueFactory = ({
     });

     // add the imported secrets to the current folder secrets
-    content = { ...content, ...importedSecrets };
+    content = { ...importedSecrets, ...content };
   }
 }
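The reordering matters because later spread entries win on key collisions, so a folder's own secrets now take precedence over imported ones:

    const importedSecrets = { DB_URL: "imported", SHARED: "imported" };
    const content = { DB_URL: "local" };

    const merged = { ...importedSecrets, ...content };
    // -> { DB_URL: "local", SHARED: "imported" } -- the local value wins on conflict.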
@@ -27,6 +27,7 @@ import {
   fnSecretBlindIndexCheck,
   fnSecretBulkInsert,
   fnSecretBulkUpdate,
+  interpolateSecrets,
   recursivelyGetSecretPaths
 } from "./secret-fns";
 import { TSecretQueueFactory } from "./secret-queue";
@@ -525,7 +526,8 @@ export const secretServiceFactory = ({
   const importedSecrets = await fnSecretsFromImports({
     allowedImports,
     secretDAL,
-    folderDAL
+    folderDAL,
+    secretImportDAL
   });

   return {
@@ -630,7 +632,8 @@ export const secretServiceFactory = ({
   const importedSecrets = await fnSecretsFromImports({
     allowedImports,
     secretDAL,
-    folderDAL
+    folderDAL,
+    secretImportDAL
   });
   for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
     for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) {
@@ -883,6 +886,7 @@ export const secretServiceFactory = ({
     actorAuthMethod,
     environment,
     includeImports,
+    expandSecretReferences,
     recursive
   }: TGetSecretsRawDTO) => {
     const botKey = await projectBotService.getBotKey(projectId);
@@ -900,17 +904,66 @@ export const secretServiceFactory = ({
       recursive
     });

-    return {
-      secrets: secrets.map((el) => decryptSecretRaw(el, botKey)),
-      imports: (imports || [])?.map(({ secrets: importedSecrets, ...el }) => ({
-        ...el,
-        secrets: importedSecrets.map((sec) =>
-          decryptSecretRaw(
-            { ...sec, environment: el.environment, workspace: projectId, secretPath: el.secretPath },
-            botKey
-          )
-        )
-      }))
-    };
+    const decryptedSecrets = secrets.map((el) => decryptSecretRaw(el, botKey));
+    const decryptedImports = (imports || [])?.map(({ secrets: importedSecrets, ...el }) => ({
+      ...el,
+      secrets: importedSecrets.map((sec) =>
+        decryptSecretRaw(
+          { ...sec, environment: el.environment, workspace: projectId, secretPath: el.secretPath },
+          botKey
+        )
+      )
+    }));
+
+    if (expandSecretReferences) {
+      const expandSecrets = interpolateSecrets({
+        folderDAL,
+        projectId,
+        secretDAL,
+        secretEncKey: botKey
+      });
+
+      const batchSecretsExpand = async (
+        secretBatch: {
+          secretKey: string;
+          secretValue: string;
+          secretComment?: string;
+        }[]
+      ) => {
+        const secretRecord: Record<
+          string,
+          {
+            value: string;
+            comment?: string;
+            skipMultilineEncoding?: boolean;
+          }
+        > = {};
+
+        secretBatch.forEach((decryptedSecret) => {
+          secretRecord[decryptedSecret.secretKey] = {
+            value: decryptedSecret.secretValue,
+            comment: decryptedSecret.secretComment
+          };
+        });
+
+        await expandSecrets(secretRecord);
+
+        secretBatch.forEach((decryptedSecret, index) => {
+          // eslint-disable-next-line no-param-reassign
+          secretBatch[index].secretValue = secretRecord[decryptedSecret.secretKey].value;
+        });
+      };
+
+      // expand secrets
+      await batchSecretsExpand(decryptedSecrets);
+
+      // expand imports by batch
+      await Promise.all(decryptedImports.map((decryptedImport) => batchSecretsExpand(decryptedImport.secrets)));
+    }
+
+    return {
+      secrets: decryptedSecrets,
+      imports: decryptedImports
+    };
   };
@@ -138,6 +138,7 @@ export type TDeleteBulkSecretDTO = {
 } & TProjectPermission;

 export type TGetSecretsRawDTO = {
+  expandSecretReferences?: boolean;
   path: string;
   environment: string;
   includeImports?: boolean;
@@ -17,6 +17,7 @@ export type TSmtpSendMail = {
 export type TSmtpService = ReturnType<typeof smtpServiceFactory>;

 export enum SmtpTemplates {
+  SignupEmailVerification = "signupEmailVerification.handlebars",
   EmailVerification = "emailVerification.handlebars",
   SecretReminder = "secretReminder.handlebars",
   EmailMfa = "emailMfa.handlebars",
@@ -1,17 +1,15 @@
 <!DOCTYPE html>
 <html>

-<head>
-  <meta charset="utf-8">
-  <meta http-equiv="x-ua-compatible" content="ie=edge">
+  <head>
+    <meta charset="utf-8" />
+    <meta http-equiv="x-ua-compatible" content="ie=edge" />
     <title>Code</title>
-</head>
+  </head>

-<body>
+  <body>
     <h2>Confirm your email address</h2>
-  <p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
+    <p>Your confirmation code is below — enter it in the browser window where you've started confirming your email.</p>
     <h1>{{code}}</h1>
     <p>Questions about setting up Infisical? Email us at support@infisical.com</p>
-</body>
+  </body>

 </html>
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<html>
+
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="x-ua-compatible" content="ie=edge">
+  <title>Code</title>
+</head>
+
+<body>
+  <h2>Confirm your email address</h2>
+  <p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
+  <h1>{{code}}</h1>
+  <p>Questions about setting up Infisical? Email us at support@infisical.com</p>
+</body>
+
+</html>
@@ -102,7 +102,8 @@ export const superAdminServiceFactory = ({
         superAdmin: true,
         isGhost: false,
         isAccepted: true,
-        authMethods: [AuthMethod.EMAIL]
+        authMethods: [AuthMethod.EMAIL],
+        isEmailVerified: true
       },
       tx
     );
@@ -0,0 +1,4 @@
+export enum UserAliasType {
+  LDAP = "ldap",
+  SAML = "saml"
+}
@@ -4,7 +4,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
 import { TUserDALFactory } from "@app/services/user/user-dal";

 export const normalizeUsername = async (username: string, userDAL: Pick<TUserDALFactory, "findOne">) => {
-  let attempt = slugify(username);
+  let attempt = slugify(`${username}-${alphaNumericNanoId(4)}`);

   let user = await userDAL.findOne({ username: attempt });
   if (!user) return attempt;
@@ -1,15 +1,151 @@
+import { BadRequestError } from "@app/lib/errors";
+import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
+import { TokenType } from "@app/services/auth-token/auth-token-types";
+import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
+import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
+import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
+
 import { AuthMethod } from "../auth/auth-type";
 import { TUserDALFactory } from "./user-dal";

 type TUserServiceFactoryDep = {
-  userDAL: TUserDALFactory;
+  userDAL: Pick<
+    TUserDALFactory,
+    | "find"
+    | "findOne"
+    | "findById"
+    | "transaction"
+    | "updateById"
+    | "update"
+    | "deleteById"
+    | "findOneUserAction"
+    | "createUserAction"
+    | "findUserEncKeyByUserId"
+  >;
+  userAliasDAL: Pick<TUserAliasDALFactory, "find" | "insertMany">;
+  orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find" | "insertMany">;
+  tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser" | "validateTokenForUser">;
+  smtpService: Pick<TSmtpService, "sendMail">;
 };

 export type TUserServiceFactory = ReturnType<typeof userServiceFactory>;

-export const userServiceFactory = ({ userDAL }: TUserServiceFactoryDep) => {
+export const userServiceFactory = ({
+  userDAL,
+  userAliasDAL,
+  orgMembershipDAL,
+  tokenService,
+  smtpService
+}: TUserServiceFactoryDep) => {
+  const sendEmailVerificationCode = async (username: string) => {
+    const user = await userDAL.findOne({ username });
+    if (!user) throw new BadRequestError({ name: "Failed to find user" });
+    if (!user.email)
+      throw new BadRequestError({ name: "Failed to send email verification code due to no email on user" });
+    if (user.isEmailVerified)
+      throw new BadRequestError({ name: "Failed to send email verification code due to email already verified" });
+
+    const token = await tokenService.createTokenForUser({
+      type: TokenType.TOKEN_EMAIL_VERIFICATION,
+      userId: user.id
+    });
+
+    await smtpService.sendMail({
+      template: SmtpTemplates.EmailVerification,
+      subjectLine: "Infisical confirmation code",
+      recipients: [user.email],
+      substitutions: {
+        code: token
+      }
+    });
+  };
+
+  const verifyEmailVerificationCode = async (username: string, code: string) => {
+    const user = await userDAL.findOne({ username });
+    if (!user) throw new BadRequestError({ name: "Failed to find user" });
+    if (!user.email)
+      throw new BadRequestError({ name: "Failed to verify email verification code due to no email on user" });
+    if (user.isEmailVerified)
+      throw new BadRequestError({ name: "Failed to verify email verification code due to email already verified" });
+
+    await tokenService.validateTokenForUser({
+      type: TokenType.TOKEN_EMAIL_VERIFICATION,
+      userId: user.id,
+      code
+    });
+
+    const { email } = user;
+
+    await userDAL.transaction(async (tx) => {
+      await userDAL.updateById(
+        user.id,
+        {
+          isEmailVerified: true
+        },
+        tx
+      );
+
+      // check if there are users with the same email.
+      const users = await userDAL.find(
+        {
+          email,
+          isEmailVerified: true
+        },
+        { tx }
+      );
+
+      if (users.length > 1) {
+        // merge users
+        const mergeUser = users.find((u) => u.id !== user.id);
+        if (!mergeUser) throw new BadRequestError({ name: "Failed to find merge user" });
+
+        const mergeUserOrgMembershipSet = new Set(
+          (await orgMembershipDAL.find({ userId: mergeUser.id }, { tx })).map((m) => m.orgId)
+        );
+        const myOrgMemberships = (await orgMembershipDAL.find({ userId: user.id }, { tx })).filter(
+          (m) => !mergeUserOrgMembershipSet.has(m.orgId)
+        );
+
+        const userAliases = await userAliasDAL.find(
+          {
+            userId: user.id
+          },
+          { tx }
+        );
+        await userDAL.deleteById(user.id, tx);
+
+        if (myOrgMemberships.length) {
+          await orgMembershipDAL.insertMany(
+            myOrgMemberships.map((orgMembership) => ({
+              ...orgMembership,
+              userId: mergeUser.id
+            })),
+            tx
+          );
+        }
+
+        if (userAliases.length) {
+          await userAliasDAL.insertMany(
+            userAliases.map((userAlias) => ({
+              ...userAlias,
+              userId: mergeUser.id
+            })),
+            tx
+          );
+        }
+      } else {
+        // update current user's username to [email]
+        await userDAL.updateById(
+          user.id,
+          {
+            username: email
+          },
+          tx
+        );
+      }
+    });
+  };
+
   const toggleUserMfa = async (userId: string, isMfaEnabled: boolean) => {
     const user = await userDAL.findById(userId);

@@ -72,6 +208,8 @@ export const userServiceFactory = ({ userDAL }: TUserServiceFactoryDep) => {
   };

   return {
+    sendEmailVerificationCode,
+    verifyEmailVerificationCode,
     toggleUserMfa,
     updateUserName,
     updateAuthMethods,
97
company/documentation/getting-started/introduction.mdx
Normal file
97
company/documentation/getting-started/introduction.mdx
Normal file
@ -0,0 +1,97 @@
|
||||
---
|
||||
title: "What is Infisical?"
|
||||
sidebarTitle: "What is Infisical?"
|
||||
description: "An Introduction to the Infisical secret management platform."
|
||||
---
|
||||
|
||||
Infisical is an [open-source](https://github.com/infisical/infisical) secret management platform for developers.
|
||||
It provides capabilities for storing, managing, and syncing application configuration and secrets like API keys, database
|
||||
credentials, and certificates across infrastructure. In addition, Infisical prevents secrets leaks to git and enables secure
|
||||
sharing of secrets among engineers.
|
||||
|
||||
Start managing secrets securely with [Infisical Cloud](https://app.infisical.com) or learn how to [host Infisical](/self-hosting/overview) yourself.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card
|
||||
title="Infisical Cloud"
|
||||
href="https://app.infisical.com/signup"
|
||||
icon="cloud"
|
||||
color="#000000"
|
||||
>
|
||||
Get started with Infisical Cloud in just a few minutes.
|
||||
</Card>
|
||||
<Card
|
||||
href="/self-hosting/overview"
|
||||
title="Self-hosting"
|
||||
icon="server"
|
||||
color="#000000"
|
||||
>
|
||||
Self-host Infisical on your own infrastructure.
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Why Infisical?
|
||||
|
||||
Infisical helps developers achieve secure centralized secret management and provides all the tools to easily manage secrets in various environments and infrastructure components. In particular, here are some of the most common points that developers mention after adopting Infisical:
|
||||
- Streamlined **local development** processes (switching .env files to [Infisical CLI](/cli/commands/run) and removing secrets from developer machines).
|
||||
- **Best-in-class developer experience** with an easy-to-use [Web Dashboard](/documentation/platform/project).
|
||||
- Simple secret management inside **[CI/CD pipelines](/integrations/cicd/githubactions)** and staging environments.
|
||||
- Secure and compliant secret management practices in **[production environments](/sdks/overview)**.
|
||||
- **Facilitated workflows** around [secret change management](/documentation/platform/pr-workflows), [access requests](/documentation/platform/access-controls/access-requests), [temporary access provisioning](/documentation/platform/access-controls/temporary-access), and more.
|
||||
- **Improved security posture** thanks to [secret scanning](/cli/scanning-overview), [granular access control policies](/documentation/platform/access-controls/overview), [automated secret rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview), and [dynamic secrets](/documentation/platform/dynamic-secrets/overview) capabilities.
|
||||
|
||||
## How does Infisical work?
|
||||
|
||||
To make secret management effortless and secure, Infisical follows a certain structure for enabling secret management workflows as defined below.
|
||||
|
||||
**Identities** in Infisical are users or machine which have a certain set of roles and permissions assigned to them. Such identities are able to manage secrets in various **Clients** throughout the entire infrastructure. To do that, identities have to verify themselves through one of the available **Authentication Methods**.
|
||||
|
||||
As a result, the 3 main concepts that are important to understand are:
|
||||
- **[Identities](/documentation/platform/identities/overview)**: users or machines with a set permissions assigned to them.
|
||||
- **[Clients](/integrations/platforms/kubernetes)**: Infisical-developed tools for managing secrets in various infrastructure components (e.g., [Kubernetes Operator](/integrations/platforms/kubernetes), [Infisical Agent](/integrations/platforms/infisical-agent), [CLI](/cli/usage), [SDKs](/sdks/overview), [API](/api-reference/overview/introduction), [Web Dashboard](/documentation/platform/organization)).
|
||||
- **[Authentication Methods](/documentation/platform/identities/universal-auth)**: ways for Identities to authenticate inside different clients (e.g., SAML SSO for Web Dashboard, Universal Auth for Infisical Agent, etc.).

## How to get started with Infisical?

Depending on your use case, it might be helpful to look into some of the resources and guides provided below.

<CardGroup cols={2}>
  <Card href="../../cli/overview" title="Command Line Interface (CLI)" icon="square-terminal" color="#000000">
    Inject secrets into any application process/environment.
  </Card>
  <Card
    title="SDKs"
    href="/documentation/getting-started/sdks"
    icon="boxes-stacked"
    color="#000000"
  >
    Fetch secrets with any programming language on demand.
  </Card>
  <Card href="../../integrations/platforms/docker-intro" title="Docker" icon="docker" color="#000000">
    Inject secrets into Docker containers.
  </Card>
  <Card
    href="../../integrations/platforms/kubernetes"
    title="Kubernetes"
    icon="server"
    color="#000000"
  >
    Fetch and save secrets as native Kubernetes secrets.
  </Card>
  <Card
    href="/documentation/getting-started/api"
    title="REST API"
    icon="cloud"
    color="#000000"
  >
    Fetch secrets via HTTP request.
  </Card>
  <Card
    href="/integrations/overview"
    title="Native Integrations"
    icon="clouds"
    color="#000000"
  >
    Explore integrations for GitHub, Vercel, AWS, and more.
  </Card>
</CardGroup>
BIN  company/favicon.png  Normal file
Binary file not shown. After Width: | Height: | Size: 1.9 KiB
5  company/logo/dark.svg  Normal file
File diff suppressed because one or more lines are too long. After Width: | Height: | Size: 6.8 KiB
5  company/logo/light.svg  Normal file
File diff suppressed because one or more lines are too long. After Width: | Height: | Size: 6.8 KiB
80  company/mint.json  Normal file
@ -0,0 +1,80 @@
{
  "name": "Infisical",
  "openapi": "https://app.infisical.com/api/docs/json",
  "logo": {
    "dark": "/logo/dark.svg",
    "light": "/logo/light.svg",
    "href": "https://infisical.com"
  },
  "favicon": "/favicon.png",
  "colors": {
    "primary": "#26272b",
    "light": "#97b31d",
    "dark": "#A1B659",
    "ultraLight": "#E7F256",
    "ultraDark": "#8D9F4C",
    "background": {
      "light": "#ffffff",
      "dark": "#0D1117"
    },
    "anchors": {
      "from": "#000000",
      "to": "#707174"
    }
  },
  "modeToggle": {
    "default": "light",
    "isHidden": true
  },
  "feedback": {
    "suggestEdit": true,
    "raiseIssue": true,
    "thumbsRating": true
  },
  "api": {
    "baseUrl": ["https://app.infisical.com", "http://localhost:8080"]
  },
  "topbarLinks": [
    {
      "name": "Log In",
      "url": "https://app.infisical.com/login"
    }
  ],
  "topbarCtaButton": {
    "name": "Start for Free",
    "url": "https://app.infisical.com/signup"
  },
  "tabs": [
    {
      "name": "Integrations",
      "url": "integrations"
    },
    {
      "name": "CLI",
      "url": "cli"
    },
    {
      "name": "API Reference",
      "url": "api-reference"
    },
    {
      "name": "SDKs",
      "url": "sdks"
    },
    {
      "name": "Changelog",
      "url": "changelog"
    }
  ],
  "navigation": [
    {
      "group": "Getting Started",
      "pages": [
        "documentation/getting-started/introduction"
      ]
    }
  ],
  "integrations": {
    "intercom": "hsg644ru"
  }
}
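
Since `mint.json` is a Mintlify docs configuration, an easy way to validate a change like this is to preview the site locally. A quick sketch, assuming the Mintlify CLI is available via npm:

```bash
# install the Mintlify CLI and preview the docs from the folder containing mint.json
npm install -g mintlify
cd company && mintlify dev
```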
142  company/style.css  Normal file
@ -0,0 +1,142 @@
#navbar .max-w-8xl {
  max-width: 100%;
  border-bottom: 1px solid #ebebeb;
  background-color: #fcfcfc;
}

.max-w-8xl {
  /* background-color: #f5f5f5; */
}

#sidebar {
  left: 0;
  padding-left: 48px;
  padding-right: 30px;
  border-right: 1px;
  border-color: #cdd64b;
  background-color: #fcfcfc;
  border-right: 1px solid #ebebeb;
}

#sidebar .relative .sticky {
  opacity: 0;
}

#sidebar li > div.mt-2 {
  border-radius: 0;
  padding: 5px;
}

#sidebar li > a.mt-2 {
  border-radius: 0;
  padding: 5px;
}

#sidebar li > a.leading-6 {
  border-radius: 0;
  padding: 0px;
}

/* #sidebar ul > div.mt-12 {
  padding-top: 30px;
  position: relative;
}

#sidebar ul > div.mt-12 h5 {
  position: absolute;
  left: -12px;
  top: -0px;
} */

#header {
  border-left: 1px solid #26272b;
  padding-left: 16px;
  padding-right: 16px;
  background-color: #f5f5f5;
  padding-bottom: 10px;
  padding-top: 10px;
}

#content-area .mt-8 .block {
  border-radius: 0;
  border-width: 1px;
  border-color: #ebebeb;
}

#content-area .mt-8 .rounded-xl {
  border-radius: 0;
}

#content-area .mt-8 .rounded-lg {
  border-radius: 0;
}

#content-area .mt-6 .rounded-xl {
  border-radius: 0;
}

#content-area .mt-6 .rounded-lg {
  border-radius: 0;
}

#content-area .mt-6 .rounded-md {
  border-radius: 0;
}

#content-area .mt-8 .rounded-md {
  border-radius: 0;
}

#content-area div.my-4 {
  border-radius: 0;
  border-width: 1px;
}

#content-area div.flex-1 {
  /* text-transform: uppercase; */
  opacity: 0.8;
  font-weight: 400;
}

#content-area button {
  border-radius: 0;
}

#content-area a {
  border-radius: 0;
}

#content-area .not-prose {
  border-radius: 0;
}

/* .eyebrow {
  text-transform: uppercase;
  font-weight: 400;
  color: red;
} */

#content-container {
  /* background-color: #f5f5f5; */
  margin-top: 2rem;
}

#topbar-cta-button .group .absolute {
  background-color: black;
  border-radius: 0px;
}

/* #topbar-cta-button .group .absolute:hover {
  background-color: white;
  border-radius: 0px;
} */

#topbar-cta-button .group .flex {
  margin-top: 5px;
  margin-bottom: 5px;
  font-size: medium;
}

.flex-1 .flex .items-center {
  /* background-color: #f5f5f5; */
}
59  docker-swarm/.env-example  Normal file
@ -0,0 +1,59 @@
# Keys
# Required key for platform encryption/decryption ops
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218

# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=

DB_CONNECTION_URI=postgres://infisical:infisical@haproxy:5433/infisical?sslmode=no-verify

# Redis
REDIS_URL=redis://:123456@haproxy:6379

# Website URL
# Required
SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=

# Integration
# Optional only if integration is used
CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=

# Sentry (optional) for monitoring errors
SENTRY_DSN=

# Infisical Cloud-specific configs
# Ignore - Not applicable for self-hosted version
POSTHOG_HOST=
POSTHOG_PROJECT_API_KEY=

# SSO-specific variables
CLIENT_ID_GOOGLE_LOGIN=
CLIENT_SECRET_GOOGLE_LOGIN=

CLIENT_ID_GITHUB_LOGIN=
CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=
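
Since the sample `ENCRYPTION_KEY` and `AUTH_SECRET` values above must never be used in production, here is one hedged sketch for generating replacements (assuming `openssl` is available; any cryptographically secure generator producing a 16-byte hex key and a 32-byte base64 secret would do):

```bash
# 16-byte hex value for ENCRYPTION_KEY
openssl rand -hex 16

# 32-byte base64 value for AUTH_SECRET
openssl rand -base64 32
```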
78  docker-swarm/haproxy.cfg  Normal file
@ -0,0 +1,78 @@
global
    maxconn 10000
    log stdout format raw local0

defaults
    log global
    mode tcp
    retries 3
    timeout client 30m
    timeout connect 10s
    timeout server 30m
    timeout check 5s

listen stats
    mode http
    bind *:7000
    stats enable
    stats uri /

resolvers hostdns
    nameserver dns 127.0.0.11:53
    resolve_retries 3
    timeout resolve 1s
    timeout retry 1s
    hold valid 5s

frontend postgres_master
    bind *:5433
    default_backend postgres_master_backend

frontend postgres_replicas
    bind *:5434
    default_backend postgres_replica_backend

backend postgres_master_backend
    option httpchk GET /master
    http-check expect status 200
    default-server inter 3s fall 3 rise 2 on-marked-down shutdown-sessions
    server postgres-1 postgres-1:5432 check port 8008 resolvers hostdns
    server postgres-2 postgres-2:5432 check port 8008 resolvers hostdns
    server postgres-3 postgres-3:5432 check port 8008 resolvers hostdns

backend postgres_replica_backend
    option httpchk GET /replica
    http-check expect status 200
    default-server inter 3s fall 3 rise 2 on-marked-down shutdown-sessions
    server postgres-1 postgres-1:5432 check port 8008 resolvers hostdns
    server postgres-2 postgres-2:5432 check port 8008 resolvers hostdns
    server postgres-3 postgres-3:5432 check port 8008 resolvers hostdns

frontend redis_master_frontend
    bind *:6379
    default_backend redis_master_backend

backend redis_master_backend
    option tcp-check
    tcp-check send AUTH\ 123456\r\n
    tcp-check expect string +OK
    tcp-check send PING\r\n
    tcp-check expect string +PONG
    tcp-check send info\ replication\r\n
    tcp-check expect string role:master
    tcp-check send QUIT\r\n
    tcp-check expect string +OK
    server redis_master redis_replica0:6379 check inter 1s
    server redis_replica1 redis_replica1:6379 check inter 1s
    server redis_replica2 redis_replica2:6379 check inter 1s

frontend infisical_frontend
    bind *:8080
    default_backend infisical_backend

backend infisical_backend
    option httpchk GET /api/status
    http-check expect status 200
    server infisical infisical:8080 check inter 1s
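
Once the stack defined in the next file is deployed, a quick way to sanity-check this configuration is to hit the HAProxy stats page and the Infisical health endpoint it load-balances. A sketch, assuming you are on a swarm node and using the host-side port mappings from that stack file (7001 for stats, 8080 for Infisical):

```bash
# HAProxy stats dashboard (the "listen stats" section binds :7000, published as 7001)
curl -s http://localhost:7001 | head

# Infisical behind the load balancer; the backend health check uses this same path
curl -s http://localhost:8080/api/status
```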
261  docker-swarm/stack.yaml  Normal file
@ -0,0 +1,261 @@
version: "3"

services:
  haproxy:
    image: haproxy:latest
    ports:
      - '7001:7000'
      - '5002:5433' # Postgres master
      - '5003:5434' # Postgres read
      - '6379:6379'
      - '8080:8080'
    networks:
      - infisical
    configs:
      - source: haproxy-config
        target: /usr/local/etc/haproxy/haproxy.cfg
    deploy:
      mode: global

  infisical:
    container_name: infisical-backend
    image: infisical/infisical:v0.60.1-postgres
    env_file: .env
    networks:
      - infisical
    secrets:
      - env_file
    deploy:
      replicas: 5

  etcd1:
    image: ghcr.io/zalando/spilo-16:3.2-p2
    networks:
      - infisical
    environment:
      ETCD_UNSUPPORTED_ARCH: arm64
    container_name: demo-etcd1
    deploy:
      placement:
        constraints:
          - node.labels.name == node1
    hostname: etcd1
    command: |
      etcd --name etcd1
      --listen-client-urls http://0.0.0.0:2379
      --listen-peer-urls=http://0.0.0.0:2380
      --advertise-client-urls http://etcd1:2379
      --initial-cluster=etcd1=http://etcd1:2380,etcd2=http://etcd2:2380,etcd3=http://etcd3:2380
      --initial-advertise-peer-urls=http://etcd1:2380
      --initial-cluster-state=new

  etcd2:
    image: ghcr.io/zalando/spilo-16:3.2-p2
    networks:
      - infisical
    environment:
      ETCD_UNSUPPORTED_ARCH: arm64
    container_name: demo-etcd2
    hostname: etcd2
    deploy:
      placement:
        constraints:
          - node.labels.name == node2
    command: |
      etcd --name etcd2
      --listen-client-urls http://0.0.0.0:2379
      --listen-peer-urls=http://0.0.0.0:2380
      --advertise-client-urls http://etcd2:2379
      --initial-cluster=etcd1=http://etcd1:2380,etcd2=http://etcd2:2380,etcd3=http://etcd3:2380
      --initial-advertise-peer-urls=http://etcd2:2380
      --initial-cluster-state=new

  etcd3:
    image: ghcr.io/zalando/spilo-16:3.2-p2
    networks:
      - infisical
    environment:
      ETCD_UNSUPPORTED_ARCH: arm64
    container_name: demo-etcd3
    hostname: etcd3
    deploy:
      placement:
        constraints:
          - node.labels.name == node3
    command: |
      etcd --name etcd3
      --listen-client-urls http://0.0.0.0:2379
      --listen-peer-urls=http://0.0.0.0:2380
      --advertise-client-urls http://etcd3:2379
      --initial-cluster=etcd1=http://etcd1:2380,etcd2=http://etcd2:2380,etcd3=http://etcd3:2380
      --initial-advertise-peer-urls=http://etcd3:2380
      --initial-cluster-state=new

  spolo1:
    image: ghcr.io/zalando/spilo-16:3.2-p2
    container_name: postgres-1
    networks:
      - infisical
    hostname: postgres-1
    environment:
      ETCD_HOSTS: etcd1:2379,etcd2:2379,etcd3:2379
      PGPASSWORD_SUPERUSER: "postgres"
      PGUSER_SUPERUSER: "postgres"
      SCOPE: infisical
    volumes:
      - postgres_data1:/home/postgres/pgdata
    deploy:
      placement:
        constraints:
          - node.labels.name == node1

  spolo2:
    image: ghcr.io/zalando/spilo-16:3.2-p2
    container_name: postgres-2
    networks:
      - infisical
    hostname: postgres-2
    environment:
      ETCD_HOSTS: etcd1:2379,etcd2:2379,etcd3:2379
      PGPASSWORD_SUPERUSER: "postgres"
      PGUSER_SUPERUSER: "postgres"
      SCOPE: infisical
    volumes:
      - postgres_data2:/home/postgres/pgdata
    deploy:
      placement:
        constraints:
          - node.labels.name == node2

  spolo3:
    image: ghcr.io/zalando/spilo-16:3.2-p2
    container_name: postgres-3
    networks:
      - infisical
    hostname: postgres-3
    environment:
      ETCD_HOSTS: etcd1:2379,etcd2:2379,etcd3:2379
      PGPASSWORD_SUPERUSER: "postgres"
      PGUSER_SUPERUSER: "postgres"
      SCOPE: infisical
    volumes:
      - postgres_data3:/home/postgres/pgdata
    deploy:
      placement:
        constraints:
          - node.labels.name == node3

  redis_replica0:
    image: bitnami/redis:6.2.10
    environment:
      - REDIS_REPLICATION_MODE=master
      - REDIS_PASSWORD=123456
    networks:
      - infisical
    deploy:
      placement:
        constraints:
          - node.labels.name == node1

  redis_replica1:
    image: bitnami/redis:6.2.10
    environment:
      - REDIS_REPLICATION_MODE=slave
      - REDIS_MASTER_HOST=redis_replica0
      - REDIS_MASTER_PORT_NUMBER=6379
      - REDIS_MASTER_PASSWORD=123456
      - REDIS_PASSWORD=123456
    networks:
      - infisical
    deploy:
      placement:
        constraints:
          - node.labels.name == node2

  redis_replica2:
    image: bitnami/redis:6.2.10
    environment:
      - REDIS_REPLICATION_MODE=slave
      - REDIS_MASTER_HOST=redis_replica0
      - REDIS_MASTER_PORT_NUMBER=6379
      - REDIS_MASTER_PASSWORD=123456
      - REDIS_PASSWORD=123456
    networks:
      - infisical
    deploy:
      placement:
        constraints:
          - node.labels.name == node3

  redis_sentinel1:
    image: bitnami/redis-sentinel:6.2.10
    environment:
      - REDIS_SENTINEL_QUORUM=2
      - REDIS_SENTINEL_DOWN_AFTER_MILLISECONDS=5000
      - REDIS_SENTINEL_FAILOVER_TIMEOUT=60000
      - REDIS_SENTINEL_PORT_NUMBER=26379
      - REDIS_MASTER_HOST=redis_replica1
      - REDIS_MASTER_PORT_NUMBER=6379
      - REDIS_MASTER_PASSWORD=123456
    networks:
      - infisical
    deploy:
      placement:
        constraints:
          - node.labels.name == node1

  redis_sentinel2:
    image: bitnami/redis-sentinel:6.2.10
    environment:
      - REDIS_SENTINEL_QUORUM=2
      - REDIS_SENTINEL_DOWN_AFTER_MILLISECONDS=5000
      - REDIS_SENTINEL_FAILOVER_TIMEOUT=60000
      - REDIS_SENTINEL_PORT_NUMBER=26379
      - REDIS_MASTER_HOST=redis_replica1
      - REDIS_MASTER_PORT_NUMBER=6379
      - REDIS_MASTER_PASSWORD=123456
    networks:
      - infisical
    deploy:
      placement:
        constraints:
          - node.labels.name == node2

  redis_sentinel3:
    image: bitnami/redis-sentinel:6.2.10
    environment:
      - REDIS_SENTINEL_QUORUM=2
      - REDIS_SENTINEL_DOWN_AFTER_MILLISECONDS=5000
      - REDIS_SENTINEL_FAILOVER_TIMEOUT=60000
      - REDIS_SENTINEL_PORT_NUMBER=26379
      - REDIS_MASTER_HOST=redis_replica1
      - REDIS_MASTER_PORT_NUMBER=6379
      - REDIS_MASTER_PASSWORD=123456
    networks:
      - infisical
    deploy:
      placement:
        constraints:
          - node.labels.name == node3

networks:
  infisical:

volumes:
  postgres_data1:
  postgres_data2:
  postgres_data3:
  postgres_data4:
  redis0:
  redis1:
  redis2:

configs:
  haproxy-config:
    file: ./haproxy.cfg

secrets:
  env_file:
    file: .env
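
A sketch of how this stack might be brought up, assuming a three-node swarm whose nodes you label `node1` through `node3` to satisfy the placement constraints above (the node hostnames are placeholders):

```bash
# label the swarm nodes referenced by the placement constraints
docker node update --label-add name=node1 <node1-hostname>
docker node update --label-add name=node2 <node2-hostname>
docker node update --label-add name=node3 <node3-hostname>

# deploy from the directory containing stack.yaml, haproxy.cfg, and .env
docker stack deploy -c stack.yaml infisical
```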
@ -4,7 +4,7 @@ openapi: "GET /api/v2/service-token"
---

<Warning>
  This endpoint is deprecated and will be removed in the future.

  We recommend switching to using [Machine Identities](/documentation/platform/identities/machine-identities).
</Warning>
@ -16,36 +16,48 @@ Export environment variables from the platform into a file format.

<Accordion title="infisical export" defaultOpen="true">
  Use this command to export environment variables from the platform into raw file formats

  ```bash
  $ infisical export

  # Export variables to a .env file
  infisical export > .env

  # Export variables to a .env file (with export keyword)
  infisical export --format=dotenv-export > .env

  # Export variables to a CSV file
  infisical export --format=csv > secrets.csv

  # Export variables to a JSON file
  infisical export --format=json > secrets.json

  # Export variables to a YAML file
  infisical export --format=yaml > secrets.yaml

  # Render secrets using a custom template file
  infisical export --template=<path to template>
  ```

  ### Environment variables

  <Accordion title="INFISICAL_TOKEN">
    Used to fetch secrets via a [machine identity](/documentation/platform/identities/machine-identities) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.

    ```bash
    # Example
    export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<identity-client-id> --client-secret=<identity-client-secret> --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages.
    ```

    <Info>
      Alternatively, you may use service tokens.

      Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).

      ```bash
      # Example
      export INFISICAL_TOKEN=<service-token>
      ```
    </Info>
  </Accordion>

  <Accordion title="INFISICAL_DISABLE_UPDATE_CHECK">
@ -54,16 +66,18 @@ Export environment variables from the platform into a file format.
    To use, simply export this variable in the terminal before running this command.

    ```bash
    # Example
    export INFISICAL_DISABLE_UPDATE_CHECK=true
    ```
  </Accordion>

  ### Flags

  <Accordion title="--template">
    The `--template` flag specifies the path to the template file used for rendering secrets. When using templates, you can omit the other format flags.

    ```text my-template-file
    {{$secrets := secret "<infisical-project-id>" "<environment-slug>" "<folder-path>"}}
    {{$length := len $secrets}}
    {{- "{"}}
@ -73,24 +87,26 @@ Export environment variables from the platform into a file format.
    {{- end }}
    {{- end }}
    {{ "}" -}}
    ```

    ```bash
    # Example
    infisical export --template="/path/to/template/file"
    ```
  </Accordion>
  <Accordion title="--env">
    Used to set the environment that secrets are pulled from.

    ```bash
    # Example
    infisical export --env=prod
    ```

    Note: this flag only accepts environment slug names, not the fully qualified name. To view the slug name of an environment, visit the project settings page.

    default value: `dev`
  </Accordion>

  <Accordion title="--projectId">
@ -98,28 +114,32 @@ Export environment variables from the platform into a file format.
    This flag allows you to override this behavior by explicitly defining the project to fetch your secrets from.

    ```bash
    # Example
    infisical export --projectId=XXXXXXXXXXXXXX
    ```
  </Accordion>

  <Accordion title="--expand">
    Parse shell parameter expansions in your secrets (e.g., `${DOMAIN}`)

    Default value: `true`
  </Accordion>
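
  As an illustration of the `--expand` behavior above, a sketch with assumed example secrets (`DOMAIN` and `FULL_URL` are hypothetical names, not part of the original docs):

  ```bash
  # Given secrets DOMAIN=example.com and FULL_URL=https://${DOMAIN}
  infisical export --env=dev                 # FULL_URL=https://example.com
  infisical export --env=dev --expand=false  # FULL_URL=https://${DOMAIN}
  ```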

  <Accordion title="--format">
    Format of the output file. Accepted values: `dotenv`, `dotenv-export`, `csv`, `json` and `yaml`

    Default value: `dotenv`
  </Accordion>

  <Accordion title="--secret-overriding">
    Prioritizes personal secrets with the same name over shared secrets

    Default value: `true`
  </Accordion>

  <Accordion title="--path">
@ -129,19 +149,21 @@ Export environment variables from the platform into a file format.
    # Example
    infisical export --path="/path/to/folder" --env=dev
    ```
  </Accordion>

  <Accordion title="--tags">
    When working with tags, you can use this flag to filter and retrieve only secrets that are associated with specific tag(s).

    ```bash
    # Example
    infisical run --tags=tag1,tag2,tag3 -- npm run dev
    ```

    Note: you must reference the tag by its slug name, not its fully qualified name. Go to project settings to view all tag slugs.

    By default, all secrets are fetched
  </Accordion>
</Accordion>
@ -11,6 +11,7 @@ description: "The command that injects your secrets into local environment"
  # Example
  infisical run [options] -- npm run dev
  ```
  </Tab>

  <Tab title="Chained commands">
@ -20,6 +21,7 @@ description: "The command that injects your secrets into local environment"
  # Example
  infisical run [options] --command "npm run bootstrap && npm run dev start; other-bash-command"
  ```
  </Tab>
</Tabs>

@ -27,27 +29,38 @@ description: "The command that injects your secrets into local environment"

Inject secrets from Infisical into your application process.

## Subcommands & flags

<Accordion title="infisical run" defaultOpen="true">
  Use this command to inject secrets into your application's process

  ```bash
  $ infisical run -- <your application command>

  # Example
  $ infisical run -- npm run dev
  ```

  ### Environment variables

  <Accordion title="INFISICAL_TOKEN">
    Used to fetch secrets via a [machine identity](/documentation/platform/identities/machine-identities) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.

    ```bash
    # Example
    export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<identity-client-id> --client-secret=<identity-client-secret> --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages.
    ```

    <Info>
      Alternatively, you may use service tokens.

      Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).

      ```bash
      # Example
      export INFISICAL_TOKEN=<service-token>
      ```
    </Info>
  </Accordion>

  <Accordion title="INFISICAL_DISABLE_UPDATE_CHECK">
@ -56,71 +69,90 @@ Inject secrets from Infisical into your application process.
    To use, simply export this variable in the terminal before running this command.

    ```bash
    # Example
    export INFISICAL_DISABLE_UPDATE_CHECK=true
    ```
  </Accordion>

  ### Flags

  <Accordion title="--project-config-dir">
    Explicitly set the directory where the .infisical.json resides. This is useful for some monorepo setups.

    ```bash
    # Example
    infisical run --project-config-dir=/some-dir -- printenv
    ```
  </Accordion>

  <Accordion title="--command">
    Pass secrets into multiple commands at once

    ```bash
    # Example
    infisical run --command="npm run build && npm run dev; more-commands..."
    ```
  </Accordion>

  <Accordion title="--projectId">
    The project ID to fetch secrets from. This is required when using a machine identity to authenticate.

    ```bash
    # Example
    infisical run --projectId=<project-id> -- npm run dev
    ```
  </Accordion>

  <Accordion title="--token">
    If you are using a [machine identity](/documentation/platform/identities/machine-identities) to authenticate, you can pass the token as a flag

    ```bash
    # Example
    infisical run --token="<universal-auth-access-token>" --projectId=<project-id> -- npm run start
    ```

    You may also expose the token to the CLI by setting the environment variable `INFISICAL_TOKEN` before executing the run command. This will have the same effect as setting the token with the `--token` flag
  </Accordion>

  <Accordion title="--expand">
    Turn on or off the shell parameter expansion in your secrets. If you have used shell parameters in your secret(s), activating this feature will populate them before injecting them into your application process.

    Default value: `true`
  </Accordion>

  <Accordion title="--env">
    This is used to specify the environment from which secrets should be retrieved. The accepted values are the environment slugs defined for your project, such as `dev`, `staging`, `test`, and `prod`.

    Default value: `dev`
  </Accordion>

  <Accordion title="--secret-overriding">
    Prioritizes personal secrets with the same name over shared secrets

    Default value: `true`
  </Accordion>

  <Accordion title="--tags">
    When working with tags, you can use this flag to filter and retrieve only secrets that are associated with specific tag(s).

    ```bash
    # Example
    infisical run --tags=tag1,tag2,tag3 -- npm run dev
    ```

    Note: you must reference the tag by its slug name, not its fully qualified name. Go to project settings to view all tag slugs.

    By default, all secrets are fetched
  </Accordion>

  <Accordion title="--path">
@ -23,13 +23,23 @@ $ infisical secrets
### Environment variables

<Accordion title="INFISICAL_TOKEN">
  Used to fetch secrets via a [machine identity](/documentation/platform/identities/machine-identities) as opposed to logged-in credentials. Simply export this variable in the terminal before running this command.

  ```bash
  # Example
  export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<identity-client-id> --client-secret=<identity-client-secret> --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages.
  ```

  <Info>
    Alternatively, you may use service tokens.

    Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).

    ```bash
    # Example
    export INFISICAL_TOKEN=<service-token>
    ```
  </Info>
</Accordion>

<Accordion title="INFISICAL_DISABLE_UPDATE_CHECK">
@ -53,6 +63,16 @@ $ infisical secrets
</Accordion>

<Accordion title="--projectId">
  The project ID to fetch secrets from. This is required when using a machine identity to authenticate.

  ```bash
  # Example
  infisical secrets --projectId=<project-id>
  ```
</Accordion>

<Accordion title="--env">
  Used to select the environment name on which actions should be taken

@ -186,7 +206,7 @@ $ infisical secrets folders
</Accordion>

<Accordion title="--token">
  Fetch folders using a [machine identity](/documentation/platform/identities/machine-identities) access token.

  Default value: ``
</Accordion>
@ -3,37 +3,47 @@ title: "infisical service-token"
description: "Manage Infisical service tokens"
---

<Warning>
  This command is deprecated and will be removed in the near future. Please
  switch to using [Machine
  Identities](/documentation/platform/identities/machine-identities) for
  authenticating with Infisical.
</Warning>

```bash
infisical service-token create --scope=dev:/global --scope=dev:/backend --access-level=read --access-level=write
```

## Description

The Infisical `service-token` command allows you to manage service tokens for a given Infisical project.
With this command, you can create, view, and delete service tokens.

<Accordion title="service-token create" defaultOpen="true">
  Use this command to create a service token

  ```bash
  $ infisical service-token create --scope=dev:/backend/** --access-level=read --access-level=write
  ```

  ### Flags

  <Accordion title="--scope">
    ```bash
    infisical service-token create --scope=dev:/global --scope=dev:/backend/** --access-level=read
    ```

    Use the scope flag to define which environments and paths your service token should be authorized to access.

    The value of your scope flag should be in the following format: `<environment slug>:<path>`.
    Here, `environment slug` refers to the slug name of the environment, and `path` indicates the folder path where your secrets are stored.

    For specifying multiple scopes, you can use multiple --scope flags.

    <Info>
      The `path` can be a Glob pattern
    </Info>
  </Accordion>

  <Accordion title="--projectId">
@ -41,8 +51,9 @@ With this command, you can create, view, and delete service tokens.
    infisical service-token create --scope=dev:/global --access-level=read --projectId=63cefb15c8d3175601cfa989
    ```

    The project ID you'd like to create the service token for.
    By default, the CLI will attempt to use the linked Infisical project in `.infisical.json` generated by the `infisical init` command.
  </Accordion>
  <Accordion title="--name">
    ```bash
@ -52,6 +63,7 @@ With this command, you can create, view, and delete service tokens.
    Service token name

    Default: `Service token generated via CLI`
  </Accordion>
  <Accordion title="--expiry-seconds">
    ```bash
@ -61,6 +73,7 @@ With this command, you can create, view, and delete service tokens.
    Set the service token's expiration time in seconds from now. To never expire, set to zero.

    Default: `1 day`
  </Accordion>
  <Accordion title="--access-level">
    ```bash
@ -68,6 +81,7 @@ With this command, you can create, view, and delete service tokens.
    ```

    The type of access the service token should have. Can be `read` and/or `write`
  </Accordion>
  <Accordion title="--token-only">
    ```bash
@ -77,5 +91,6 @@ With this command, you can create, view, and delete service tokens.
    When true, only the service token will be printed

    Default: `false`
  </Accordion>
</Accordion>
@ -1,22 +0,0 @@
---
title: "Infisical Token"
description: "How to use an Infisical service token within the CLI."
---

Prerequisite: [Infisical Token and How to Generate One](/documentation/platform/token).

It's possible to use the CLI to sync environment variables without manually entering login credentials by using a service token from the prerequisite link above.

## Feeding Infisical Token to the CLI

The CLI looks out for an environment variable called `INFISICAL_TOKEN`, which you can set depending on where you run the CLI. If `INFISICAL_TOKEN` is detected by the CLI, it will authenticate and retrieve the environment variables which the token is authorized for.

A common use case is to use the Infisical Token to fetch environment variables with Docker. More specifically, a token can be passed to a container as an environment variable for the CLI to authenticate and pull its corresponding secrets. Check out the integration guides for that:

- [Docker](../../integrations/platforms/docker)
- [Docker Compose](../../integrations/platforms/docker-compose)

<Info>
  Once the token is expired, the CLI using it will no longer be able to make
  requests with it.
</Info>
@ -1,141 +1,125 @@
|
||||
---
|
||||
title: "Quick usage"
|
||||
title: "Quickstart"
|
||||
description: "Manage secrets with Infisical CLI"
|
||||
---
|
||||
|
||||
The CLI is designed for a variety of applications, ranging from local secret management to CI/CD and production scenarios.
|
||||
The distinguishing factor, however, is the authentication method used.
|
||||
The CLI is designed for a variety of secret management applications ranging from local development to CI/CD and production scenarios.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Local development only">
|
||||
To use the Infisical CLI in your local development environment, simply run the command below and follow the interactive guide.
|
||||
<Tab title="Local development">
|
||||
In the following steps, we explore how to use the Infisical CLI to fetch back environment variables from Infisical
|
||||
and inject them into your local development process.
|
||||
|
||||
<Steps>
|
||||
<Step title="Log in with the CLI">
|
||||
Start by running the `infisical login` command to authenticate with Infisical.
|
||||
|
||||
```bash
|
||||
infisical login
|
||||
```
|
||||
<Note>
|
||||
If you are in a containerized environment such as WSL 2 or Codespaces, run `infisical login -i` to avoid browser based login
|
||||
</Note>
|
||||
</Step>
|
||||
<Step title="Initialize Infisical for your project">
|
||||
Next, navigate to your project and initialize Infisical.
|
||||
|
||||
```bash
|
||||
# navigate to your project
|
||||
cd /path/to/project
|
||||
|
||||
```bash
|
||||
infisical login
|
||||
```
|
||||
# initialize infisical
|
||||
infisical init
|
||||
```
|
||||
|
||||
<Note>
|
||||
If you are in a containerized environment such as WSL 2 or Codespaces, run `infisical login -i` to avoid browser based login
|
||||
</Note>
|
||||
The `infisical init` command creates a `.infisical.json` file, containing [local project settings](./project-config), at the location where the command is executed.
|
||||
|
||||
## Initialize Infisical for your project
|
||||
<Note>
|
||||
The `.infisical.json` file does not contain any sensitive data, so you may commit it to your git repository.
|
||||
</Note>
|
||||
</Step>
|
||||
<Step title="Inject environment variables">
|
||||
Finally, pass environment variables from Infisical into your application.
|
||||
|
||||
```bash
|
||||
# navigate to your project
|
||||
cd /path/to/project
|
||||
<Tabs>
|
||||
<Tab title="Feed secrets to your application">
|
||||
```bash
|
||||
infisical run --env=dev --path=/apps/firefly -- [your application start command] # e.g. npm run dev
|
||||
|
||||
# initialize infisical
|
||||
infisical init
|
||||
```
|
||||
# example with node (nodemon)
|
||||
infisical run --env=staging --path=/apps/spotify -- nodemon index.js
|
||||
|
||||
# example with flask
|
||||
infisical run --env=prod --path=/apps/backend -- flask run
|
||||
|
||||
# example with spring boot - maven
|
||||
infisical run --env=dev --path=/apps/ -- ./mvnw spring-boot:run --quiet
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Feed secrets via custom aliases (advanced)">
|
||||
Custom aliases can utilize secrets from Infisical. Suppose there is a custom alias `yd` in `custom.sh` that runs `yarn dev` and needs the secrets provided by Infisical.
|
||||
```bash
|
||||
#!/bin/sh
|
||||
|
||||
yd() {
|
||||
yarn dev
|
||||
}
|
||||
```
|
||||
|
||||
To make the secrets available from Infisical to `yd`, you can run the following command:
|
||||
|
||||
```bash
|
||||
infisical run --env=prod --path=/apps/reddit --command="source custom.sh && yd"
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
View all available options for `run` command [here](./commands/run)
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
This will create `.infisical.json` file at the location the command was executed. This file contains your [local project settings](./project-config). It does not contain any sensitive data.
|
||||
|
||||
</Tab>
|
||||
|
||||
<Tab title="Staging, production & all other use case">
|
||||
To use Infisical for non local development scenarios, please create a [service token](../documentation/platform/token). The service token will allow you to authenticate and interact with Infisical.
|
||||
Once you have created a service token with the required permissions, you'll need to feed the token to the CLI.
|
||||
<Tab title="Staging, production & all other use cases">
|
||||
In the following steps, we explore how to use the Infisical CLI in a non-local development scenario
|
||||
to fetch back environment variables and export them to a file.
|
||||
<Steps>
|
||||
<Step title="Create a machine identity and obtain credentials for it">
|
||||
Follow the steps listed [here](/documentation/platform/identities/universal-auth) to create a machine identity and obtain a **client ID** and **client secret** for it.
|
||||
</Step>
|
||||
<Step title="Obtain a machine identity access token">
|
||||
Run the following command to authenticate with Infisical using the **client ID** and **client secret** credentials from step 1 and set the `INFISICAL_TOKEN` environment variable to the retrieved access token.
|
||||
|
||||
```bash
|
||||
export INFISICAL_TOKEN=$(infisical login --method=universal-auth --client-id=<identity-client-id> --client-secret=<identity-client-secret> --silent --plain) # --plain flag will output only the token, so it can be fed to an environment variable. --silent will disable any update messages.
|
||||
```
|
||||
|
||||
#### Pass as flag
|
||||
You may use the --token flag to set the token
|
||||
The CLI is configured to look out for the `INFISICAL_TOKEN` environment variable, so going forward any command used will be authenticated.
|
||||
|
||||
```
|
||||
infisical export --token=<>
|
||||
infisical secrets --token=<>
|
||||
infisical run --token=<> -- npm run dev
|
||||
```
|
||||
Alternatively, assuming you have an access token on hand, you can also pass it directly to the CLI using the `--token` flag in conjunction with other CLI commands.
|
||||
|
||||
#### Pass via shell environment variable
|
||||
The CLI is configured to look for an environment variable named `INFISICAL_TOKEN`. If set, it'll attempt to use it for authentication.
|
||||
<Info>
|
||||
Keep in mind that the machine identity access token has a limited lifetime. It is recommended to use it only for the duration of the task at hand.
|
||||
You can [refresh the token](./commands/token) if needed.
|
||||
</Info>
|
||||
</Step>
|
||||
<Step title="Export environment variables back into a file">
|
||||
Finally, export the environment variables from Infisical to a file of choice.
|
||||
|
||||
```
|
||||
export INFISICAL_TOKEN=<>
|
||||
```
|
||||
|
||||
```bash
|
||||
# export variables to a .env file (with export keyword)
|
||||
infisical export --format=dotenv-export > .env
|
||||
|
||||
# export variables to a YAML file
|
||||
infisical export --format=yaml > secrets.yaml
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
|
||||
## Inject environment variables
|
||||
<Tabs>
|
||||
<Tab title="Feed secrets to your application">
|
||||
```bash
|
||||
infisical run --env=dev --path=/apps/firefly -- [your application start command]
|
||||
|
||||
# example with node (nodemon)
|
||||
infisical run --env=staging --path=/apps/spotify -- nodemon index.js
|
||||
|
||||
# example with flask
|
||||
infisical run --env=prod --path=/apps/backend -- flask run
|
||||
|
||||
# example with spring boot - maven
|
||||
infisical run --env=dev --path=/apps/ -- ./mvnw spring-boot:run --quiet
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Feed secrets via custom aliases (advanced)">
|
||||
Custom aliases can utilize secrets from Infisical. Suppose there is a custom alias `yd` in `custom.sh` that runs `yarn dev` and needs the secrets provided by Infisical.
|
||||
```bash
|
||||
#!/bin/sh
|
||||
|
||||
yd() {
|
||||
yarn dev
|
||||
}
|
||||
```
|
||||
|
||||
To make the secrets available from Infisical to `yd`, you can run the following command:
|
||||
|
||||
```bash
|
||||
infisical run --env=prod --path=/apps/reddit --command="source custom.sh && yd"
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
View all available options for `run` command [here](./commands/run)
|
||||
|
||||
## Connect CLI to self hosted Infisical
|
||||
|
||||
<Accordion title="Optional: point CLI to self-hosted">
|
||||
The CLI is set to connect to Infisical Cloud by default, but if you're running your own instance of Infisical, you can direct the CLI to it using one of the methods provided below.
|
||||
|
||||
#### Method 1: Use the updated CLI
|
||||
Beginning with CLI version V0.4.0, it is now possible to choose between logging in through the Infisical cloud or your own self-hosted instance. Simply execute the `infisical login` command and follow the on-screen instructions.
|
||||
|
||||
#### Method 2: Export environment variable
|
||||
You can point the CLI to the self hosted Infisical instance by exporting the environment variable `INFISICAL_API_URL` in your terminal.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Linux/MacOs">
|
||||
```bash
|
||||
# Set backend host
|
||||
export INFISICAL_API_URL="https://your-self-hosted-infisical.com/api"
|
||||
|
||||
# Remove backend host
|
||||
unset INFISICAL_API_URL
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Windows Powershell">
|
||||
```bash
|
||||
# Set backend host
|
||||
setx INFISICAL_API_URL "https://your-self-hosted-infisical.com/api"
|
||||
|
||||
# Remove backend host
|
||||
setx INFISICAL_API_URL ""
|
||||
|
||||
# NOTE: Once set or removed, please restart powershell for the change to take effect
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
#### Method 3: Set manually on every command
|
||||
Another option to point the CLI to your self hosted Infisical instance is to set it via a flag on every command you run.
|
||||
|
||||
```bash
|
||||
# Example
|
||||
infisical <any-command> --domain="https://your-self-hosted-infisical.com/api"
|
||||
```
|
||||
</Accordion>
|
||||
|
||||
## History
|
||||
|
||||
Your terminal keeps a history with the commands you run. When you create Infisical secrets directly from your terminal, they'll stay there for a while.
|
||||
@ -143,30 +127,101 @@ Your terminal keeps a history with the commands you run. When you create Infisic
|
||||
For security and privacy concerns, we recommend you to configure your terminal to ignore those specific Infisical commands.
|
||||
|
||||
<Accordion title="Ignore commands">
|
||||
<Tabs>
|
||||
<Tab title="Unix/Linux">
|
||||
<Tip>
|
||||
`$HOME/.profile` is pretty common but, you could place it under `$HOME/.profile.d/infisical.sh` or any profile file run at login
|
||||
</Tip>
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Unix/Linux">
|
||||
<Tip>
|
||||
`$HOME/.profile` is pretty common but, you could place it under `$HOME/.profile.d/infisical.sh` or any profile file run at login
|
||||
</Tip>
|
||||
```bash
|
||||
cat <<EOF >> $HOME/.profile && source $HOME/.profile
|
||||
|
||||
# Ignoring specific Infisical CLI commands
|
||||
DEFAULT_HISTIGNORE=$HISTIGNORE
|
||||
export HISTIGNORE="*infisical secrets set*:$DEFAULT_HISTIGNORE"
|
||||
EOF
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Windows">
|
||||
If you're on WSL, then you can use the Unix/Linux method.
|
||||
|
||||
<Tip>
|
||||
Here's some [documentation](https://superuser.com/a/1658331) about how to clear the terminal history, in PowerShell and CMD
|
||||
</Tip>
|
||||
|
||||
</Tab>
|
||||
|
||||
</Tabs>
|
||||
</Accordion>
|
||||
|
||||
## FAQ
|
||||
|
||||
<AccordionGroup>
|
||||
<Accordion title="Can I connect the CLI to my self-hosted Infisical instance?">
|
||||
Yes. The CLI is set to connect to Infisical Cloud by default, but if you're running your own instance of Infisical, you can direct the CLI to it using one of the methods provided below.
|
||||
|
||||
#### Method 1: Use the updated CLI
|
||||
|
||||
Beginning with CLI version V0.4.0, it is now possible to choose between logging in through the Infisical cloud or your own self-hosted instance. Simply execute the `infisical login` command and follow the on-screen instructions.
|
||||
|
||||
#### Method 2: Export environment variable
|
||||
|
||||
You can point the CLI to the self hosted Infisical instance by exporting the environment variable `INFISICAL_API_URL` in your terminal.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Linux/MacOs">
|
||||
```bash
|
||||
cat <<EOF >> $HOME/.profile && source $HOME/.profile
|
||||
# set backend host
|
||||
export INFISICAL_API_URL="https://your-self-hosted-infisical.com/api"
|
||||
|
||||
# Ignoring specific Infisical CLI commands
|
||||
DEFAULT_HISTIGNORE=$HISTIGNORE
|
||||
export HISTIGNORE="*infisical secrets set*:$DEFAULT_HISTIGNORE"
|
||||
EOF
|
||||
# remove backend host
|
||||
unset INFISICAL_API_URL
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Windows">
|
||||
If you're on WSL, then you can use the Unix/Linux method.
|
||||
</Tab>
|
||||
<Tab title="Windows Powershell">
|
||||
```bash
|
||||
# set backend host
|
||||
setx INFISICAL_API_URL "https://your-self-hosted-infisical.com/api"
|
||||
|
||||
<Tip>
|
||||
Here's some [documentation](https://superuser.com/a/1658331) about how to clear the terminal history, in PowerShell and CMD
|
||||
</Tip>
|
||||
# remove backend host
|
||||
setx INFISICAL_API_URL ""
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
||||
</Accordion>
|
||||
# NOTE: Once set or removed, please restart powershell for the change to take effect
|
||||
```
|
||||
|
||||
</Tab>
|
||||
|
||||
</Tabs>
|
||||
|
||||
#### Method 3: Set manually on every command
|
||||
|
||||
Another option to point the CLI to your self hosted Infisical instance is to set it via a flag on every command you run.
|
||||
|
||||
```bash
|
||||
# Example
|
||||
infisical <any-command> --domain="https://your-self-hosted-infisical.com/api"
|
||||
```
|
||||
|
||||
</Accordion>
|
||||
<Accordion title="Can I use the CLI with service tokens?">
|
||||
Yes. Please note, however, that service tokens are being deprecated in favor of [machine identities](/documentation/platform/identities/machine-identities). They will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).
|
||||
|
||||
To use Infisical for non local development scenarios, please create a service token. The service token will allow you to authenticate and interact with Infisical. Once you have created a service token with the required permissions, you’ll need to feed the token to the CLI.
|
||||
|
||||
```bash
|
||||
infisical export --token=<service-token>
|
||||
infisical secrets --token=<service-token>
|
||||
infisical run --token=<service-token> -- npm run dev
|
||||
```
|
||||
|
||||
#### Pass via shell environment variable
|
||||
The CLI is configured to look for an environment variable named `INFISICAL_TOKEN`. If set, it’ll attempt to use it for authentication.
|
||||
|
||||
```bash
|
||||
export INFISICAL_TOKEN=<service-token>
|
||||
```
|
||||
|
||||
</Accordion>
|
||||
</AccordionGroup>
|
||||
|
@ -1,87 +0,0 @@
|
||||
---
|
||||
title: "Kubernetes"
|
||||
---
|
||||
|
||||
The Infisical Secrets Operator fetches secrets from Infisical and saves them as Kubernetes secrets using the custom `InfisicalSecret` resource to define authentication and storage methods.
|
||||
The operator updates secrets continuously and can reload dependent deployments automatically on secret changes.
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- Connected to your cluster via kubectl
|
||||
- Have a project with secrets ready in [Infisical Cloud](https://app.infisical.com).
|
||||
- Create an [Infisical Token](/documentation/platform/token) scoped to an environment in your project in Infisical.
|
||||
|
||||
## Installation
|
||||
|
||||
Follow the instructions for either [Helm](https://helm.sh/) or [kubectl](https://github.com/kubernetes/kubectl) to install the Infisical Secrets Operator.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="Helm">
|
||||
Install the Infisical Helm repository
|
||||
|
||||
```console
|
||||
helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/'
|
||||
|
||||
helm repo update
|
||||
```
|
||||
|
||||
Install the Helm chart
|
||||
```console
|
||||
helm install --generate-name infisical-helm-charts/secrets-operator
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="Kubectl">
|
||||
The operator will be installed in `infisical-operator-system` namespace
|
||||
```
|
||||
kubectl apply -f https://raw.githubusercontent.com/Infisical/infisical/main/k8-operator/kubectl-install/install-secrets-operator.yaml
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
|
||||

## Usage

**Step 1: Create a Kubernetes secret containing the service token**

Once you have generated the service token, create a Kubernetes secret containing it by running the command below.

```bash
kubectl create secret generic service-token --from-literal=infisicalToken=<your-service-token-here>
```

**Step 2: Fill out the InfisicalSecret CRD and apply it to your cluster**

```yaml infisical-secrets-config.yaml
apiVersion: secrets.infisical.com/v1alpha1
kind: InfisicalSecret
metadata:
  # Name of this InfisicalSecret resource
  name: infisicalsecret-sample
spec:
  # The host that should be used to pull secrets from. If left empty, the value specified in Global configuration will be used
  hostAPI: https://app.infisical.com/api
  resyncInterval: 10
  authentication:
    serviceToken:
      serviceTokenSecretReference:
        secretName: service-token
        secretNamespace: default # <-- namespace where the service token secret from step 1 lives
      secretsScope:
        envSlug: dev
        secretsPath: "/"
  managedSecretReference:
    secretName: managed-secret # <-- the name of the Kubernetes secret that will be created
    secretNamespace: default # <-- where the Kubernetes secret should be created
```

```bash
kubectl apply -f infisical-secrets-config.yaml
```

You should now see a new Kubernetes secret automatically created in the namespace you defined in the `managedSecretReference` property above.
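
To confirm the sync worked, you can inspect the managed secret directly; the secret name and namespace below assume the example configuration above:

```bash
# Check that the operator created the managed secret
kubectl get secret managed-secret -n default

# Inspect the synced key/value pairs (values are base64-encoded)
kubectl get secret managed-secret -n default -o jsonpath='{.data}'
```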

See also:

- [Documentation for the Infisical Kubernetes Operator](../../integrations/platforms/kubernetes)

---
title: "Audit Log Streams"
description: "Learn how to stream Infisical Audit Logs to external logging providers."
---

<Info>
Audit log streaming is a paid feature.

If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact team@infisical.com to purchase an enterprise license to use it.
</Info>

Infisical Audit Log Streaming enables you to transmit your organization's Audit Logs to external logging providers for monitoring and analysis.

The logs are formatted in JSON, so your logging provider must support JSON-based log parsing.
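
As a rough illustration, each streamed event is a JSON object along these lines; the field names here are hypothetical, not the exact Infisical schema:

```json
{
  "event": "secret.read",
  "actor": "machine-identity-1a2b",
  "organization": "acme",
  "project": "backend",
  "ipAddress": "203.0.113.10",
  "timestamp": "2024-03-01T12:34:56Z"
}
```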

## Overview

<Steps>
<Step title="Navigate to Organization Settings in your sidebar." />
<Step title="Select Audit Log Streams Tab.">

</Step>
<Step title="Click on Create">


Provide the following values:
<ParamField path="Endpoint URL" type="string" required>
The HTTPS endpoint URL of the logging provider that collects the JSON stream.
</ParamField>
<ParamField path="Headers" type="string">
The HTTP headers your logging provider requires for identification and authentication.
</ParamField>
</Step>
</Steps>


Your Audit Logs are now ready to be streamed.

## Example Providers

### Better Stack

<Steps>
<Step title="Select Connect Source">

</Step>
<Step title="Provide a name and select platform"/>
<Step title="Provide Audit Log Stream inputs">


1. Copy the **endpoint** from Better Stack to the **Endpoint URL** field.
2. Create a new header with key **Authorization** and set the value as **Bearer \<source token from betterstack\>**.
</Step>
</Steps>
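
Before saving the stream, you can sanity-check the source token with a direct request. A sketch, assuming Better Stack's standard ingest host (`in.logs.betterstack.com`); substitute your own source token:

```bash
# Send a test JSON event with the same Authorization header the stream will use
curl -X POST "https://in.logs.betterstack.com" \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer <source-token-from-betterstack>" \
  -d '{"message": "infisical audit log stream test"}'
```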

### Datadog

<Steps>
<Step title="Navigate to API Keys section">

</Step>
<Step title="Select New Key and provide a key name">


</Step>
<Step title="Find your Datadog region specific logging endpoint.">


1. Navigate to the [Datadog Send Logs API documentation](https://docs.datadoghq.com/api/latest/logs/?code-lang=curl&site=us5#send-logs).
2. Pick your Datadog account region.
3. Obtain your Datadog logging endpoint URL.
</Step>
<Step title="Provide audit log stream inputs">


1. Copy the **logging endpoint** from Datadog to the **Endpoint URL** field.
2. Copy the **API Key** from the previous step.
3. Create a new header with key **DD-API-KEY** and set the value to the **API Key**.
</Step>
</Steps>
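
As with Better Stack, you can verify the key and endpoint before saving the stream. A sketch, assuming the US5 region intake endpoint from the linked Datadog docs; swap in your region's URL and real API key:

```bash
# Send a test log event to Datadog's HTTP intake with the DD-API-KEY header
curl -X POST "https://http-intake.logs.us5.datadoghq.com/api/v2/logs" \
  -H "Content-Type: application/json" \
  -H "DD-API-KEY: <your-api-key>" \
  -d '[{"message": "infisical audit log stream test", "ddsource": "infisical"}]'
```
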
---
title: "AWS IAM"
description: "How to dynamically generate AWS IAM Users."
---

The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on a configured AWS policy.

## Prerequisite

Infisical needs an initial AWS IAM user with the required permissions to create sub IAM users. This IAM user will be responsible for managing the lifecycle of new IAM users.
<Accordion title="Managing AWS IAM User minimum permission policy">
|
||||
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"iam:AttachUserPolicy",
|
||||
"iam:CreateAccessKey",
|
||||
"iam:CreateUser",
|
||||
"iam:DeleteAccessKey",
|
||||
"iam:DeleteUser",
|
||||
"iam:DeleteUserPolicy",
|
||||
"iam:DetachUserPolicy",
|
||||
"iam:GetUser",
|
||||
"iam:ListAccessKeys",
|
||||
"iam:ListAttachedUserPolicies",
|
||||
"iam:ListGroupsForUser",
|
||||
"iam:ListUserPolicies",
|
||||
"iam:PutUserPolicy",
|
||||
"iam:AddUserToGroup",
|
||||
"iam:RemoveUserFromGroup"
|
||||
],
|
||||
"Resource": ["*"]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
To minimize managing user access you can attach a resource in format
|
||||
|
||||
> arn:aws:iam::\<account-id\>:user/\<aws-scope-path\>
|
||||
|
||||
Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** with a path to minimize managing user access.
|
||||
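
For instance, a statement scoped to a hypothetical path (`infisical-dynamic/`) with a placeholder account id would look like this; only IAM users created under that path can then be managed:

```json
{
  "Effect": "Allow",
  "Action": ["iam:CreateUser", "iam:CreateAccessKey", "iam:DeleteUser"],
  "Resource": ["arn:aws:iam::123456789012:user/infisical-dynamic/*"]
}
```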

</Accordion>

## Set up Dynamic Secrets with AWS IAM

<Steps>
<Step title="Secret Overview Dashboard">
Navigate to the Secret Overview dashboard and select the environment to which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">

</Step>
<Step title="Select AWS IAM">

</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>

<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value when a secret is generated)
</ParamField>

<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>

<ParamField path="AWS Access Key" type="string" required>
The managing AWS IAM User Access Key
</ParamField>

<ParamField path="AWS Secret Key" type="string" required>
The managing AWS IAM User Secret Key
</ParamField>

<ParamField path="AWS IAM Path" type="string">
[IAM AWS Path](https://aws.amazon.com/blogs/security/optimize-aws-administration-with-iam-paths/) to scope created IAM User resource access.
</ParamField>

<ParamField path="AWS Region" type="string" required>
The AWS data center region.
</ParamField>

<ParamField path="IAM User Permission Boundary" type="string" required>
The IAM Policy ARN of the [AWS Permissions Boundary](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to attach to the IAM users created by this dynamic secret.
</ParamField>

<ParamField path="AWS IAM Groups" type="string">
The AWS IAM groups that should be assigned to the created users. Multiple values can be provided by separating them with commas.
</ParamField>

<ParamField path="AWS Policy ARNs" type="string">
The AWS IAM managed policies that should be attached to the created users. Multiple values can be provided by separating them with commas.
</ParamField>

<ParamField path="AWS IAM Policy Document" type="string">
The AWS IAM inline policy that should be attached to the created users. Multiple values can be provided by separating them with commas.
</ParamField>



</Step>
<Step title="Click 'Submit'">
|
||||
After submitting the form, you will see a dynamic secret created in the dashboard.
|
||||
|
||||

|
||||
</Step>
|
||||
<Step title="Generate dynamic secrets">
|
||||
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
|
||||
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
|
||||
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
|
||||
|
||||

|
||||

|
||||
|
||||
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
|
||||
|
||||

|
||||
|
||||
<Tip>
|
||||
Ensure that the TTL for the lease fall within the maximum TTL defined when configuring the dynamic secret in step 4.
|
||||
</Tip>
|
||||
|
||||
|
||||
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
|
||||
|
||||

|
||||
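
The generated credentials are a regular IAM access key pair, so you can use them with standard AWS tooling. A quick sanity check; the key values below are placeholders for the values shown in the lease:

```bash
# Export the credentials returned by the lease (placeholder values)
export AWS_ACCESS_KEY_ID=<access-key-from-lease>
export AWS_SECRET_ACCESS_KEY=<secret-key-from-lease>

# Confirm the temporary IAM user is valid
aws sts get-caller-identity
```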
</Step>
</Steps>

## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the lease details and delete the lease ahead of its expiration time.



## Renew Leases
To extend the life of the generated dynamic secret lease past its initial time to live, simply click on **Renew** as illustrated below.


<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret.
</Warning>

---
title: "Dynamic Secrets"
sidebarTitle: "Overview"
description: "Learn how to generate secrets dynamically on-demand."
---

## Introduction

Contrary to static key-value secrets, which require manual input of data into the secure Infisical storage, **dynamic secrets are generated on-demand upon access**.

**Dynamic secrets are unique to every identity using them**. Such secrets are generated only at the moment they are retrieved, eliminating the possibility of theft or reuse by another identity. Thanks to Infisical's integrated revocation capabilities, dynamic secrets can be promptly invalidated post-use, significantly reducing their lifespan.

## Benefits of Dynamic Secrets

This approach offers several advantages in terms of security and management:

- **Scalability**: Dynamic secret management systems can scale more effectively to handle a large number of services and applications, as they automate much of the overhead associated with manual secret management.

Dynamic secrets are particularly useful in environments with stringent security requirements, such as cloud environments, distributed systems, and microservices architectures, where they help to manage database credentials, API keys, tokens, and other types of secrets.

## Infisical Dynamic Secret Templates

1. [PostgreSQL](./postgresql)
2. [MySQL](./mysql)
3. [Cassandra](./cassandra)
4. [Oracle](./oracle)
5. [AWS IAM](./aws-iam)

---
title: Machine Identities
description: "Learn how to use Machine Identities to programmatically interact with Infisical."
---

A typical workflow for using identities consists of four steps:

1. Creating the identity with a name and [role](/documentation/platform/role-based-access-controls) in Organization Access Control > Machine Identities.
   This step also involves configuring an authentication method for it such as [Universal Auth](/documentation/platform/identities/universal-auth).
2. Adding the identity to the project(s) you want it to have access to.
3. Authenticating the identity with the Infisical API based on the configured authentication method on it and receiving a short-lived access token back.
4. Authenticating subsequent requests with the Infisical API using the short-lived access token, as sketched below.
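
A minimal sketch of steps 3 and 4 using Universal Auth against Infisical Cloud; the client id, client secret, and project id values are placeholders, and `jq` is assumed to be installed:

```bash
# Step 3: exchange the identity's client credentials for a short-lived access token
ACCESS_TOKEN=$(curl -s https://app.infisical.com/api/v1/auth/universal-auth/login \
  -d "clientId=<client-id>" \
  -d "clientSecret=<client-secret>" | jq -r '.accessToken')

# Step 4: use the token on subsequent API requests, e.g. fetching secrets
curl -s "https://app.infisical.com/api/v3/secrets/raw?workspaceId=<project-id>&environment=dev" \
  -H "Authorization: Bearer $ACCESS_TOKEN"
```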

<Note>
Currently, identities can only be used to make authenticated requests to the Infisical API, SDKs, Terraform, Kubernetes Operator, and Infisical Agent. They do not work with clients such as the CLI, Ansible lookup plugin, etc.

Machine Identity support for the rest of the clients is planned to be released in the current quarter.
</Note>

## Authentication Methods

To interact with various resources in Infisical, Machine Identities are able to utilize a number of authentication methods:

## FAQ

<AccordionGroup>
<Accordion title="Can I use machine identities with the CLI?">

Yes - Identities can be used with the CLI.

You can learn more about how to do this in the CLI quickstart [here](/cli/usage).

</Accordion>

<Accordion title="What is the difference between an identity and service token?">
A service token is a project-level authentication method that is being deprecated in favor of identities. The service token method will be removed in the future in accordance with the deprecation notice and timeline stated [here](https://infisical.com/blog/deprecating-api-keys).

Amongst many differences, identities provide broader access over the Infisical API, utilize the same
permission system as user identities, and come with a significantly larger number of configurable authentication and security features.
</Accordion>
</AccordionGroup>

To interact with various resources in Infisical, users are able to utilize a number of authentication methods:
- **Email & Password**: the most common authentication method that is used for authentication into the Web Dashboard and Infisical CLI. It is recommended to utilize [Multi-factor Authentication](/documentation/platform/mfa) in addition to it.
- **Service Tokens**: Service tokens allow users to authenticate into the CLI and other clients under their own identity. For the majority of use cases, it is not a recommended approach. Instead, it is often a good idea to utilize [Machine Identities](./machine-identities) with [Universal Authentication](/documentation/platform/identities/universal-auth).
- **SSO**: Infisical natively integrates with a number of SSO identity providers like [Google](/documentation/platform/sso/google), [GitHub](/documentation/platform/sso/github), and [GitLab](/documentation/platform/sso/gitlab).
- **SAML SSO**: It is also possible to set up SAML SSO integration with identity providers like [Okta](/documentation/platform/sso/okta), [Microsoft Entra ID](/documentation/platform/sso/azure) (formerly known as Azure AD), [JumpCloud](/documentation/platform/sso/jumpcloud), [Google](/documentation/platform/sso/google-saml), and more.
- **LDAP**: For organizations with more advanced needs, Infisical also provides user authentication with [LDAP](/documentation/platform/ldap/overview) that includes a number of LDAP providers.

---
title: "IP Allowlisting"
description: "Restrict access to your secrets in Infisical using trusted IPs"
---

<Warning>
IP allowlisting at the project-level is being replaced with IP allowlisting at the token-level, now available with the Service Token V3 authentication method.

Instead of providing trusted IPs (specific IPs and CIDR ranges) to be applied across all service tokens,
you can now specify trusted IPs at the token-level.

</Warning>
<Info>
Note that IP Allowlisting is a paid feature.

If you're using Infisical Cloud, then it is available under the **Pro Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
</Info>

Projects in Infisical can be configured to restrict client access to specific IP addresses or CIDR ranges. This applies to any client using service tokens and
can be useful, for example, for limiting access to traffic coming from corporate networks.

By default, each project is initialized with the `0.0.0.0/0` entry, representing all possible IPv4 addresses.
For enhanced security, we strongly recommend replacing the default entry with your client IPs to tighten access to your secrets.

<Note>
You must be a project `admin` to manage your project's IP whitelist.
</Note>



## Creating a trusted IP entry

To create a trusted IP entry, head over to the **IP Whitelist** tab in your project. When creating an entry,
you can specify either a specific IP address like `192.0.2.1` or a CIDR range like `2001:db8::/32`; both IPv4 and IPv6
formats are accepted.

