Compare commits


27 Commits

Author SHA1 Message Date
8f1e662688 Feat: Added include imports to export command 2024-04-05 17:30:30 -07:00
dcbbb67f03 Feat: Added secret interpolation to get secret by name command 2024-04-05 17:30:19 -07:00
c0fb3c905e Docs: UA Auth Docs 2024-04-05 17:10:38 -07:00
18b0766d96 Update folders.go 2024-04-05 15:47:18 -07:00
b423696630 Feat: UA CLI Support 2024-04-05 15:22:48 -07:00
bf60489fde Feat: Added UA support to export command 2024-04-05 15:20:03 -07:00
85ea6d2585 Fix: Cleanup 2024-04-05 15:19:52 -07:00
a97737ab90 Feat: Folder support for Machine Identities 2024-04-05 15:19:44 -07:00
3793858f0a Feat: Export support for Machine Identities 2024-04-05 15:18:09 -07:00
66c48fbff8 Update model.go 2024-04-05 15:16:12 -07:00
b6b040375b Feat: UA CLI Support 2024-04-05 15:13:47 -07:00
9ad5e082e2 Feat: UA CLI support 2024-04-05 15:13:47 -07:00
f1805811aa Feat: Added token renew command 2024-04-05 15:13:47 -07:00
b135258cce Feat: Added UA support to secret commands 2024-04-05 15:13:47 -07:00
a651de53d1 Feat: Added UA support to run command 2024-04-05 15:13:00 -07:00
7d0a535f46 Feat: Added UA login support (defaults to 'user') 2024-04-05 15:12:32 -07:00
c4e3dd84e3 Feat: Added UA support to folder command 2024-04-05 15:12:32 -07:00
9193f13970 Feat: Added UA support to export command 2024-04-05 15:12:32 -07:00
016f22c295 Fix: Cleanup 2024-04-05 15:12:32 -07:00
4d7182c9b1 Fix: Removed unused struct and included secret type on secret response 2024-04-05 15:12:32 -07:00
6ea7b04efa Feat: Folder support for Machine Identities 2024-04-05 15:12:32 -07:00
3981d61853 Feat: Support for Machine Identities auth 2024-04-05 15:12:32 -07:00
3d391b4e2d Feat: Secrets cmd support for Machine Identities 2024-04-05 15:12:32 -07:00
4123177133 Feat: Run cmd support for Machine Identities 2024-04-05 15:09:38 -07:00
4d61188d0f Feat: Folder support for Machine Identities 2024-04-05 15:08:52 -07:00
fa33f35fcd Feat: Export support for Machine Identities 2024-04-05 15:08:52 -07:00
13629223fb Chore: Moved universalAuthLogin function to utils 2024-04-05 15:08:52 -07:00
149 changed files with 1962 additions and 5667 deletions
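The commits above mostly add universal auth (machine identity) support across the CLI: login, token renew, and UA-aware run, secrets, folders, and export commands. As a rough illustration of the login exchange those commands build on — the endpoint path and response field below are assumptions, not taken from this diff — a minimal TypeScript sketch:

// Illustrative sketch only; assumes a universal-auth login endpoint and an accessToken response field.
async function universalAuthLogin(baseUrl: string, clientId: string, clientSecret: string): Promise<string> {
  const res = await fetch(`${baseUrl}/api/v1/auth/universal-auth/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ clientId, clientSecret })
  });
  if (!res.ok) throw new Error(`universal auth login failed with status ${res.status}`);
  const data = (await res.json()) as { accessToken: string };
  return data.accessToken; // the secret/run/export commands would send this as a Bearer token
}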

.gitignore vendored
View File

@ -59,8 +59,6 @@ yarn-error.log*
# Infisical init
.infisical.json
.infisicalignore
# Editor specific
.vscode/*

View File

@ -1,5 +1 @@
.github/resources/docker-compose.be-test.yml:generic-api-key:16
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/IdentityRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/IdentityTab/components/IdentityRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:304
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/MemberRbacSection.tsx:generic-api-key:206
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292

View File

@ -1,7 +1,6 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG SAML_ORG_SLUG=saml-org-slug-default
FROM node:20-alpine AS base
@ -36,8 +35,6 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
# Build
RUN npm run build
@ -103,9 +100,6 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
WORKDIR /

View File

@ -46,7 +46,7 @@ const deleteSecretImport = async (id: string) => {
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// check for default environments
@ -66,7 +66,7 @@ describe("Secret Import Router", async () => {
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
@ -103,10 +103,10 @@ describe("Secret Import Router", async () => {
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const devImportDetails = { path: "/", envSlug: "dev" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport1 = await createSecretImport(devImportDetails.path, devImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
@ -136,7 +136,7 @@ describe("Secret Import Router", async () => {
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.stringMatching(prodImportDetails.envSlug),
slug: expect.stringMatching(devImportDetails.envSlug),
id: expect.any(String)
})
})
@ -166,7 +166,7 @@ describe("Secret Import Router", async () => {
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport("/", "prod");
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
// check for default environments

View File

@ -5,7 +5,6 @@ import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-se
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@ -26,7 +25,6 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@ -91,8 +89,6 @@ declare module "fastify" {
orgRole: TOrgRoleServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
groupProject: TGroupProjectServiceFactory;
apiKey: TApiKeyServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;

View File

@ -29,15 +29,6 @@ import {
TGitAppOrg,
TGitAppOrgInsert,
TGitAppOrgUpdate,
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate,
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate,
TGroups,
TGroupsInsert,
TGroupsUpdate,
TIdentities,
TIdentitiesInsert,
TIdentitiesUpdate,
@ -197,9 +188,6 @@ import {
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate,
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate,
TUsers,
TUsersInsert,
TUsersUpdate,
@ -211,22 +199,6 @@ import {
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate
>;
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate
>;
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
TUserEncryptionKeys,

View File

@ -1,82 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.Groups))) {
await knex.schema.createTable(TableName.Groups, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("name").notNullable();
t.string("slug").notNullable();
t.unique(["orgId", "slug"]);
t.string("role").notNullable();
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.Groups);
if (!(await knex.schema.hasTable(TableName.UserGroupMembership))) {
await knex.schema.createTable(TableName.UserGroupMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); // link to user and link to groups cascade on groups
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.UserGroupMembership);
if (!(await knex.schema.hasTable(TableName.GroupProjectMembership))) {
await knex.schema.createTable(TableName.GroupProjectMembership, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.GroupProjectMembership);
if (!(await knex.schema.hasTable(TableName.GroupProjectMembershipRole))) {
await knex.schema.createTable(TableName.GroupProjectMembershipRole, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("role").notNullable();
t.uuid("projectMembershipId").notNullable();
t.foreign("projectMembershipId").references("id").inTable(TableName.GroupProjectMembership).onDelete("CASCADE");
// until the role is changed/removed, the role should not be deleted
t.uuid("customRoleId");
t.foreign("customRoleId").references("id").inTable(TableName.ProjectRoles);
t.boolean("isTemporary").notNullable().defaultTo(false);
t.string("temporaryMode");
t.string("temporaryRange"); // could be cron or relative time like 1H or 1minute etc
t.datetime("temporaryAccessStartTime");
t.datetime("temporaryAccessEndTime");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.GroupProjectMembershipRole);
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembershipRole);
await knex.schema.dropTableIfExists(TableName.UserGroupMembership);
await dropOnUpdateTrigger(knex, TableName.UserGroupMembership);
await knex.schema.dropTableIfExists(TableName.GroupProjectMembership);
await dropOnUpdateTrigger(knex, TableName.GroupProjectMembership);
await knex.schema.dropTableIfExists(TableName.Groups);
await dropOnUpdateTrigger(knex, TableName.Groups);
}

View File

@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupProjectMembershipRolesSchema = z.object({
id: z.string().uuid(),
role: z.string(),
projectMembershipId: z.string().uuid(),
customRoleId: z.string().uuid().nullable().optional(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroupProjectMembershipRoles = z.infer<typeof GroupProjectMembershipRolesSchema>;
export type TGroupProjectMembershipRolesInsert = Omit<
z.input<typeof GroupProjectMembershipRolesSchema>,
TImmutableDBKeys
>;
export type TGroupProjectMembershipRolesUpdate = Partial<
Omit<z.input<typeof GroupProjectMembershipRolesSchema>, TImmutableDBKeys>
>;

View File

@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupProjectMembershipsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
groupId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroupProjectMemberships = z.infer<typeof GroupProjectMembershipsSchema>;
export type TGroupProjectMembershipsInsert = Omit<z.input<typeof GroupProjectMembershipsSchema>, TImmutableDBKeys>;
export type TGroupProjectMembershipsUpdate = Partial<
Omit<z.input<typeof GroupProjectMembershipsSchema>, TImmutableDBKeys>
>;

View File

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const GroupsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
name: z.string(),
slug: z.string(),
role: z.string(),
roleId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGroups = z.infer<typeof GroupsSchema>;
export type TGroupsInsert = Omit<z.input<typeof GroupsSchema>, TImmutableDBKeys>;
export type TGroupsUpdate = Partial<Omit<z.input<typeof GroupsSchema>, TImmutableDBKeys>>;
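The generated schema files above pair a runtime zod validator with insert/update types that omit immutable columns via TImmutableDBKeys (its members are not shown in this diff; id, createdAt, and updatedAt are assumed). A hypothetical usage sketch, with made-up values:

// Hypothetical usage; values are illustrative and the import path mirrors the generated file above.
import { GroupsSchema, TGroupsInsert } from "./groups";

const row: unknown = {
  id: "0b6f5c1e-1111-4222-8333-444455556666",
  orgId: "9a8b7c6d-1111-4222-8333-444455556666",
  name: "Platform Team",
  slug: "platform-team",
  role: "member",
  roleId: null,
  createdAt: new Date(),
  updatedAt: new Date()
};

// Runtime validation of a database row against the generated schema.
const group = GroupsSchema.parse(row);

// Insert payloads omit the immutable keys, so only the mutable columns are supplied.
const newGroup: TGroupsInsert = {
  orgId: group.orgId,
  name: "New Group",
  slug: "new-group",
  role: "member"
};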

View File

@ -7,9 +7,6 @@ export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
export * from "./group-project-memberships";
export * from "./groups";
export * from "./identities";
export * from "./identity-access-tokens";
export * from "./identity-org-memberships";
@ -64,6 +61,5 @@ export * from "./trusted-ips";
export * from "./user-actions";
export * from "./user-aliases";
export * from "./user-encryption-keys";
export * from "./user-group-membership";
export * from "./users";
export * from "./webhooks";

View File

@ -2,10 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
UserGroupMembership = "user_group_membership",
UserAliases = "user_aliases",
UserEncryptionKey = "user_encryption_keys",
AuthTokens = "auth_tokens",

View File

@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const UserGroupMembershipSchema = z.object({
id: z.string().uuid(),
userId: z.string().uuid(),
groupId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TUserGroupMembership = z.infer<typeof UserGroupMembershipSchema>;
export type TUserGroupMembershipInsert = Omit<z.input<typeof UserGroupMembershipSchema>, TImmutableDBKeys>;
export type TUserGroupMembershipUpdate = Partial<Omit<z.input<typeof UserGroupMembershipSchema>, TImmutableDBKeys>>;

View File

@ -1,220 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { GROUPS } from "@app/lib/api-docs";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
method: "POST",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(GROUPS.CREATE.slug),
role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
}),
response: {
200: GroupsSchema
}
},
handler: async (req) => {
const group = await server.services.group.createGroup({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return group;
}
});
server.route({
url: "/:currentSlug",
method: "PATCH",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
currentSlug: z.string().trim().describe(GROUPS.UPDATE.currentSlug)
}),
body: z
.object({
name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(GROUPS.UPDATE.slug),
role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
})
.partial(),
response: {
200: GroupsSchema
}
},
handler: async (req) => {
const group = await server.services.group.updateGroup({
currentSlug: req.params.currentSlug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return group;
}
});
server.route({
url: "/:slug",
method: "DELETE",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.DELETE.slug)
}),
response: {
200: GroupsSchema
}
},
handler: async (req) => {
const group = await server.services.group.deleteGroup({
groupSlug: req.params.slug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return group;
}
});
server.route({
method: "GET",
url: "/:slug/users",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.LIST_USERS.slug)
}),
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().optional().describe(GROUPS.LIST_USERS.username)
}),
response: {
200: z.object({
users: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
.merge(
z.object({
isPartOfGroup: z.boolean()
})
)
.array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { users, totalCount } = await server.services.group.listGroupUsers({
groupSlug: req.params.slug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { users, totalCount };
}
});
server.route({
method: "POST",
url: "/:slug/users/:username",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.ADD_USER.slug),
username: z.string().trim().describe(GROUPS.ADD_USER.username)
}),
response: {
200: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
}
},
handler: async (req) => {
const user = await server.services.group.addUserToGroup({
groupSlug: req.params.slug,
username: req.params.username,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return user;
}
});
server.route({
method: "DELETE",
url: "/:slug/users/:username",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
slug: z.string().trim().describe(GROUPS.DELETE_USER.slug),
username: z.string().trim().describe(GROUPS.DELETE_USER.username)
}),
response: {
200: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
}
},
handler: async (req) => {
const user = await server.services.group.removeUserFromGroup({
groupSlug: req.params.slug,
username: req.params.username,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return user;
}
});
};
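For reference, the create-group route at the top of this removed file accepts a name, an optional pre-slugified slug, and a role defaulting to no-access. A hypothetical request body that satisfies the zod schema shown above (values are illustrative; the router is mounted under the /groups prefix registered in the next file):

// Hypothetical payload for POST /groups, matching the body schema above.
const createGroupBody = {
  name: "Platform Team",
  slug: "platform-team", // optional; must already be a valid slug, since refine() compares it to slugify(slug)
  role: "member" // any non-empty string passes validation; defaults to OrgMembershipRole.NoAccess when omitted
};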

View File

@ -1,6 +1,5 @@
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
@ -54,7 +53,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
await server.register(registerGroupRouter, { prefix: "/groups" });
await server.register(
async (privilegeRouter) => {
await privilegeRouter.register(registerUserAdditionalPrivilegeRouter, { prefix: "/users" });

View File

@ -171,7 +171,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
req.permission.authMethod,
req.permission.orgId
);
return { data: { permissions, membership } };
}
});

View File

@ -206,7 +206,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
schemas: z.array(z.string()),
userName: z.string().trim(),
userName: z.string().trim().email(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
@ -227,7 +227,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
userName: z.string().trim().email(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
@ -262,244 +262,25 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/Users/:userId",
method: "DELETE",
method: "PATCH",
schema: {
params: z.object({
userId: z.string().trim()
}),
response: {
200: z.object({})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.deleteScimUser({
userId: req.params.userId,
orgId: req.permission.orgId
});
return user;
}
});
server.route({
url: "/Groups",
method: "POST",
schema: {
body: z.object({
schemas: z.array(z.string()),
displayName: z.string().trim(),
members: z.array(z.any()).length(0).optional() // okta-specific
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(z.any()).length(0),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.createScimGroup({
displayName: req.body.displayName,
orgId: req.permission.orgId
});
return group;
}
});
server.route({
url: "/Groups",
method: "GET",
schema: {
querystring: z.object({
startIndex: z.coerce.number().default(1),
count: z.coerce.number().default(20),
filter: z.string().trim().optional()
}),
response: {
200: z.object({
Resources: z.array(
z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(z.any()).length(0),
meta: z.object({
resourceType: z.string().trim()
})
})
),
itemsPerPage: z.number(),
schemas: z.array(z.string()),
startIndex: z.number(),
totalResults: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const groups = await req.server.services.scim.listScimGroups({
orgId: req.permission.orgId,
offset: req.query.startIndex,
limit: req.query.count
});
return groups;
}
});
server.route({
url: "/Groups/:groupId",
method: "GET",
schema: {
params: z.object({
groupId: z.string().trim()
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.getScimGroup({
groupId: req.params.groupId,
orgId: req.permission.orgId
});
return group;
}
});
server.route({
url: "/Groups/:groupId",
method: "PUT",
schema: {
params: z.object({
groupId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(z.any()).length(0)
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.updateScimGroupNamePut({
groupId: req.params.groupId,
orgId: req.permission.orgId,
displayName: req.body.displayName
});
return group;
}
});
server.route({
url: "/Groups/:groupId",
method: "PATCH",
schema: {
params: z.object({
groupId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
Operations: z.array(
z.union([
z.object({
op: z.literal("replace"),
value: z.object({
id: z.string().trim(),
displayName: z.string().trim()
})
}),
z.object({
op: z.literal("remove"),
path: z.string().trim()
}),
z.object({
op: z.literal("add"),
value: z.object({
value: z.string().trim(),
display: z.string().trim().optional()
})
})
])
)
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
),
meta: z.object({
resourceType: z.string().trim()
z.object({
op: z.string().trim(),
path: z.string().trim().optional(),
value: z.union([
z.object({
active: z.boolean()
}),
z.string().trim()
])
})
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
// console.log("PATCH /Groups/:groupId req.body: ", req.body);
// console.log("PATCH /Groups/:groupId req.body: ", req.body.Operations[0]);
const group = await req.server.services.scim.updateScimGroupNamePatch({
groupId: req.params.groupId,
orgId: req.permission.orgId,
operations: req.body.Operations
});
return group;
}
});
server.route({
url: "/Groups/:groupId",
method: "DELETE",
schema: {
params: z.object({
groupId: z.string().trim()
)
}),
response: {
200: z.object({})
@ -507,12 +288,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const group = await req.server.services.scim.deleteScimGroup({
groupId: req.params.groupId,
orgId: req.permission.orgId
const user = await req.server.services.scim.updateScimUser({
userId: req.params.userId,
orgId: req.permission.orgId,
operations: req.body.Operations
});
return group;
return user;
}
});

View File

@ -1,157 +0,0 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TGroups } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
export const groupDALFactory = (db: TDbClient) => {
const groupOrm = ormify(db, TableName.Groups);
const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => {
try {
const query = (tx || db)(TableName.Groups)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.select(selectAllTableCols(TableName.Groups));
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
}
const res = await query;
return res;
} catch (err) {
throw new DatabaseError({ error: err, name: "Find groups" });
}
};
const findByOrgId = async (orgId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.Groups)
.where(`${TableName.Groups}.orgId`, orgId)
.leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
.select(selectAllTableCols(TableName.Groups))
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
.select(db.ref("name").as("crName").withSchema(TableName.OrgRoles))
.select(db.ref("slug").as("crSlug").withSchema(TableName.OrgRoles))
.select(db.ref("description").as("crDescription").withSchema(TableName.OrgRoles))
.select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles));
return docs.map(({ crId, crDescription, crSlug, crPermission, crName, ...el }) => ({
...el,
customRole: el.roleId
? {
id: crId,
name: crName,
slug: crSlug,
permissions: crPermission,
description: crDescription
}
: undefined
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindByOrgId" });
}
};
const countAllGroupMembers = async ({ orgId, groupId }: { orgId: string; groupId: string }) => {
try {
interface CountResult {
count: string;
}
const doc = await db<CountResult>(TableName.OrgMembership)
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserGroupMembership, function () {
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
`${TableName.UserGroupMembership}.groupId`,
"=",
db.raw("?", [groupId])
);
})
.where({ isGhost: false })
.count(`${TableName.Users}.id`)
.first();
return parseInt((doc?.count as string) || "0", 10);
} catch (err) {
throw new DatabaseError({ error: err, name: "Count all group members" });
}
};
// special query
const findAllGroupMembers = async ({
orgId,
groupId,
offset = 0,
limit,
username
}: {
orgId: string;
groupId: string;
offset?: number;
limit?: number;
username?: string;
}) => {
try {
let query = db(TableName.OrgMembership)
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserGroupMembership, function () {
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
`${TableName.UserGroupMembership}.groupId`,
"=",
db.raw("?", [groupId])
);
})
.select(
db.ref("id").withSchema(TableName.OrgMembership),
db.ref("groupId").withSchema(TableName.UserGroupMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId")
)
.where({ isGhost: false })
.offset(offset);
if (limit) {
query = query.limit(limit);
}
if (username) {
query = query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
}
const members = await query;
return members.map(
({ email, username: memberUsername, firstName, lastName, userId, groupId: memberGroupId }) => ({
id: userId,
email,
username: memberUsername,
firstName,
lastName,
isPartOfGroup: !!memberGroupId
})
);
} catch (error) {
throw new DatabaseError({ error, name: "Find all org members" });
}
};
return {
findGroups,
findByOrgId,
countAllGroupMembers,
findAllGroupMembers,
...groupOrm
};
};

View File

@ -1,474 +0,0 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, SecretKeyEncoding, TOrgRoles } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { decryptAsymmetric, encryptAsymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "../../../services/group-project/group-project-dal";
import { TOrgDALFactory } from "../../../services/org/org-dal";
import { TProjectDALFactory } from "../../../services/project/project-dal";
import { TProjectBotDALFactory } from "../../../services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "../../../services/project-key/project-key-dal";
import { TUserDALFactory } from "../../../services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGroupDALFactory } from "./group-dal";
import {
TAddUserToGroupDTO,
TCreateGroupDTO,
TDeleteGroupDTO,
TListGroupUsersDTO,
TRemoveUserFromGroupDTO,
TUpdateGroupDTO
} from "./group-types";
import { TUserGroupMembershipDALFactory } from "./user-group-membership-dal";
type TGroupServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findOne" | "findUserEncKeyByUsername">;
groupDAL: Pick<
TGroupDALFactory,
"create" | "findOne" | "update" | "delete" | "findAllGroupMembers" | "countAllGroupMembers"
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
orgDAL: Pick<TOrgDALFactory, "findMembership">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,
"findOne" | "create" | "delete" | "filterProjectsByUserMembership"
>;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "create" | "delete" | "findLatestProjectKey">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TGroupServiceFactory = ReturnType<typeof groupServiceFactory>;
export const groupServiceFactory = ({
userDAL,
groupDAL,
groupProjectDAL,
orgDAL,
userGroupMembershipDAL,
projectDAL,
projectBotDAL,
projectKeyDAL,
permissionService,
licenseService
}: TGroupServiceFactoryDep) => {
const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
if (!actorOrgId) throw new BadRequestError({ message: "Failed to create group without organization" });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to create group due to plan restriction. Upgrade plan to create group."
});
const { permission: rolePermission, role: customRole } = await permissionService.getOrgPermissionByRole(
role,
actorOrgId
);
const isCustomRole = Boolean(customRole);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges) throw new BadRequestError({ message: "Failed to create a more privileged group" });
const group = await groupDAL.create({
name,
slug: slug || slugify(`${name}-${alphaNumericNanoId(4)}`),
orgId: actorOrgId,
role: isCustomRole ? OrgMembershipRole.Custom : role,
roleId: customRole?.id
});
return group;
};
const updateGroup = async ({
currentSlug,
name,
slug,
role,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TUpdateGroupDTO) => {
if (!actorOrgId) throw new BadRequestError({ message: "Failed to update group without organization" });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to update group due to plan restrictio Upgrade plan to update group."
});
const group = await groupDAL.findOne({ orgId: actorOrgId, slug: currentSlug });
if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${currentSlug}` });
let customRole: TOrgRoles | undefined;
if (role) {
const { permission: rolePermission, role: customOrgRole } = await permissionService.getOrgPermissionByRole(
role,
group.orgId
);
const isCustomRole = Boolean(customOrgRole);
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredNewRolePermission)
throw new BadRequestError({ message: "Failed to create a more privileged group" });
if (isCustomRole) customRole = customOrgRole;
}
const [updatedGroup] = await groupDAL.update(
{
orgId: actorOrgId,
slug: currentSlug
},
{
name,
slug: slug ? slugify(slug) : undefined,
...(role
? {
role: customRole ? OrgMembershipRole.Custom : role,
roleId: customRole?.id ?? null
}
: {})
}
);
return updatedGroup;
};
const deleteGroup = async ({ groupSlug, actor, actorId, actorAuthMethod, actorOrgId }: TDeleteGroupDTO) => {
if (!actorOrgId) throw new BadRequestError({ message: "Failed to delete group without organization" });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to delete group due to plan restriction. Upgrade plan to delete group."
});
const [group] = await groupDAL.delete({
orgId: actorOrgId,
slug: groupSlug
});
return group;
};
const listGroupUsers = async ({
groupSlug,
offset,
limit,
username,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TListGroupUsersDTO) => {
if (!actorOrgId) throw new BadRequestError({ message: "Failed to list group users without organization" });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findOne({
orgId: actorOrgId,
slug: groupSlug
});
if (!group)
throw new BadRequestError({
message: `Failed to find group with slug ${groupSlug}`
});
const users = await groupDAL.findAllGroupMembers({
orgId: group.orgId,
groupId: group.id,
offset,
limit,
username
});
const totalCount = await groupDAL.countAllGroupMembers({
orgId: group.orgId,
groupId: group.id
});
return { users, totalCount };
};
const addUserToGroup = async ({
groupSlug,
username,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TAddUserToGroupDTO) => {
if (!actorOrgId) throw new BadRequestError({ message: "Failed to add user to group without organization" });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
orgId: actorOrgId,
slug: groupSlug
});
if (!group)
throw new BadRequestError({
message: `Failed to find group with slug ${groupSlug}`
});
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
// get user with username
const user = await userDAL.findUserEncKeyByUsername({
username
});
if (!user)
throw new BadRequestError({
message: `Failed to find user with username ${username}`
});
// check if user group membership already exists
const existingUserGroupMembership = await userGroupMembershipDAL.findOne({
groupId: group.id,
userId: user.userId
});
if (existingUserGroupMembership)
throw new BadRequestError({
message: `User ${username} is already part of the group ${groupSlug}`
});
// check if user is even part of the organization
const existingUserOrgMembership = await orgDAL.findMembership({
userId: user.userId,
orgId: actorOrgId
});
if (!existingUserOrgMembership)
throw new BadRequestError({
message: `User ${username} is not part of the organization`
});
await userGroupMembershipDAL.create({
userId: user.userId,
groupId: group.id
});
// check which projects the group is part of
const projectIds = (
await groupProjectDAL.find({
groupId: group.id
})
).map((gp) => gp.projectId);
const keys = await projectKeyDAL.find({
receiverId: user.userId,
$in: {
projectId: projectIds
}
});
const keysSet = new Set(keys.map((k) => k.projectId));
const projectsToAddKeyFor = projectIds.filter((p) => !keysSet.has(p));
for await (const projectId of projectsToAddKeyFor) {
const ghostUser = await projectDAL.findProjectGhostUser(projectId);
if (!ghostUser) {
throw new BadRequestError({
message: "Failed to find sudo user"
});
}
const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId);
if (!ghostUserLatestKey) {
throw new BadRequestError({
message: "Failed to find sudo user latest key"
});
}
const bot = await projectBotDAL.findOne({ projectId });
if (!bot) {
throw new BadRequestError({
message: "Failed to find bot"
});
}
const botPrivateKey = infisicalSymmetricDecrypt({
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey
});
const plaintextProjectKey = decryptAsymmetric({
ciphertext: ghostUserLatestKey.encryptedKey,
nonce: ghostUserLatestKey.nonce,
publicKey: ghostUserLatestKey.sender.publicKey,
privateKey: botPrivateKey
});
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(plaintextProjectKey, user.publicKey, botPrivateKey);
await projectKeyDAL.create({
encryptedKey,
nonce,
senderId: ghostUser.id,
receiverId: user.userId,
projectId
});
}
return user;
};
const removeUserFromGroup = async ({
groupSlug,
username,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TRemoveUserFromGroupDTO) => {
if (!actorOrgId) throw new BadRequestError({ message: "Failed to remove user from group without organization" });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
orgId: actorOrgId,
slug: groupSlug
});
if (!group)
throw new BadRequestError({
message: `Failed to find group with slug ${groupSlug}`
});
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
const user = await userDAL.findOne({
username
});
if (!user)
throw new BadRequestError({
message: `Failed to find user with username ${username}`
});
// check if user group membership already exists
const existingUserGroupMembership = await userGroupMembershipDAL.findOne({
groupId: group.id,
userId: user.id
});
if (!existingUserGroupMembership)
throw new BadRequestError({
message: `User ${username} is not part of the group ${groupSlug}`
});
const projectIds = (
await groupProjectDAL.find({
groupId: group.id
})
).map((gp) => gp.projectId);
const t = await userGroupMembershipDAL.filterProjectsByUserMembership(user.id, group.id, projectIds);
const projectsToDeleteKeyFor = projectIds.filter((p) => !t.has(p));
if (projectsToDeleteKeyFor.length) {
await projectKeyDAL.delete({
receiverId: user.id,
$in: {
projectId: projectsToDeleteKeyFor
}
});
}
await userGroupMembershipDAL.delete({
groupId: group.id,
userId: user.id
});
return user;
};
return {
createGroup,
updateGroup,
deleteGroup,
listGroupUsers,
addUserToGroup,
removeUserFromGroup
};
};

View File

@ -1,37 +0,0 @@
import { TGenericPermission } from "@app/lib/types";
export type TCreateGroupDTO = {
name: string;
slug?: string;
role: string;
} & TGenericPermission;
export type TUpdateGroupDTO = {
currentSlug: string;
} & Partial<{
name: string;
slug: string;
role: string;
}> &
TGenericPermission;
export type TDeleteGroupDTO = {
groupSlug: string;
} & TGenericPermission;
export type TListGroupUsersDTO = {
groupSlug: string;
offset: number;
limit: number;
username?: string;
} & TGenericPermission;
export type TAddUserToGroupDTO = {
groupSlug: string;
username: string;
} & TGenericPermission;
export type TRemoveUserFromGroupDTO = {
groupSlug: string;
username: string;
} & TGenericPermission;

View File

@ -1,125 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName, TUserEncryptionKeys } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify } from "@app/lib/knex";
export type TUserGroupMembershipDALFactory = ReturnType<typeof userGroupMembershipDALFactory>;
export const userGroupMembershipDALFactory = (db: TDbClient) => {
const userGroupMembershipOrm = ormify(db, TableName.UserGroupMembership);
/**
* Returns a sub-set of projectIds fed into this function corresponding to projects where either:
* - The user is a direct member of the project.
* - The user is a member of a group that is a member of the project, excluding projects that they are part of
* through the group with id [groupId].
*/
const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[]) => {
const userProjectMemberships: string[] = await db(TableName.ProjectMembership)
.where(`${TableName.ProjectMembership}.userId`, userId)
.whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
.pluck(`${TableName.ProjectMembership}.projectId`);
const userGroupMemberships: string[] = await db(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.whereNot(`${TableName.UserGroupMembership}.groupId`, groupId)
.join(
TableName.GroupProjectMembership,
`${TableName.UserGroupMembership}.groupId`,
`${TableName.GroupProjectMembership}.groupId`
)
.whereIn(`${TableName.GroupProjectMembership}.projectId`, projectIds)
.pluck(`${TableName.GroupProjectMembership}.projectId`);
return new Set(userProjectMemberships.concat(userGroupMemberships));
};
// special query
const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
try {
const usernameDocs: string[] = await db(TableName.UserGroupMembership)
.join(
TableName.GroupProjectMembership,
`${TableName.UserGroupMembership}.groupId`,
`${TableName.GroupProjectMembership}.groupId`
)
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.whereIn(`${TableName.Users}.username`, usernames) // TODO: pluck usernames
.pluck(`${TableName.Users}.id`);
return usernameDocs;
} catch (error) {
throw new DatabaseError({ error, name: "Find user group members in project" });
}
};
/**
* Return list of users that are part of the group with id [groupId]
* that have not yet been added individually to project with id [projectId].
*
* Note: Filters out users that are part of other groups in the project.
* @param groupId
* @param projectId
* @returns
*/
const findGroupMembersNotInProject = async (groupId: string, projectId: string) => {
try {
// get list of groups in the project with id [projectId]
// that are not the group with id [groupId]
const groups: string[] = await db(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);
// main query
const members = await db(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.groupId`, groupId)
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.ProjectMembership, function () {
this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
`${TableName.ProjectMembership}.projectId`,
"=",
db.raw("?", [projectId])
);
})
.whereNull(`${TableName.ProjectMembership}.userId`)
.leftJoin<TUserEncryptionKeys>(
TableName.UserEncryptionKey,
`${TableName.UserEncryptionKey}.userId`,
`${TableName.Users}.id`
)
.select(
db.ref("id").withSchema(TableName.UserGroupMembership),
db.ref("groupId").withSchema(TableName.UserGroupMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId"),
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER
.whereNotIn(`${TableName.UserGroupMembership}.userId`, function () {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.select(`${TableName.UserGroupMembership}.userId`)
.from(TableName.UserGroupMembership)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups);
});
return members.map(({ email, username, firstName, lastName, userId, publicKey, ...data }) => ({
...data,
user: { email, username, firstName, lastName, id: userId, publicKey }
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find group members not in project" });
}
};
return {
...userGroupMembershipOrm,
filterProjectsByUserMembership,
findUserGroupMembershipsInProject,
findGroupMembersNotInProject
};
};

View File

@ -12,6 +12,7 @@ import {
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { TOrgPermission } from "@app/lib/types";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -23,7 +24,7 @@ import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TLdapConfigDALFactory } from "./ldap-config-dal";
import { TCreateLdapCfgDTO, TGetLdapCfgDTO, TLdapLoginDTO, TUpdateLdapCfgDTO } from "./ldap-config-types";
import { TCreateLdapCfgDTO, TLdapLoginDTO, TUpdateLdapCfgDTO } from "./ldap-config-types";
type TLdapConfigServiceFactoryDep = {
ldapConfigDAL: TLdapConfigDALFactory;
@ -281,7 +282,7 @@ export const ldapConfigServiceFactory = ({
orgId,
actorAuthMethod,
actorOrgId
}: TGetLdapCfgDTO) => {
}: TOrgPermission) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
return getLdapCfg({

View File

@ -1,7 +1,6 @@
import { TOrgPermission } from "@app/lib/types";
export type TCreateLdapCfgDTO = {
orgId: string;
isActive: boolean;
url: string;
bindDN: string;
@ -10,9 +9,7 @@ export type TCreateLdapCfgDTO = {
caCert: string;
} & TOrgPermission;
export type TUpdateLdapCfgDTO = {
orgId: string;
} & Partial<{
export type TUpdateLdapCfgDTO = Partial<{
isActive: boolean;
url: string;
bindDN: string;
@ -22,10 +19,6 @@ export type TUpdateLdapCfgDTO = {
}> &
TOrgPermission;
export type TGetLdapCfgDTO = {
orgId: string;
} & TOrgPermission;
export type TLdapLoginDTO = {
externalId: string;
username: string;

View File

@ -20,7 +20,6 @@ export const getDefaultOnPremFeatures = () => {
samlSSO: false,
scim: false,
ldap: false,
groups: false,
status: null,
trial_end: null,
has_used_trial: true,

View File

@ -27,7 +27,6 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
samlSSO: false,
scim: false,
ldap: false,
groups: false,
status: null,
trial_end: null,
has_used_trial: true,

View File

@ -43,7 +43,6 @@ export type TFeatureSet = {
samlSSO: false;
scim: false;
ldap: false;
groups: false;
status: null;
trial_end: null;
has_used_trial: true;

View File

@ -18,7 +18,6 @@ export enum OrgPermissionSubjects {
Sso = "sso",
Scim = "scim",
Ldap = "ldap",
Groups = "groups",
Billing = "billing",
SecretScanning = "secret-scanning",
Identity = "identity"
@ -34,7 +33,6 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
| [OrgPermissionActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionActions, OrgPermissionSubjects.Billing]
| [OrgPermissionActions, OrgPermissionSubjects.Identity];
@ -85,11 +83,6 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Billing);
@ -112,7 +105,6 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Member);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);

View File

@ -45,43 +45,6 @@ export const permissionDALFactory = (db: TDbClient) => {
const getProjectPermission = async (userId: string, projectId: string) => {
try {
const groups: string[] = await db(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);
const groupDocs = await db(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups)
.join(
TableName.GroupProjectMembership,
`${TableName.GroupProjectMembership}.groupId`,
`${TableName.UserGroupMembership}.groupId`
)
.join(
TableName.GroupProjectMembershipRole,
`${TableName.GroupProjectMembershipRole}.projectMembershipId`,
`${TableName.GroupProjectMembership}.id`
)
.leftJoin(
TableName.ProjectRoles,
`${TableName.GroupProjectMembershipRole}.customRoleId`,
`${TableName.ProjectRoles}.id`
)
.join(TableName.Project, `${TableName.GroupProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
.select(selectAllTableCols(TableName.GroupProjectMembershipRole))
.select(
db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
// TODO(roll-forward-migration): remove this field when we drop this in next migration after a week
db.ref("createdAt").withSchema(TableName.GroupProjectMembership).as("membershipCreatedAt"),
db.ref("updatedAt").withSchema(TableName.GroupProjectMembership).as("membershipUpdatedAt"),
db.ref("projectId").withSchema(TableName.GroupProjectMembership),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("orgId").withSchema(TableName.Project),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug")
)
.select("permissions");
const docs = await db(TableName.ProjectMembership)
.join(
TableName.ProjectUserMembershipRole,
@ -106,9 +69,9 @@ export const permissionDALFactory = (db: TDbClient) => {
.select(
db.ref("id").withSchema(TableName.ProjectMembership).as("membershipId"),
// TODO(roll-forward-migration): remove this field when we drop this in next migration after a week
db.ref("role").withSchema(TableName.ProjectMembership).as("oldRoleField"),
db.ref("createdAt").withSchema(TableName.ProjectMembership).as("membershipCreatedAt"),
db.ref("updatedAt").withSchema(TableName.ProjectMembership).as("membershipUpdatedAt"),
db.ref("projectId").withSchema(TableName.ProjectMembership),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("orgId").withSchema(TableName.Project),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
@ -130,12 +93,19 @@ export const permissionDALFactory = (db: TDbClient) => {
const permission = sqlNestRelationships({
data: docs,
key: "projectId",
parentMapper: ({ orgId, orgAuthEnforced, membershipId, membershipCreatedAt, membershipUpdatedAt, role }) => ({
key: "membershipId",
parentMapper: ({
orgId,
orgAuthEnforced,
membershipId,
membershipCreatedAt,
membershipUpdatedAt,
oldRoleField
}) => ({
orgId,
orgAuthEnforced,
userId,
role,
role: oldRoleField,
id: membershipId,
projectId,
createdAt: membershipCreatedAt,
@ -175,66 +145,19 @@ export const permissionDALFactory = (db: TDbClient) => {
]
});
const groupPermission = groupDocs.length
? sqlNestRelationships({
data: groupDocs,
key: "projectId",
parentMapper: ({
orgId,
orgAuthEnforced,
membershipId,
membershipCreatedAt,
membershipUpdatedAt,
role
}) => ({
orgId,
orgAuthEnforced,
userId,
role,
id: membershipId,
projectId,
createdAt: membershipCreatedAt,
updatedAt: membershipUpdatedAt
}),
childrenMapper: [
{
key: "id",
label: "roles" as const,
mapper: (data) =>
ProjectUserMembershipRolesSchema.extend({
permissions: z.unknown(),
customRoleSlug: z.string().optional().nullable()
}).parse(data)
}
]
})
: [];
if (!permission?.[0] && !groupPermission[0]) return undefined;
if (!permission?.[0]) return undefined;
// when introducing cron mode, change it here
const activeRoles =
permission?.[0]?.roles?.filter(
({ isTemporary, temporaryAccessEndTime }) =>
!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
) ?? [];
const activeGroupRoles =
groupPermission?.[0]?.roles?.filter(
({ isTemporary, temporaryAccessEndTime }) =>
!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
) ?? [];
const activeRoles = permission?.[0]?.roles?.filter(
({ isTemporary, temporaryAccessEndTime }) =>
!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
);
const activeAdditionalPrivileges = permission?.[0]?.additionalPrivileges?.filter(
({ isTemporary, temporaryAccessEndTime }) =>
!isTemporary || (isTemporary && temporaryAccessEndTime && new Date() < temporaryAccessEndTime)
);
return {
...(permission[0] || groupPermission[0]),
roles: [...activeRoles, ...activeGroupRoles],
additionalPrivileges: activeAdditionalPrivileges
};
return { ...permission[0], roles: activeRoles, additionalPrivileges: activeAdditionalPrivileges };
} catch (error) {
throw new DatabaseError({ error, name: "GetProjectPermission" });
}

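For context on the hunk above: `sqlNestRelationships` collapses flat SQL join rows into one parent object per `key`, with child rows mapped under labels such as `roles`. The following is a minimal, hypothetical stand-in for that pattern, not the actual `@app/lib/knex` implementation; the row shape and helper name are assumptions for illustration only.

// Hypothetical stand-in for the nest-relationships pattern: flat join rows are
// grouped by a parent key, mapped once per parent, and child rows are collected
// (de-duplicated) under a label.
type Row = { membershipId: string; roleId: string; role: string };

const nestRows = (
  rows: Row[],
  parentMapper: (row: Row) => { id: string },
  childMapper: (row: Row) => { id: string; role: string }
) => {
  const parents = new Map<string, { id: string; roles: { id: string; role: string }[] }>();
  for (const row of rows) {
    if (!parents.has(row.membershipId)) {
      parents.set(row.membershipId, { ...parentMapper(row), roles: [] });
    }
    const parent = parents.get(row.membershipId)!;
    const child = childMapper(row);
    // de-duplicate children produced by the one-to-many join
    if (!parent.roles.some((r) => r.id === child.id)) parent.roles.push(child);
  }
  return [...parents.values()];
};

// usage: two join rows for the same membership collapse into one parent with two roles
const docs: Row[] = [
  { membershipId: "m1", roleId: "r1", role: "admin" },
  { membershipId: "m1", roleId: "r2", role: "viewer" }
];
console.log(nestRows(docs, (r) => ({ id: r.membershipId }), (r) => ({ id: r.roleId, role: r.role })));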

@ -12,7 +12,6 @@ export enum ProjectPermissionActions {
export enum ProjectPermissionSub {
Role = "role",
Member = "member",
Groups = "groups",
Settings = "settings",
Integrations = "integrations",
Webhooks = "webhooks",
@ -42,7 +41,6 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.Role]
| [ProjectPermissionActions, ProjectPermissionSub.Tags]
| [ProjectPermissionActions, ProjectPermissionSub.Member]
| [ProjectPermissionActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
@ -84,11 +82,6 @@ const buildAdminPermissionRules = () => {
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Role);
@ -164,8 +157,6 @@ const buildMemberPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations);
@ -218,7 +209,6 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);

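The permission builders above follow the standard @casl/ability pattern: accumulate (action, subject) rules with `can(...)`, build an ability, and enforce it with `ForbiddenError.from(...).throwUnlessCan(...)`. A minimal sketch of that pattern, using example subject names rather than the full Infisical permission set:

import { AbilityBuilder, createMongoAbility, ForbiddenError } from "@casl/ability";

// Build an ability from simple string-based action/subject rules.
const buildMemberAbility = () => {
  const { can, build } = new AbilityBuilder(createMongoAbility);
  can("read", "Member");
  can("read", "Role");
  return build();
};

const ability = buildMemberAbility();
console.log(ability.can("read", "Member")); // true
ForbiddenError.from(ability).throwUnlessCan("read", "Role"); // passes silently
// ForbiddenError.from(ability).throwUnlessCan("delete", "Member"); // would throw ForbiddenError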

@ -1,4 +1,4 @@
import { TListScimGroups, TListScimUsers, TScimGroup, TScimUser } from "./scim-types";
import { TListScimUsers, TScimUser } from "./scim-types";
export const buildScimUserList = ({
scimUsers,
@ -62,47 +62,3 @@ export const buildScimUser = ({
return scimUser;
};
export const buildScimGroupList = ({
scimGroups,
offset,
limit
}: {
scimGroups: TScimGroup[];
offset: number;
limit: number;
}): TListScimGroups => {
return {
Resources: scimGroups,
itemsPerPage: limit,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex: offset,
totalResults: scimGroups.length
};
};
export const buildScimGroup = ({
groupId,
name,
members
}: {
groupId: string;
name: string;
members: {
value: string;
display: string;
}[];
}): TScimGroup => {
const scimGroup = {
schemas: ["urn:ietf:params:scim:schemas:core:2.0:Group"],
id: groupId,
displayName: name,
members,
meta: {
resourceType: "Group",
location: null
}
};
return scimGroup;
};


@ -1,13 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import jwt from "jsonwebtoken";
import { OrgMembershipRole, OrgMembershipStatus, TableName, TGroups } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { OrgMembershipRole, OrgMembershipStatus, TableName } from "@app/db/schemas";
import { TScimDALFactory } from "@app/ee/services/scim/scim-dal";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TOrgPermission } from "@app/lib/types";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -20,23 +17,16 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList } from "./scim-fns";
import { buildScimUser, buildScimUserList } from "./scim-fns";
import {
TCreateScimGroupDTO,
TCreateScimTokenDTO,
TCreateScimUserDTO,
TDeleteScimGroupDTO,
TDeleteScimTokenDTO,
TDeleteScimUserDTO,
TGetScimGroupDTO,
TGetScimUserDTO,
TListScimGroupsDTO,
TListScimUsers,
TListScimUsersDTO,
TReplaceScimUserDTO,
TScimTokenJwtPayload,
TUpdateScimGroupNamePatchDTO,
TUpdateScimGroupNamePutDTO,
TUpdateScimUserDTO
} from "./scim-types";
@ -49,7 +39,6 @@ type TScimServiceFactoryDep = {
>;
projectDAL: Pick<TProjectDALFactory, "find">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete">;
groupDAL: Pick<TGroupDALFactory, "create" | "findOne" | "findAllGroupMembers" | "update" | "delete" | "findGroups">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
smtpService: TSmtpService;
@ -64,7 +53,6 @@ export const scimServiceFactory = ({
orgDAL,
projectDAL,
projectMembershipDAL,
groupDAL,
permissionService,
smtpService
}: TScimServiceFactoryDep) => {
@ -435,221 +423,6 @@ export const scimServiceFactory = ({
});
};
const deleteScimUser = async ({ userId, orgId }: TDeleteScimUserDTO) => {
const [membership] = await orgDAL
.findMembership({
userId,
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
})
.catch(() => {
throw new ScimRequestError({
detail: "User not found",
status: 404
});
});
if (!membership)
throw new ScimRequestError({
detail: "User not found",
status: 404
});
if (!membership.scimEnabled) {
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
}
await deleteOrgMembership({
orgMembershipId: membership.id,
orgId: membership.orgId,
orgDAL,
projectDAL,
projectMembershipDAL
});
return {}; // intentionally return empty object upon success
};
const listScimGroups = async ({ orgId, offset, limit }: TListScimGroupsDTO) => {
const org = await orgDAL.findById(orgId);
if (!org.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
const groups = await groupDAL.findGroups({
orgId
});
const scimGroups = groups.map((group) =>
buildScimGroup({
groupId: group.id,
name: group.name,
members: []
})
);
return buildScimGroupList({
scimGroups,
offset,
limit
});
};
const createScimGroup = async ({ displayName, orgId }: TCreateScimGroupDTO) => {
const org = await orgDAL.findById(orgId);
if (!org.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
const group = await groupDAL.create({
name: displayName,
slug: slugify(`${displayName}-${alphaNumericNanoId(4)}`),
orgId,
role: OrgMembershipRole.NoAccess
});
return buildScimGroup({
groupId: group.id,
name: group.name,
members: []
});
};
const getScimGroup = async ({ groupId, orgId }: TGetScimGroupDTO) => {
const group = await groupDAL.findOne({
id: groupId,
orgId
});
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
const users = await groupDAL.findAllGroupMembers({
orgId: group.orgId,
groupId: group.id
});
return buildScimGroup({
groupId: group.id,
name: group.name,
members: users
.filter((user) => user.isPartOfGroup)
.map((user) => ({
value: user.id,
display: `${user.firstName} ${user.lastName}`
}))
});
};
const updateScimGroupNamePut = async ({ groupId, orgId, displayName }: TUpdateScimGroupNamePutDTO) => {
const [group] = await groupDAL.update(
{
id: groupId,
orgId
},
{
name: displayName
}
);
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
return buildScimGroup({
groupId: group.id,
name: group.name,
members: []
});
};
// TODO: add support for add/remove op
const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
const org = await orgDAL.findById(orgId);
if (!org.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
let group: TGroups | undefined;
for await (const operation of operations) {
switch (operation.op) {
case "replace": {
await groupDAL.update(
{
id: groupId,
orgId
},
{
name: operation.value.displayName
}
);
break;
}
case "add": {
// TODO
break;
}
case "remove": {
// TODO
break;
}
default: {
throw new ScimRequestError({
detail: "Invalid Operation",
status: 400
});
}
}
}
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
return buildScimGroup({
groupId: group.id,
name: group.name,
members: []
});
};
const deleteScimGroup = async ({ groupId, orgId }: TDeleteScimGroupDTO) => {
const [group] = await groupDAL.delete({
id: groupId,
orgId
});
if (!group) {
throw new ScimRequestError({
detail: "Group Not Found",
status: 404
});
}
return {}; // intentionally return empty object upon success
};
const fnValidateScimToken = async (token: TScimTokenJwtPayload) => {
const scimToken = await scimDAL.findById(token.scimTokenId);
if (!scimToken) throw new UnauthorizedError();
@ -682,13 +455,6 @@ export const scimServiceFactory = ({
createScimUser,
updateScimUser,
replaceScimUser,
deleteScimUser,
listScimGroups,
createScimGroup,
getScimGroup,
deleteScimGroup,
updateScimGroupNamePut,
updateScimGroupNamePatch,
fnValidateScimToken
};
};


@ -59,73 +59,6 @@ export type TReplaceScimUserDTO = {
orgId: string;
};
export type TDeleteScimUserDTO = {
userId: string;
orgId: string;
};
export type TListScimGroupsDTO = {
offset: number;
limit: number;
orgId: string;
};
export type TListScimGroups = {
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"];
totalResults: number;
Resources: TScimGroup[];
itemsPerPage: number;
startIndex: number;
};
export type TCreateScimGroupDTO = {
displayName: string;
orgId: string;
};
export type TGetScimGroupDTO = {
groupId: string;
orgId: string;
};
export type TUpdateScimGroupNamePutDTO = {
groupId: string;
orgId: string;
displayName: string;
};
export type TUpdateScimGroupNamePatchDTO = {
groupId: string;
orgId: string;
operations: (TRemoveOp | TReplaceOp | TAddOp)[];
};
type TReplaceOp = {
op: "replace";
value: {
id: string;
displayName: string;
};
};
type TRemoveOp = {
op: "remove";
path: string;
};
type TAddOp = {
op: "add";
value: {
value: string;
display?: string;
};
};
export type TDeleteScimGroupDTO = {
groupId: string;
orgId: string;
};
export type TScimTokenJwtPayload = {
scimTokenId: string;
authTokenType: string;
@ -153,17 +86,3 @@ export type TScimUser = {
location: null;
};
};
export type TScimGroup = {
schemas: string[];
id: string;
displayName: string;
members: {
value: string;
display: string;
}[];
meta: {
resourceType: string;
location: null;
};
};

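The removed TReplaceOp / TAddOp / TRemoveOp types above mirror the SCIM PatchOp message defined in RFC 7644 §3.5.2. A hypothetical request body of that shape is shown below; the IDs and display names are made up, and only the "replace" branch was actually handled by the removed service code, with "add"/"remove" left as TODOs.

// Illustrative SCIM Group PATCH payload matching the removed operation types.
const patchGroupRequest = {
  schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  Operations: [
    { op: "replace", value: { id: "group-id", displayName: "Engineering" } },
    { op: "add", value: { value: "user-id", display: "Jane Doe" } },
    { op: "remove", path: 'members[value eq "user-id"]' }
  ]
} as const;

console.log(JSON.stringify(patchGroupRequest, null, 2));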

@ -1,34 +1,3 @@
export const GROUPS = {
CREATE: {
name: "The name of the group to create.",
slug: "The slug of the group to create.",
role: "The role of the group to create."
},
UPDATE: {
currentSlug: "The current slug of the group to update.",
name: "The new name of the group to update to.",
slug: "The new slug of the group to update to.",
role: "The new role of the group to update to."
},
DELETE: {
slug: "The slug of the group to delete"
},
LIST_USERS: {
slug: "The slug of the group to list users for",
offset: "The offset to start from. If you enter 10, it will start from the 10th user.",
limit: "The number of users to return.",
username: "The username to search for."
},
ADD_USER: {
slug: "The slug of the group to add the user to.",
username: "The username of the user to add to the group."
},
DELETE_USER: {
slug: "The slug of the group to remove the user from.",
username: "The username of the user to remove from the group."
}
} as const;
export const IDENTITIES = {
CREATE: {
name: "The name of the identity to create.",
@ -110,9 +79,6 @@ export const ORGANIZATIONS = {
},
GET_PROJECTS: {
organizationId: "The ID of the organization to get projects from."
},
LIST_GROUPS: {
organizationId: "The ID of the organization to list groups for."
}
} as const;
@ -175,29 +141,6 @@ export const PROJECTS = {
},
ROLLBACK_TO_SNAPSHOT: {
secretSnapshotId: "The ID of the snapshot to rollback to."
},
ADD_GROUP_TO_PROJECT: {
projectSlug: "The slug of the project to add the group to.",
groupSlug: "The slug of the group to add to the project.",
role: "The role for the group to assume in the project."
},
UPDATE_GROUP_IN_PROJECT: {
projectSlug: "The slug of the project to update the group in.",
groupSlug: "The slug of the group to update in the project.",
roles: "A list of roles to update the group to."
},
REMOVE_GROUP_FROM_PROJECT: {
projectSlug: "The slug of the project to delete the group from.",
groupSlug: "The slug of the group to delete from the project."
},
LIST_GROUPS_IN_PROJECT: {
projectSlug: "The slug of the project to list groups for."
},
LIST_INTEGRATION: {
workspaceId: "The ID of the project to list integrations for."
},
LIST_INTEGRATION_AUTHORIZATION: {
workspaceId: "The ID of the project to list integration auths for."
}
} as const;
@ -272,8 +215,7 @@ export const SECRETS = {
export const RAW_SECRETS = {
LIST: {
recursive:
"Whether or not to fetch all secrets from the specified base path, and all of its subdirectories. Note, the max depth is 20 deep.",
recursive: "Whether or not to fetch all secrets from the specified base path, and all of its subdirectories.",
workspaceId: "The ID of the project to list secrets from.",
workspaceSlug: "The slug of the project to list secrets from. This parameter is only usable by machine identities.",
environment: "The slug of the environment to list secrets from.",
@ -560,8 +502,11 @@ export const INTEGRATION_AUTH = {
url: "",
namespace: "",
refreshToken: "The refresh token for integration authorization."
},
LIST_AUTHORIZATION: {
workspaceId: "The ID of the project to list integration auths for."
}
} as const;
};
export const INTEGRATION = {
CREATE: {


@ -1,17 +1,5 @@
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
export type TGenericPermission = {
actor: ActorType;
actorId: string;
actorAuthMethod: ActorAuthMethod;
actorOrgId: string | undefined;
};
/**
* TODO(dangtony98): ideally move service fns to use TGenericPermission
* because TOrgPermission [orgId] is not as relevant anymore with the
* introduction of organizationIds bound to all user tokens
*/
export type TOrgPermission = {
actor: ActorType;
actorId: string;
@ -28,15 +16,6 @@ export type TProjectPermission = {
actorOrgId: string;
};
// same as TProjectPermission but with projectSlug requirement instead of projectId
export type TProjectSlugPermission = {
actor: ActorType;
actorId: string;
projectSlug: string;
actorAuthMethod: ActorAuthMethod;
actorOrgId: string;
};
export type RequiredKeys<T> = {
[K in keyof T]-?: undefined extends T[K] ? never : K;
}[keyof T];


@ -61,11 +61,11 @@ export type TQueueJobTypes = {
};
[QueueName.SecretWebhook]: {
name: QueueJobs.SecWebhook;
payload: { projectId: string; environment: string; secretPath: string; depth?: number };
payload: { projectId: string; environment: string; secretPath: string };
};
[QueueName.IntegrationSync]: {
name: QueueJobs.IntegrationSync;
payload: { projectId: string; environment: string; secretPath: string; depth?: number };
payload: { projectId: string; environment: string; secretPath: string };
};
[QueueName.SecretFullRepoScan]: {
name: QueueJobs.SecretScan;


@ -11,9 +11,6 @@ import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/pro
import { dynamicSecretLeaseDALFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-dal";
import { dynamicSecretLeaseQueueServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue";
import { dynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { groupDALFactory } from "@app/ee/services/group/group-dal";
import { groupServiceFactory } from "@app/ee/services/group/group-service";
import { userGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-dal";
import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { ldapConfigDALFactory } from "@app/ee/services/ldap-config/ldap-config-dal";
@ -61,9 +58,6 @@ import { authPaswordServiceFactory } from "@app/services/auth/auth-password-serv
import { authSignupServiceFactory } from "@app/services/auth/auth-signup-service";
import { tokenDALFactory } from "@app/services/auth-token/auth-token-dal";
import { tokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { groupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { groupProjectMembershipRoleDALFactory } from "@app/services/group-project/group-project-membership-role-dal";
import { groupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { identityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { identityServiceFactory } from "@app/services/identity/identity-service";
@ -213,10 +207,6 @@ export const registerRoutes = async (
const gitAppInstallSessionDAL = gitAppInstallSessionDALFactory(db);
const gitAppOrgDAL = gitAppDALFactory(db);
const groupDAL = groupDALFactory(db);
const groupProjectDAL = groupProjectDALFactory(db);
const groupProjectMembershipRoleDAL = groupProjectMembershipRoleDALFactory(db);
const userGroupMembershipDAL = userGroupMembershipDALFactory(db);
const secretScanningDAL = secretScanningDALFactory(db);
const licenseDAL = licenseDALFactory(db);
const dynamicSecretDAL = dynamicSecretDALFactory(db);
@ -259,29 +249,6 @@ export const registerRoutes = async (
samlConfigDAL,
licenseService
});
const groupService = groupServiceFactory({
userDAL,
groupDAL,
groupProjectDAL,
orgDAL,
userGroupMembershipDAL,
projectDAL,
projectBotDAL,
projectKeyDAL,
permissionService,
licenseService
});
const groupProjectService = groupProjectServiceFactory({
groupDAL,
groupProjectDAL,
groupProjectMembershipRoleDAL,
userGroupMembershipDAL,
projectDAL,
projectKeyDAL,
projectBotDAL,
projectRoleDAL,
permissionService
});
const scimService = scimServiceFactory({
licenseService,
scimDAL,
@ -289,7 +256,6 @@ export const registerRoutes = async (
orgDAL,
projectDAL,
projectMembershipDAL,
groupDAL,
permissionService,
smtpService
});
@ -336,7 +302,6 @@ export const registerRoutes = async (
projectKeyDAL,
smtpService,
userDAL,
groupDAL,
orgBotDAL
});
const signupService = authSignupServiceFactory({
@ -382,7 +347,6 @@ export const registerRoutes = async (
projectBotDAL,
orgDAL,
userDAL,
userGroupMembershipDAL,
smtpService,
projectKeyDAL,
projectRoleDAL,
@ -481,6 +445,14 @@ export const registerRoutes = async (
projectEnvDAL,
snapshotService
});
const secretImportService = secretImportServiceFactory({
projectEnvDAL,
folderDAL,
permissionService,
secretImportDAL,
projectDAL,
secretDAL
});
const integrationAuthService = integrationAuthServiceFactory({
integrationAuthDAL,
integrationDAL,
@ -508,15 +480,6 @@ export const registerRoutes = async (
secretTagDAL,
secretVersionTagDAL
});
const secretImportService = secretImportServiceFactory({
projectEnvDAL,
folderDAL,
permissionService,
secretImportDAL,
projectDAL,
secretDAL,
secretQueueService
});
const secretBlindIndexService = secretBlindIndexServiceFactory({
permissionService,
secretDAL,
@ -662,8 +625,6 @@ export const registerRoutes = async (
password: passwordService,
signup: signupService,
user: userService,
group: groupService,
groupProject: groupProjectService,
permission: permissionService,
org: orgService,
orgRole: orgRoleService,


@ -1,14 +1,6 @@
import { z } from "zod";
import {
GroupsSchema,
IncidentContactsSchema,
OrganizationsSchema,
OrgMembershipsSchema,
OrgRolesSchema,
UsersSchema
} from "@app/db/schemas";
import { ORGANIZATIONS } from "@app/lib/api-docs";
import { IncidentContactsSchema, OrganizationsSchema, OrgMembershipsSchema, UsersSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -226,41 +218,4 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
return { incidentContactsOrg };
}
});
server.route({
method: "GET",
url: "/:organizationId/groups",
schema: {
params: z.object({
organizationId: z.string().trim().describe(ORGANIZATIONS.LIST_GROUPS.organizationId)
}),
response: {
200: z.object({
groups: GroupsSchema.merge(
z.object({
customRole: OrgRolesSchema.pick({
id: true,
name: true,
slug: true,
permissions: true,
description: true
}).optional()
})
).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const groups = await server.services.org.getOrgGroups({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.params.organizationId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return { groups };
}
});
};


@ -7,7 +7,7 @@ import {
UserEncryptionKeysSchema,
UsersSchema
} from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { INTEGRATION_AUTH, PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -326,14 +326,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
description: "List integrations for a project.",
security: [
{
bearerAuth: []
}
],
params: z.object({
workspaceId: z.string().trim().describe(PROJECTS.LIST_INTEGRATION.workspaceId)
workspaceId: z.string().trim()
}),
response: {
200: z.object({
@ -349,7 +343,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const integrations = await server.services.integration.listIntegrationByProject({
actorId: req.permission.id,
@ -376,7 +370,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
],
params: z.object({
workspaceId: z.string().trim().describe(PROJECTS.LIST_INTEGRATION_AUTHORIZATION.workspaceId)
workspaceId: z.string().trim().describe(INTEGRATION_AUTH.LIST_AUTHORIZATION.workspaceId)
}),
response: {
200: z.object({


@ -1,201 +0,0 @@
import ms from "ms";
import { z } from "zod";
import {
GroupProjectMembershipsSchema,
GroupsSchema,
ProjectMembershipRole,
ProjectUserMembershipRolesSchema
} from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { ProjectUserMembershipTemporaryMode } from "@app/services/project-membership/project-membership-types";
export const registerGroupProjectRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/:projectSlug/groups/:groupSlug",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Add group to project",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectSlug: z.string().trim().describe(PROJECTS.ADD_GROUP_TO_PROJECT.projectSlug),
groupSlug: z.string().trim().describe(PROJECTS.ADD_GROUP_TO_PROJECT.groupSlug)
}),
body: z.object({
role: z
.string()
.trim()
.min(1)
.default(ProjectMembershipRole.NoAccess)
.describe(PROJECTS.ADD_GROUP_TO_PROJECT.role)
}),
response: {
200: z.object({
groupMembership: GroupProjectMembershipsSchema
})
}
},
handler: async (req) => {
const groupMembership = await server.services.groupProject.addGroupToProject({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
groupSlug: req.params.groupSlug,
projectSlug: req.params.projectSlug,
role: req.body.role
});
return { groupMembership };
}
});
server.route({
method: "PATCH",
url: "/:projectSlug/groups/:groupSlug",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update group in project",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectSlug: z.string().trim().describe(PROJECTS.UPDATE_GROUP_IN_PROJECT.projectSlug),
groupSlug: z.string().trim().describe(PROJECTS.UPDATE_GROUP_IN_PROJECT.groupSlug)
}),
body: z.object({
roles: z
.array(
z.union([
z.object({
role: z.string(),
isTemporary: z.literal(false).default(false)
}),
z.object({
role: z.string(),
isTemporary: z.literal(true),
temporaryMode: z.nativeEnum(ProjectUserMembershipTemporaryMode),
temporaryRange: z.string().refine((val) => ms(val) > 0, "Temporary range must be a positive number"),
temporaryAccessStartTime: z.string().datetime()
})
])
)
.min(1)
.describe(PROJECTS.UPDATE_GROUP_IN_PROJECT.roles)
}),
response: {
200: z.object({
roles: ProjectUserMembershipRolesSchema.array()
})
}
},
handler: async (req) => {
const roles = await server.services.groupProject.updateGroupInProject({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
groupSlug: req.params.groupSlug,
projectSlug: req.params.projectSlug,
roles: req.body.roles
});
return { roles };
}
});
server.route({
method: "DELETE",
url: "/:projectSlug/groups/:groupSlug",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Remove group from project",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectSlug: z.string().trim().describe(PROJECTS.REMOVE_GROUP_FROM_PROJECT.projectSlug),
groupSlug: z.string().trim().describe(PROJECTS.REMOVE_GROUP_FROM_PROJECT.groupSlug)
}),
response: {
200: z.object({
groupMembership: GroupProjectMembershipsSchema
})
}
},
handler: async (req) => {
const groupMembership = await server.services.groupProject.removeGroupFromProject({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
groupSlug: req.params.groupSlug,
projectSlug: req.params.projectSlug
});
return { groupMembership };
}
});
server.route({
method: "GET",
url: "/:projectSlug/groups",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Return list of groups in project",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectSlug: z.string().trim().describe(PROJECTS.LIST_GROUPS_IN_PROJECT.projectSlug)
}),
response: {
200: z.object({
groupMemberships: z
.object({
id: z.string(),
groupId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
roles: z.array(
z.object({
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
),
group: GroupsSchema.pick({ name: true, id: true, slug: true })
})
.array()
})
}
},
handler: async (req) => {
const groupMemberships = await server.services.groupProject.listGroupsInProject({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectSlug: req.params.projectSlug
});
return { groupMemberships };
}
});
};

View File

@ -50,7 +50,6 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
orgId: req.params.orgId
});
return { identityMemberships };
}
});


@ -1,4 +1,3 @@
import { registerGroupProjectRouter } from "./group-project-router";
import { registerIdentityOrgRouter } from "./identity-org-router";
import { registerIdentityProjectRouter } from "./identity-project-router";
import { registerMfaRouter } from "./mfa-router";
@ -23,7 +22,6 @@ export const registerV2Routes = async (server: FastifyZodProvider) => {
async (projectServer) => {
await projectServer.register(registerProjectRouter);
await projectServer.register(registerIdentityProjectRouter);
await projectServer.register(registerGroupProjectRouter);
await projectServer.register(registerProjectMembershipRouter);
},
{ prefix: "/workspace" }


@ -153,7 +153,7 @@ export const authLoginServiceFactory = ({
username: email
});
if (!userEnc || (userEnc && !userEnc.isAccepted)) {
throw new Error("Failed to find user");
throw new Error("Failed to find user");
}
if (!userEnc.authMethods?.includes(AuthMethod.EMAIL)) {
validateProviderAuthToken(providerAuthToken as string, email);


@ -192,7 +192,7 @@ export const authPaswordServiceFactory = ({
}: TCreateBackupPrivateKeyDTO) => {
const userEnc = await userDAL.findUserEncKeyByUserId(userId);
if (!userEnc || (userEnc && !userEnc.isAccepted)) {
throw new Error("Failed to find user");
throw new Error("Failed to find user");
}
if (!userEnc.clientPublicKey || !userEnc.serverPrivateKey) throw new Error("failed to create backup key");
@ -239,7 +239,7 @@ export const authPaswordServiceFactory = ({
const getBackupPrivateKeyOfUser = async (userId: string) => {
const user = await userDAL.findUserEncKeyByUserId(userId);
if (!user || (user && !user.isAccepted)) {
throw new Error("Failed to find user");
throw new Error("Failed to find user");
}
const backupKey = await authDAL.getBackupPrivateKeyByUserId(userId);
if (!backupKey) throw new Error("Failed to find user backup key");


@ -1,99 +0,0 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, sqlNestRelationships } from "@app/lib/knex";
export type TGroupProjectDALFactory = ReturnType<typeof groupProjectDALFactory>;
export const groupProjectDALFactory = (db: TDbClient) => {
const groupProjectOrm = ormify(db, TableName.GroupProjectMembership);
const findByProjectId = async (projectId: string, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.join(TableName.Groups, `${TableName.GroupProjectMembership}.groupId`, `${TableName.Groups}.id`)
.join(
TableName.GroupProjectMembershipRole,
`${TableName.GroupProjectMembershipRole}.projectMembershipId`,
`${TableName.GroupProjectMembership}.id`
)
.leftJoin(
TableName.ProjectRoles,
`${TableName.GroupProjectMembershipRole}.customRoleId`,
`${TableName.ProjectRoles}.id`
)
.select(
db.ref("id").withSchema(TableName.GroupProjectMembership),
db.ref("createdAt").withSchema(TableName.GroupProjectMembership),
db.ref("updatedAt").withSchema(TableName.GroupProjectMembership),
db.ref("id").as("groupId").withSchema(TableName.Groups),
db.ref("name").as("groupName").withSchema(TableName.Groups),
db.ref("slug").as("groupSlug").withSchema(TableName.Groups),
db.ref("id").withSchema(TableName.GroupProjectMembership),
db.ref("role").withSchema(TableName.GroupProjectMembershipRole),
db.ref("id").withSchema(TableName.GroupProjectMembershipRole).as("membershipRoleId"),
db.ref("customRoleId").withSchema(TableName.GroupProjectMembershipRole),
db.ref("name").withSchema(TableName.ProjectRoles).as("customRoleName"),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
db.ref("temporaryMode").withSchema(TableName.GroupProjectMembershipRole),
db.ref("isTemporary").withSchema(TableName.GroupProjectMembershipRole),
db.ref("temporaryRange").withSchema(TableName.GroupProjectMembershipRole),
db.ref("temporaryAccessStartTime").withSchema(TableName.GroupProjectMembershipRole),
db.ref("temporaryAccessEndTime").withSchema(TableName.GroupProjectMembershipRole)
);
const members = sqlNestRelationships({
data: docs,
parentMapper: ({ groupId, groupName, groupSlug, id, createdAt, updatedAt }) => ({
id,
groupId,
createdAt,
updatedAt,
group: {
id: groupId,
name: groupName,
slug: groupSlug
}
}),
key: "id",
childrenMapper: [
{
label: "roles" as const,
key: "membershipRoleId",
mapper: ({
role,
customRoleId,
customRoleName,
customRoleSlug,
membershipRoleId,
temporaryRange,
temporaryMode,
temporaryAccessEndTime,
temporaryAccessStartTime,
isTemporary
}) => ({
id: membershipRoleId,
role,
customRoleId,
customRoleName,
customRoleSlug,
temporaryRange,
temporaryMode,
temporaryAccessEndTime,
temporaryAccessStartTime,
isTemporary
})
}
]
});
return members;
} catch (error) {
throw new DatabaseError({ error, name: "FindByProjectId" });
}
};
return { ...groupProjectOrm, findByProjectId };
};


@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TGroupProjectMembershipRoleDALFactory = ReturnType<typeof groupProjectMembershipRoleDALFactory>;
export const groupProjectMembershipRoleDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.GroupProjectMembershipRole);
return orm;
};


@ -1,338 +0,0 @@
import { ForbiddenError } from "@casl/ability";
import ms from "ms";
import { ProjectMembershipRole, SecretKeyEncoding } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { TGroupDALFactory } from "../../ee/services/group/group-dal";
import { TUserGroupMembershipDALFactory } from "../../ee/services/group/user-group-membership-dal";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectBotDALFactory } from "../project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "../project-key/project-key-dal";
import { ProjectUserMembershipTemporaryMode } from "../project-membership/project-membership-types";
import { TProjectRoleDALFactory } from "../project-role/project-role-dal";
import { TGroupProjectDALFactory } from "./group-project-dal";
import { TGroupProjectMembershipRoleDALFactory } from "./group-project-membership-role-dal";
import {
TCreateProjectGroupDTO,
TDeleteProjectGroupDTO,
TListProjectGroupDTO,
TUpdateProjectGroupDTO
} from "./group-project-types";
type TGroupProjectServiceFactoryDep = {
groupProjectDAL: Pick<TGroupProjectDALFactory, "findOne" | "transaction" | "create" | "delete" | "findByProjectId">;
groupProjectMembershipRoleDAL: Pick<
TGroupProjectMembershipRoleDALFactory,
"create" | "transaction" | "insertMany" | "delete"
>;
userGroupMembershipDAL: TUserGroupMembershipDALFactory;
projectDAL: Pick<TProjectDALFactory, "findOne" | "findProjectGhostUser">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "findLatestProjectKey" | "delete" | "insertMany" | "transaction">;
projectRoleDAL: Pick<TProjectRoleDALFactory, "find">;
projectBotDAL: TProjectBotDALFactory;
groupDAL: Pick<TGroupDALFactory, "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getProjectPermissionByRole">;
};
export type TGroupProjectServiceFactory = ReturnType<typeof groupProjectServiceFactory>;
export const groupProjectServiceFactory = ({
groupDAL,
groupProjectDAL,
groupProjectMembershipRoleDAL,
userGroupMembershipDAL,
projectDAL,
projectKeyDAL,
projectBotDAL,
projectRoleDAL,
permissionService
}: TGroupProjectServiceFactoryDep) => {
const addGroupToProject = async ({
groupSlug,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectSlug,
role
}: TCreateProjectGroupDTO) => {
const project = await projectDAL.findOne({
slug: projectSlug
});
if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` });
if (project.version < 2) throw new BadRequestError({ message: `Failed to add group to E2EE project` });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Groups);
const group = await groupDAL.findOne({ orgId: actorOrgId, slug: groupSlug });
if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` });
const existingGroup = await groupProjectDAL.findOne({ groupId: group.id, projectId: project.id });
if (existingGroup)
throw new BadRequestError({
message: `Group with slug ${groupSlug} already exists in project with id ${project.id}`
});
const { permission: rolePermission, role: customRole } = await permissionService.getProjectPermissionByRole(
role,
project.id
);
const hasPrivilege = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasPrivilege)
throw new ForbiddenRequestError({
message: "Failed to add group to project with more privileged role"
});
const isCustomRole = Boolean(customRole);
const projectGroup = await groupProjectDAL.transaction(async (tx) => {
const groupProjectMembership = await groupProjectDAL.create(
{
groupId: group.id,
projectId: project.id
},
tx
);
await groupProjectMembershipRoleDAL.create(
{
projectMembershipId: groupProjectMembership.id,
role: isCustomRole ? ProjectMembershipRole.Custom : role,
customRoleId: customRole?.id
},
tx
);
return groupProjectMembership;
});
// share project key with users in group that have not
// individually been added to the project and that are not part of
// other groups that are in the project
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id);
if (groupMembers.length) {
const ghostUser = await projectDAL.findProjectGhostUser(project.id);
if (!ghostUser) {
throw new BadRequestError({
message: "Failed to find sudo user"
});
}
const ghostUserLatestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, project.id);
if (!ghostUserLatestKey) {
throw new BadRequestError({
message: "Failed to find sudo user latest key"
});
}
const bot = await projectBotDAL.findOne({ projectId: project.id });
if (!bot) {
throw new BadRequestError({
message: "Failed to find bot"
});
}
const botPrivateKey = infisicalSymmetricDecrypt({
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey
});
const plaintextProjectKey = decryptAsymmetric({
ciphertext: ghostUserLatestKey.encryptedKey,
nonce: ghostUserLatestKey.nonce,
publicKey: ghostUserLatestKey.sender.publicKey,
privateKey: botPrivateKey
});
const projectKeyData = groupMembers.map(({ user: { publicKey, id } }) => {
const { ciphertext: encryptedKey, nonce } = encryptAsymmetric(plaintextProjectKey, publicKey, botPrivateKey);
return {
encryptedKey,
nonce,
senderId: ghostUser.id,
receiverId: id,
projectId: project.id
};
});
await projectKeyDAL.insertMany(projectKeyData);
}
return projectGroup;
};
const updateGroupInProject = async ({
projectSlug,
groupSlug,
roles,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TUpdateProjectGroupDTO) => {
const project = await projectDAL.findOne({
slug: projectSlug
});
if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Groups);
const group = await groupDAL.findOne({ orgId: actorOrgId, slug: groupSlug });
if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` });
const projectGroup = await groupProjectDAL.findOne({ groupId: group.id, projectId: project.id });
if (!projectGroup) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` });
// validate custom roles input
const customInputRoles = roles.filter(
({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
);
const hasCustomRole = Boolean(customInputRoles.length);
const customRoles = hasCustomRole
? await projectRoleDAL.find({
projectId: project.id,
$in: { slug: customInputRoles.map(({ role }) => role) }
})
: [];
if (customRoles.length !== customInputRoles.length) throw new BadRequestError({ message: "Custom role not found" });
const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
const sanitizedProjectMembershipRoles = roles.map((inputRole) => {
const isCustomRole = Boolean(customRolesGroupBySlug?.[inputRole.role]?.[0]);
if (!inputRole.isTemporary) {
return {
projectMembershipId: projectGroup.id,
role: isCustomRole ? ProjectMembershipRole.Custom : inputRole.role,
customRoleId: customRolesGroupBySlug[inputRole.role] ? customRolesGroupBySlug[inputRole.role][0].id : null
};
}
// check cron or relative here later for now its just relative
const relativeTimeInMs = ms(inputRole.temporaryRange);
return {
projectMembershipId: projectGroup.id,
role: isCustomRole ? ProjectMembershipRole.Custom : inputRole.role,
customRoleId: customRolesGroupBySlug[inputRole.role] ? customRolesGroupBySlug[inputRole.role][0].id : null,
isTemporary: true,
temporaryMode: ProjectUserMembershipTemporaryMode.Relative,
temporaryRange: inputRole.temporaryRange,
temporaryAccessStartTime: new Date(inputRole.temporaryAccessStartTime),
temporaryAccessEndTime: new Date(new Date(inputRole.temporaryAccessStartTime).getTime() + relativeTimeInMs)
};
});
const updatedRoles = await groupProjectMembershipRoleDAL.transaction(async (tx) => {
await groupProjectMembershipRoleDAL.delete({ projectMembershipId: projectGroup.id }, tx);
return groupProjectMembershipRoleDAL.insertMany(sanitizedProjectMembershipRoles, tx);
});
return updatedRoles;
};
const removeGroupFromProject = async ({
projectSlug,
groupSlug,
actorId,
actor,
actorOrgId,
actorAuthMethod
}: TDeleteProjectGroupDTO) => {
const project = await projectDAL.findOne({
slug: projectSlug
});
if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` });
const group = await groupDAL.findOne({ orgId: actorOrgId, slug: groupSlug });
if (!group) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` });
const groupProjectMembership = await groupProjectDAL.findOne({ groupId: group.id, projectId: project.id });
if (!groupProjectMembership) throw new BadRequestError({ message: `Failed to find group with slug ${groupSlug}` });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Groups);
const groupMembers = await userGroupMembershipDAL.findGroupMembersNotInProject(group.id, project.id);
if (groupMembers.length) {
await projectKeyDAL.delete({
projectId: project.id,
$in: {
receiverId: groupMembers.map(({ user: { id } }) => id)
}
});
}
const [deletedGroup] = await groupProjectDAL.delete({ groupId: group.id, projectId: project.id });
return deletedGroup;
};
const listGroupsInProject = async ({
projectSlug,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TListProjectGroupDTO) => {
const project = await projectDAL.findOne({
slug: projectSlug
});
if (!project) throw new BadRequestError({ message: `Failed to find project with slug ${projectSlug}` });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
const groupMemberships = await groupProjectDAL.findByProjectId(project.id);
return groupMemberships;
};
return {
addGroupToProject,
updateGroupInProject,
removeGroupFromProject,
listGroupsInProject
};
};

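The deleted addGroupToProject above shares the project key with new group members by decrypting it with the bot's private key and re-encrypting it per receiver public key. A hedged sketch of that re-encryption step, using tweetnacl in place of the project's encryptAsymmetric helper; every identifier and shape here is illustrative, not the actual Infisical code.

import nacl from "tweetnacl";
import { decodeUTF8, encodeBase64 } from "tweetnacl-util";

// Re-encrypt an already-decrypted project key for each new member's public key.
const shareProjectKey = (
  plaintextProjectKey: string,
  receivers: { receiverId: string; publicKey: Uint8Array }[],
  senderSecretKey: Uint8Array,
  senderId: string,
  projectId: string
) =>
  receivers.map(({ receiverId, publicKey }) => {
    const nonce = nacl.randomBytes(nacl.box.nonceLength);
    const ciphertext = nacl.box(decodeUTF8(plaintextProjectKey), nonce, publicKey, senderSecretKey);
    return {
      encryptedKey: encodeBase64(ciphertext),
      nonce: encodeBase64(nonce),
      senderId,
      receiverId,
      projectId
    };
  });

// usage: the ghost user shares one project key with two newly added group members
const sender = nacl.box.keyPair();
const alice = nacl.box.keyPair();
const bob = nacl.box.keyPair();
console.log(
  shareProjectKey(
    "project-symmetric-key",
    [
      { receiverId: "alice-user-id", publicKey: alice.publicKey },
      { receiverId: "bob-user-id", publicKey: bob.publicKey }
    ],
    sender.secretKey,
    "ghost-user-id",
    "project-id"
  )
);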

@ -1,31 +0,0 @@
import { TProjectSlugPermission } from "@app/lib/types";
import { ProjectUserMembershipTemporaryMode } from "../project-membership/project-membership-types";
export type TCreateProjectGroupDTO = {
groupSlug: string;
role: string;
} & TProjectSlugPermission;
export type TUpdateProjectGroupDTO = {
roles: (
| {
role: string;
isTemporary?: false;
}
| {
role: string;
isTemporary: true;
temporaryMode: ProjectUserMembershipTemporaryMode.Relative;
temporaryRange: string;
temporaryAccessStartTime: string;
}
)[];
groupSlug: string;
} & TProjectSlugPermission;
export type TDeleteProjectGroupDTO = {
groupSlug: string;
} & TProjectSlugPermission;
export type TListProjectGroupDTO = TProjectSlugPermission;


@ -163,7 +163,7 @@ export const identityProjectServiceFactory = ({
const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
const sanitizedProjectMembershipRoles = roles.map((inputRole) => {
const santiziedProjectMembershipRoles = roles.map((inputRole) => {
const isCustomRole = Boolean(customRolesGroupBySlug?.[inputRole.role]?.[0]);
if (!inputRole.isTemporary) {
return {
@ -189,7 +189,7 @@ export const identityProjectServiceFactory = ({
const updatedRoles = await identityProjectMembershipRoleDAL.transaction(async (tx) => {
await identityProjectMembershipRoleDAL.delete({ projectMembershipId: projectIdentity.id }, tx);
return identityProjectMembershipRoleDAL.insertMany(sanitizedProjectMembershipRoles, tx);
return identityProjectMembershipRoleDAL.insertMany(santiziedProjectMembershipRoles, tx);
});
return updatedRoles;
@ -246,8 +246,8 @@ export const identityProjectServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
const identityMemberships = await identityProjectDAL.findByProjectId(projectId);
return identityMemberships;
const identityMemberhips = await identityProjectDAL.findByProjectId(projectId);
return identityMemberhips;
};
return {


@ -157,8 +157,8 @@ export const identityServiceFactory = ({
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
const identityMemberships = await identityOrgMembershipDAL.findByOrgId(orgId);
return identityMemberships;
const identityMemberhips = await identityOrgMembershipDAL.findByOrgId(orgId);
return identityMemberhips;
};
return {


@ -146,27 +146,7 @@ export const integrationServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Integrations);
const deletedIntegration = await integrationDAL.transaction(async (tx) => {
// delete integration
const deletedIntegrationResult = await integrationDAL.deleteById(id, tx);
// check if there are other integrations that share the same integration auth
const integrations = await integrationDAL.find(
{
integrationAuthId: integration.integrationAuthId
},
tx
);
if (integrations.length === 0) {
// no other integration shares the same integration auth
// -> delete the integration auth
await integrationAuthDAL.deleteById(integration.integrationAuthId, tx);
}
return deletedIntegrationResult;
});
const deletedIntegration = await integrationDAL.deleteById(id);
return { ...integration, ...deletedIntegration };
};


@ -6,7 +6,6 @@ import { Knex } from "knex";
import { OrgMembershipRole, OrgMembershipStatus } from "@app/db/schemas";
import { TProjects } from "@app/db/schemas/projects";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
@ -35,7 +34,6 @@ import {
TDeleteOrgMembershipDTO,
TFindAllWorkspacesDTO,
TFindOrgMembersByEmailDTO,
TGetOrgGroupsDTO,
TInviteUserToOrgDTO,
TUpdateOrgDTO,
TUpdateOrgMembershipDTO,
@ -47,7 +45,6 @@ type TOrgServiceFactoryDep = {
orgBotDAL: TOrgBotDALFactory;
orgRoleDAL: TOrgRoleDALFactory;
userDAL: TUserDALFactory;
groupDAL: TGroupDALFactory;
projectDAL: TProjectDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findProjectMembershipsByUserId" | "delete">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete">;
@ -67,7 +64,6 @@ export type TOrgServiceFactory = ReturnType<typeof orgServiceFactory>;
export const orgServiceFactory = ({
orgDAL,
userDAL,
groupDAL,
orgRoleDAL,
incidentContactDAL,
permissionService,
@ -117,13 +113,6 @@ export const orgServiceFactory = ({
return members;
};
const getOrgGroups = async ({ actor, actorId, orgId, actorAuthMethod, actorOrgId }: TGetOrgGroupsDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
const groups = await groupDAL.findByOrgId(orgId);
return groups;
};
const findOrgMembersByUsername = async ({
actor,
actorId,
@ -685,7 +674,6 @@ export const orgServiceFactory = ({
// incident contacts
findIncidentContacts,
createIncidentContact,
deleteIncidentContact,
getOrgGroups
deleteIncidentContact
};
};


@ -53,5 +53,3 @@ export type TFindAllWorkspacesDTO = {
export type TUpdateOrgDTO = {
data: Partial<{ name: string; slug: string; authEnforced: boolean; scimEnabled: boolean }>;
} & TOrgPermission;
export type TGetOrgGroupsDTO = TOrgPermission;


@ -17,7 +17,6 @@ import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { TUserGroupMembershipDALFactory } from "../../ee/services/group/user-group-membership-dal";
import { ActorType } from "../auth/auth-type";
import { TOrgDALFactory } from "../org/org-dal";
import { TProjectDALFactory } from "../project/project-dal";
@ -46,7 +45,6 @@ type TProjectMembershipServiceFactoryDep = {
projectMembershipDAL: TProjectMembershipDALFactory;
projectUserMembershipRoleDAL: Pick<TProjectUserMembershipRoleDALFactory, "insertMany" | "find" | "delete">;
userDAL: Pick<TUserDALFactory, "findById" | "findOne" | "findUserByProjectMembershipId" | "find">;
userGroupMembershipDAL: TUserGroupMembershipDALFactory;
projectRoleDAL: Pick<TProjectRoleDALFactory, "find">;
orgDAL: Pick<TOrgDALFactory, "findMembership" | "findOrgMembersByUsername">;
projectDAL: Pick<TProjectDALFactory, "findById" | "findProjectGhostUser" | "transaction">;
@ -65,7 +63,6 @@ export const projectMembershipServiceFactory = ({
projectBotDAL,
orgDAL,
userDAL,
userGroupMembershipDAL,
projectDAL,
projectKeyDAL,
licenseService
@ -123,13 +120,6 @@ export const projectMembershipServiceFactory = ({
});
if (existingMembers.length) throw new BadRequestError({ message: "Some users are already part of project" });
const userIdsToExcludeForProjectKeyAddition = new Set(
await userGroupMembershipDAL.findUserGroupMembershipsInProject(
orgMembers.map(({ username }) => username),
projectId
)
);
await projectMembershipDAL.transaction(async (tx) => {
const projectMemberships = await projectMembershipDAL.insertMany(
orgMembers.map(({ userId }) => ({
@ -145,15 +135,13 @@ export const projectMembershipServiceFactory = ({
);
const encKeyGroupByOrgMembId = groupBy(members, (i) => i.orgMembershipId);
await projectKeyDAL.insertMany(
orgMembers
.filter(({ userId }) => !userIdsToExcludeForProjectKeyAddition.has(userId as string))
.map(({ userId, id }) => ({
encryptedKey: encKeyGroupByOrgMembId[id][0].workspaceEncryptedKey,
nonce: encKeyGroupByOrgMembId[id][0].workspaceEncryptedNonce,
senderId: actorId,
receiverId: userId as string,
projectId
})),
orgMembers.map(({ userId, id }) => ({
encryptedKey: encKeyGroupByOrgMembId[id][0].workspaceEncryptedKey,
nonce: encKeyGroupByOrgMembId[id][0].workspaceEncryptedNonce,
senderId: actorId,
receiverId: userId as string,
projectId
})),
tx
);
});
@ -259,10 +247,6 @@ export const projectMembershipServiceFactory = ({
const members: TProjectMemberships[] = [];
const userIdsToExcludeForProjectKeyAddition = new Set(
await userGroupMembershipDAL.findUserGroupMembershipsInProject(usernamesAndEmails, projectId)
);
await projectMembershipDAL.transaction(async (tx) => {
const projectMemberships = await projectMembershipDAL.insertMany(
orgMembers.map(({ user }) => ({
@ -281,15 +265,13 @@ export const projectMembershipServiceFactory = ({
const encKeyGroupByOrgMembId = groupBy(newWsMembers, (i) => i.orgMembershipId);
await projectKeyDAL.insertMany(
orgMembers
.filter(({ user }) => !userIdsToExcludeForProjectKeyAddition.has(user.id))
.map(({ user, id }) => ({
encryptedKey: encKeyGroupByOrgMembId[id][0].workspaceEncryptedKey,
nonce: encKeyGroupByOrgMembId[id][0].workspaceEncryptedNonce,
senderId: ghostUser.id,
receiverId: user.id,
projectId
})),
orgMembers.map(({ user, id }) => ({
encryptedKey: encKeyGroupByOrgMembId[id][0].workspaceEncryptedKey,
nonce: encKeyGroupByOrgMembId[id][0].workspaceEncryptedNonce,
senderId: ghostUser.id,
receiverId: user.id,
projectId
})),
tx
);
});
@ -362,7 +344,7 @@ export const projectMembershipServiceFactory = ({
if (customRoles.length !== customInputRoles.length) throw new BadRequestError({ message: "Custom role not found" });
const customRolesGroupBySlug = groupBy(customRoles, ({ slug }) => slug);
const sanitizedProjectMembershipRoles = roles.map((inputRole) => {
const santiziedProjectMembershipRoles = roles.map((inputRole) => {
const isCustomRole = Boolean(customRolesGroupBySlug?.[inputRole.role]?.[0]);
if (!inputRole.isTemporary) {
return {
@ -388,7 +370,7 @@ export const projectMembershipServiceFactory = ({
const updatedRoles = await projectMembershipDAL.transaction(async (tx) => {
await projectUserMembershipRoleDAL.delete({ projectMembershipId: membershipId }, tx);
return projectUserMembershipRoleDAL.insertMany(sanitizedProjectMembershipRoles, tx);
return projectUserMembershipRoleDAL.insertMany(santiziedProjectMembershipRoles, tx);
});
return updatedRoles;
@ -476,10 +458,6 @@ export const projectMembershipServiceFactory = ({
});
}
const userIdsToExcludeFromProjectKeyRemoval = new Set(
await userGroupMembershipDAL.findUserGroupMembershipsInProject(usernamesAndEmails, projectId)
);
const memberships = await projectMembershipDAL.transaction(async (tx) => {
const deletedMemberships = await projectMembershipDAL.delete(
{
@ -491,15 +469,11 @@ export const projectMembershipServiceFactory = ({
tx
);
// delete project keys belonging to users that are not part of any other groups in the project
await projectKeyDAL.delete(
{
projectId,
$in: {
receiverId: projectMembers
.filter(({ user }) => !userIdsToExcludeFromProjectKeyRemoval.has(user.id))
.map(({ user }) => user.id)
.filter(Boolean)
receiverId: projectMembers.map(({ user }) => user.id).filter(Boolean)
}
},
tx

View File

@ -30,33 +30,8 @@ export const projectDALFactory = (db: TDbClient) => {
{ column: `${TableName.Environment}.position`, order: "asc" }
]);
const groups: string[] = await db(TableName.UserGroupMembership)
.where({ userId })
.select(selectAllTableCols(TableName.UserGroupMembership))
.pluck("groupId");
const groupWorkspaces = await db(TableName.GroupProjectMembership)
.whereIn("groupId", groups)
.join(TableName.Project, `${TableName.GroupProjectMembership}.projectId`, `${TableName.Project}.id`)
.whereNotIn(
`${TableName.Project}.id`,
workspaces.map(({ id }) => id)
)
.leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
.select(
selectAllTableCols(TableName.Project),
db.ref("id").withSchema(TableName.Project).as("_id"),
db.ref("id").withSchema(TableName.Environment).as("envId"),
db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("name").withSchema(TableName.Environment).as("envName")
)
.orderBy([
{ column: `${TableName.Project}.name`, order: "asc" },
{ column: `${TableName.Environment}.position`, order: "asc" }
]);
const nestedWorkspaces = sqlNestRelationships({
data: workspaces.concat(groupWorkspaces),
data: workspaces,
key: "id",
parentMapper: ({ _id, ...el }) => ({ _id, ...ProjectsSchema.parse(el) }),
childrenMapper: [
@ -151,11 +126,13 @@ export const projectDALFactory = (db: TDbClient) => {
const findProjectById = async (id: string) => {
try {
const workspaces = await db(TableName.Project)
const workspaces = await db(TableName.ProjectMembership)
.where(`${TableName.Project}.id`, id)
.leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
.join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
.select(
selectAllTableCols(TableName.Project),
db.ref("id").withSchema(TableName.Project).as("_id"),
db.ref("id").withSchema(TableName.Environment).as("envId"),
db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("name").withSchema(TableName.Environment).as("envName")
@ -164,11 +141,10 @@ export const projectDALFactory = (db: TDbClient) => {
{ column: `${TableName.Project}.name`, order: "asc" },
{ column: `${TableName.Environment}.position`, order: "asc" }
]);
const project = sqlNestRelationships({
data: workspaces,
key: "id",
parentMapper: ({ ...el }) => ({ _id: el.id, ...ProjectsSchema.parse(el) }),
parentMapper: ({ _id, ...el }) => ({ _id, ...ProjectsSchema.parse(el) }),
childrenMapper: [
{
key: "envId",
@ -198,12 +174,14 @@ export const projectDALFactory = (db: TDbClient) => {
throw new BadRequestError({ message: "Organization ID is required when querying with slugs" });
}
const projects = await db(TableName.Project)
const projects = await db(TableName.ProjectMembership)
.where(`${TableName.Project}.slug`, slug)
.where(`${TableName.Project}.orgId`, orgId)
.leftJoin(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
.join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Environment, `${TableName.Environment}.projectId`, `${TableName.Project}.id`)
.select(
selectAllTableCols(TableName.Project),
db.ref("id").withSchema(TableName.Project).as("_id"),
db.ref("id").withSchema(TableName.Environment).as("envId"),
db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("name").withSchema(TableName.Environment).as("envName")
@ -216,7 +194,7 @@ export const projectDALFactory = (db: TDbClient) => {
const project = sqlNestRelationships({
data: projects,
key: "id",
parentMapper: ({ ...el }) => ({ _id: el.id, ...ProjectsSchema.parse(el) }),
parentMapper: ({ _id, ...el }) => ({ _id, ...ProjectsSchema.parse(el) }),
childrenMapper: [
{
key: "envId",

View File

@ -170,8 +170,7 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
// if the given folder id is root folder id then initial path is set as / instead of /root
// if not root folder the path here will be /<folder name>
path: db.raw(`CONCAT('/', (CASE WHEN "parentId" is NULL THEN '' ELSE ${TableName.SecretFolder}.name END))`),
child: db.raw("NULL::uuid"),
environmentSlug: `${TableName.Environment}.slug`
child: db.raw("NULL::uuid")
})
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.where({ projectId })
@ -191,15 +190,14 @@ const sqlFindSecretPathByFolderId = (db: Knex, projectId: string, folderIds: str
ELSE CONCAT('/', secret_folders.name)
END, parent.path )`
),
child: db.raw("COALESCE(parent.child, parent.id)"),
environmentSlug: "parent.environmentSlug"
child: db.raw("COALESCE(parent.child, parent.id)")
})
.from(TableName.SecretFolder)
.join("parent", "parent.parentId", `${TableName.SecretFolder}.id`)
);
})
.select("*")
.from<TSecretFolders & { child: string | null; path: string; environmentSlug: string }>("parent");
.from<TSecretFolders & { child: string | null; path: string }>("parent");
export type TSecretFolderDALFactory = ReturnType<typeof secretFolderDALFactory>;
// never change this. If u do write a migration for it
@ -259,12 +257,10 @@ export const secretFolderDALFactory = (db: TDbClient) => {
const findSecretPathByFolderIds = async (projectId: string, folderIds: string[], tx?: Knex) => {
try {
const folders = await sqlFindSecretPathByFolderId(tx || db, projectId, folderIds);
const rootFolders = groupBy(
folders.filter(({ parentId }) => parentId === null),
(i) => i.child || i.id // root condition then child and parent will null
);
return folderIds.map((folderId) => rootFolders[folderId]?.[0]);
} catch (error) {
throw new DatabaseError({ error, name: "Find by secret path" });

View File

@ -49,7 +49,7 @@ export const secretImportDALFactory = (db: TDbClient) => {
}
};
const find = async (filter: Partial<TSecretImports & { projectId: string }>, tx?: Knex) => {
const find = async (filter: Partial<TSecretImports>, tx?: Knex) => {
try {
const docs = await (tx || db)(TableName.SecretImport)
.where(filter)

View File

@ -7,7 +7,6 @@ import { BadRequestError } from "@app/lib/errors";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretQueueFactory } from "../secret/secret-queue";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "./secret-import-dal";
import { fnSecretsFromImports } from "./secret-import-fns";
@ -26,7 +25,6 @@ type TSecretImportServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
projectEnvDAL: TProjectEnvDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
};
const ERR_SEC_IMP_NOT_FOUND = new BadRequestError({ message: "Secret import not found" });
@ -39,8 +37,7 @@ export const secretImportServiceFactory = ({
permissionService,
folderDAL,
projectDAL,
secretDAL,
secretQueueService
secretDAL
}: TSecretImportServiceFactoryDep) => {
const createImport = async ({
environment,
@ -80,19 +77,10 @@ export const secretImportServiceFactory = ({
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "Create import" });
// TODO(akhilmhdh-pg): updated permission check add here
const [importEnv] = await projectEnvDAL.findBySlugs(projectId, [data.environment]);
if (!importEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" });
const sourceFolder = await folderDAL.findBySecretPath(projectId, data.environment, data.path);
if (sourceFolder) {
const existingImport = await secretImportDAL.findOne({
folderId: sourceFolder.id,
importEnv: folder.environment.id,
importPath: path
});
if (existingImport) throw new BadRequestError({ message: "Cyclic import not allowed" });
}
const secImport = await secretImportDAL.transaction(async (tx) => {
const lastPos = await secretImportDAL.findLastImportPosition(folder.id, tx);
return secretImportDAL.create(
@ -106,12 +94,6 @@ export const secretImportServiceFactory = ({
);
});
await secretQueueService.syncSecrets({
secretPath: secImport.importPath,
projectId,
environment: importEnv.slug
});
return { ...secImport, importEnv };
};
@ -149,20 +131,6 @@ export const secretImportServiceFactory = ({
: await projectEnvDAL.findById(secImpDoc.importEnv);
if (!importedEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" });
const sourceFolder = await folderDAL.findBySecretPath(
projectId,
importedEnv.slug,
data.path || secImpDoc.importPath
);
if (sourceFolder) {
const existingImport = await secretImportDAL.findOne({
folderId: sourceFolder.id,
importEnv: folder.environment.id,
importPath: path
});
if (existingImport) throw new BadRequestError({ message: "Cyclic import not allowed" });
}
const updatedSecImport = await secretImportDAL.transaction(async (tx) => {
const secImp = await secretImportDAL.findOne({ folderId: folder.id, id });
if (!secImp) throw ERR_SEC_IMP_NOT_FOUND;
@ -217,13 +185,6 @@ export const secretImportServiceFactory = ({
if (!importEnv) throw new BadRequestError({ error: "Imported env not found", name: "Create import" });
return { ...doc, importEnv };
});
await secretQueueService.syncSecrets({
secretPath: path,
projectId,
environment
});
return secImport;
};

View File

@ -21,7 +21,6 @@ import {
} from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorAuthMethod, ActorType } from "../auth/auth-type";
import { getBotKeyFnFactory } from "../project-bot/project-bot-fns";
@ -93,8 +92,7 @@ const buildHierarchy = (folders: TSecretFolders[]): FolderMap => {
const generatePaths = (
map: FolderMap,
parentId: string = "null",
basePath: string = "",
currentDepth: number = 0
basePath: string = ""
): { path: string; folderId: string }[] => {
const children = map[parentId || "null"] || [];
let paths: { path: string; folderId: string }[] = [];
@ -107,20 +105,13 @@ const generatePaths = (
// eslint-disable-next-line no-nested-ternary
const currPath = basePath === "" ? (isRootFolder ? "/" : `/${child.name}`) : `${basePath}/${child.name}`;
// Add the current path
paths.push({
path: currPath,
folderId: child.id
});
}); // Add the current path
// We make sure that the recursion depth doesn't exceed 20.
// We do this to create "circuit break", basically to ensure that we can't encounter any potential memory leaks.
if (currentDepth >= 20) {
logger.info(`generatePaths: Recursion depth exceeded 20, breaking out of recursion [map=${JSON.stringify(map)}]`);
return;
}
// Recursively generate paths for children, passing down the formatted path
const childPaths = generatePaths(map, child.id, currPath, currentDepth + 1);
// Recursively generate paths for children, passing down the formatted path
const childPaths = generatePaths(map, child.id, currPath);
paths = paths.concat(
childPaths.map((p) => ({
path: p.path,

View File

@ -3,7 +3,7 @@ import { getConfig } from "@app/lib/config/env";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { daysToMillisecond, secondsToMillis } from "@app/lib/dates";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, isSamePath, unique } from "@app/lib/fn";
import { isSamePath } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
@ -23,6 +23,7 @@ import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "../secret-import/secret-import-dal";
import { fnSecretsFromImports } from "../secret-import/secret-import-fns";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TWebhookDALFactory } from "../webhook/webhook-dal";
import { fnTriggerWebhook } from "../webhook/webhook-fns";
@ -31,6 +32,7 @@ import { interpolateSecrets } from "./secret-fns";
import { TCreateSecretReminderDTO, THandleReminderDTO, TRemoveSecretReminderDTO } from "./secret-types";
export type TSecretQueueFactory = ReturnType<typeof secretQueueFactory>;
type TSecretQueueFactoryDep = {
queueService: TQueueServiceFactory;
integrationDAL: Pick<TIntegrationDALFactory, "findByProjectIdV2" | "updateById">;
@ -58,8 +60,6 @@ export type TGetSecrets = {
environment: string;
};
const MAX_SYNC_SECRET_DEPTH = 5;
export const secretQueueFactory = ({
queueService,
integrationDAL,
@ -117,10 +117,7 @@ export const secretQueueFactory = ({
});
};
const syncSecrets = async (dto: TGetSecrets & { depth?: number }) => {
logger.info(
`syncSecrets: syncing project secrets where [projectId=${dto.projectId}] [environment=${dto.environment}] [path=${dto.secretPath}]`
);
const syncSecrets = async (dto: TGetSecrets) => {
await queueService.queue(QueueName.SecretWebhook, QueueJobs.SecWebhook, dto, {
jobId: `secret-webhook-${dto.environment}-${dto.projectId}-${dto.secretPath}`,
removeOnFail: { count: 5 },
@ -230,42 +227,62 @@ export const secretQueueFactory = ({
}
};
type Content = Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }>;
/**
* Return the secrets in a given [folderId] including secrets from
* nested imported folders recursively.
*/
const getIntegrationSecrets = async (dto: {
projectId: string;
environment: string;
folderId: string;
key: string;
depth: number;
}) => {
let content: Content = {};
if (dto.depth > MAX_SYNC_SECRET_DEPTH) {
logger.info(
`getIntegrationSecrets: secret depth exceeded for [projectId=${dto.projectId}] [folderId=${dto.folderId}] [depth=${dto.depth}]`
);
return content;
}
// process secrets in current folder
const getIntegrationSecrets = async (dto: TGetSecrets & { folderId: string }, key: string) => {
const secrets = await secretDAL.findByFolderId(dto.folderId);
// get imported secrets
const secretImport = await secretImportDAL.find({ folderId: dto.folderId });
const importedSecrets = await fnSecretsFromImports({
allowedImports: secretImport,
secretDAL,
folderDAL
});
if (!secrets.length && !importedSecrets.length) return {};
const content: Record<string, { value: string; comment?: string; skipMultilineEncoding?: boolean }> = {};
importedSecrets.forEach(({ secrets: secs }) => {
secs.forEach((secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key
});
content[secretKey] = { value: secretValue };
content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
if (secret.secretCommentCiphertext && secret.secretCommentIV && secret.secretCommentTag) {
const commentValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretCommentCiphertext,
iv: secret.secretCommentIV,
tag: secret.secretCommentTag,
key
});
content[secretKey].comment = commentValue;
}
});
});
secrets.forEach((secret) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key: dto.key
key
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key: dto.key
key
});
content[secretKey] = { value: secretValue };
@ -275,111 +292,38 @@ export const secretQueueFactory = ({
ciphertext: secret.secretCommentCiphertext,
iv: secret.secretCommentIV,
tag: secret.secretCommentTag,
key: dto.key
key
});
content[secretKey].comment = commentValue;
}
content[secretKey].skipMultilineEncoding = Boolean(secret.skipMultilineEncoding);
});
const expandSecrets = interpolateSecrets({
projectId: dto.projectId,
secretEncKey: dto.key,
secretEncKey: key,
folderDAL,
secretDAL
});
await expandSecrets(content);
// check if current folder has any imports from other folders
const secretImport = await secretImportDAL.find({ folderId: dto.folderId });
// if no imports then return secrets in the current folder
if (!secretImport) return content;
const importedFolders = await folderDAL.findByManySecretPath(
secretImport.map(({ importEnv, importPath }) => ({
envId: importEnv.id,
secretPath: importPath
}))
);
for await (const folder of importedFolders) {
if (folder) {
// get secrets contained in each imported folder by recursively calling
// this function against the imported folder
const importedSecrets = await getIntegrationSecrets({
environment: dto.environment,
projectId: dto.projectId,
folderId: folder.id,
key: dto.key,
depth: dto.depth + 1
});
// add the imported secrets to the current folder secrets
content = { ...content, ...importedSecrets };
}
}
return content;
};
queueService.start(QueueName.IntegrationSync, async (job) => {
const { environment, projectId, secretPath, depth = 1 } = job.data;
const { environment, projectId, secretPath } = job.data;
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) {
logger.error(new Error("Secret path not found"));
logger.error("Secret path not found");
return;
}
// start syncing all linked imports also
if (depth < MAX_SYNC_SECRET_DEPTH) {
// find all imports made with the given environment and secret path
const linkSourceDto = {
projectId,
importEnv: folder.environment.id,
importPath: secretPath
};
const imports = await secretImportDAL.find(linkSourceDto);
if (imports.length) {
// keep calling sync secret for all the imports made
const importedFolderIds = unique(imports, (i) => i.folderId).map(({ folderId }) => folderId);
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
const foldersGroupedById = groupBy(importedFolders, (i) => i.child || i.id);
await Promise.all(
imports
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0].path))
.map(({ folderId }) => {
const syncDto = {
depth: depth + 1,
projectId,
secretPath: foldersGroupedById[folderId][0].path,
environment: foldersGroupedById[folderId][0].environmentSlug
};
logger.info(
`getIntegrationSecrets: Syncing secret due to link change [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${depth}]`
);
return syncSecrets(syncDto);
})
);
}
} else {
logger.info(`getIntegrationSecrets: Secret depth exceeded for [projectId=${projectId}] [folderId=${folder.id}]`);
}
const integrations = await integrationDAL.findByProjectIdV2(projectId, environment); // note: returns array of integrations + integration auths in this environment
const integrations = await integrationDAL.findByProjectIdV2(projectId, environment);
const toBeSyncedIntegrations = integrations.filter(
// note: sync only the integrations sourced from secretPath
({ secretPath: integrationSecPath, isActive }) => isActive && isSamePath(secretPath, integrationSecPath)
);
if (!integrations.length) return;
logger.info(
`getIntegrationSecrets: secret integration sync started [jobId=${job.id}] [jobId=${job.id}] [projectId=${job.data.projectId}] [environment=${job.data.environment}] [secretPath=${job.data.secretPath}] [depth=${job.data.depth}]`
);
logger.info("Secret integration sync started", job.data, job.id);
for (const integration of toBeSyncedIntegrations) {
const integrationAuth = {
...integration.integrationAuth,
@ -390,13 +334,7 @@ export const secretQueueFactory = ({
const botKey = await projectBotService.getBotKey(projectId);
const { accessToken, accessId } = await integrationAuthService.getIntegrationAccessToken(integrationAuth, botKey);
const secrets = await getIntegrationSecrets({
environment,
projectId,
folderId: folder.id,
key: botKey,
depth: 1
});
const secrets = await getIntegrationSecrets({ environment, projectId, secretPath, folderId: folder.id }, botKey);
const suffixedSecrets: typeof secrets = {};
const metadata = integration.metadata as Record<string, string>;
if (metadata) {
@ -424,7 +362,7 @@ export const secretQueueFactory = ({
});
}
logger.info("Secret integration sync ended: %s", job.id);
logger.info("Secret integration sync ended", job.id);
});
queueService.start(QueueName.SecretReminder, async ({ data }) => {
@ -465,7 +403,7 @@ export const secretQueueFactory = ({
});
queueService.listen(QueueName.IntegrationSync, "failed", (job, err) => {
logger.error(err, "Failed to sync integration %s", job?.id);
logger.error("Failed to sync integration", job?.data, err);
});
queueService.start(QueueName.SecretWebhook, async (job) => {
@ -473,8 +411,7 @@ export const secretQueueFactory = ({
});
return {
// depth is internal only field thus no need to make it available outside
syncSecrets: (dto: TGetSecrets) => syncSecrets(dto),
syncSecrets,
syncIntegrations,
addSecretReminder,
removeSecretReminder,
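The import traversal above is bounded by MAX_SYNC_SECRET_DEPTH so that cyclic or deeply nested secret imports cannot keep the sync queue busy forever. Purely for illustration, a minimal Go sketch of the same depth-capped recursion guard (the graph shape and names here are hypothetical, not part of this change):
package main

import "fmt"

const maxSyncSecretDepth = 5 // mirrors MAX_SYNC_SECRET_DEPTH in the queue service

// importGraph maps a folder to the folders it imports from (hypothetical shape).
type importGraph map[string][]string

// collect walks imports depth-first but stops once the depth cap is reached;
// the cap acts as a circuit breaker, so even cyclic imports terminate.
func collect(graph importGraph, folder string, depth int, visited map[string]bool) {
	if depth > maxSyncSecretDepth {
		fmt.Printf("depth exceeded at folder %q, stopping\n", folder)
		return
	}
	visited[folder] = true
	for _, imported := range graph[folder] {
		collect(graph, imported, depth+1, visited)
	}
}

func main() {
	graph := importGraph{"a": {"b"}, "b": {"a"}} // a deliberate cycle
	visited := map[string]bool{}
	collect(graph, "a", 1, visited)
	fmt.Println(len(visited), "folders visited")
}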

View File

@ -528,6 +528,7 @@ type GetRawSecretsV3Request struct {
WorkspaceId string `json:"workspaceId"`
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
}
type GetRawSecretsV3Response struct {

View File

@ -479,7 +479,7 @@ func (tm *AgentManager) GetToken() string {
// Fetches a new access token using client credentials
func (tm *AgentManager) FetchNewAccessToken() error {
clientID := os.Getenv("INFISICAL_UNIVERSAL_AUTH_CLIENT_ID")
clientID := os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
if clientID == "" {
clientIDAsByte, err := ReadFile(tm.clientIdPath)
if err != nil {
@ -509,7 +509,7 @@ func (tm *AgentManager) FetchNewAccessToken() error {
// save as cache in memory
tm.cachedClientSecret = clientSecret
err, loginResponse := universalAuthLogin(clientID, clientSecret)
loginResponse, err := util.UniversalAuthLogin(clientID, clientSecret)
if err != nil {
return err
}
@ -725,20 +725,6 @@ func (tm *AgentManager) MonitorSecretChanges(secretTemplate Template, templateId
}
}
func universalAuthLogin(clientId string, clientSecret string) (error, api.UniversalAuthLoginResponse) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)
tokenResponse, err := api.CallUniversalAuthLogin(httpClient, api.UniversalAuthLoginRequest{ClientId: clientId, ClientSecret: clientSecret})
if err != nil {
return err, api.UniversalAuthLoginResponse{}
}
return nil, tokenResponse
}
// runCmd represents the run command
var agentCmd = &cobra.Command{
Example: `

View File

@ -44,6 +44,11 @@ var exportCmd = &cobra.Command{
util.HandleError(err)
}
includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err)
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err)
@ -59,8 +64,7 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -75,7 +79,21 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, WorkspaceId: projectId, SecretsPath: secretsPath}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
TagSlugs: tagSlugs,
WorkspaceId: projectId,
SecretsPath: secretsPath,
IncludeImport: includeImports,
}
if token != nil && token.Type == "service-token" {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}
@ -88,9 +106,16 @@ var exportCmd = &cobra.Command{
var output string
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, "")
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == "service-token" {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
authParams.UniversalAuthAccessToken = token.Token
}
secrets = util.ExpandSecrets(secrets, authParams, "")
}
secrets = util.FilterSecretsByTag(secrets, tagSlugs)
output, err = formatEnvs(secrets, format)
@ -110,6 +135,7 @@ func init() {
exportCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().Bool("include-imports", true, "Imported linked secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from")

View File

@ -36,18 +36,33 @@ var getCmd = &cobra.Command{
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
foldersPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folders, err := util.GetAllFolders(models.GetAllFoldersParameters{Environment: environmentName, InfisicalToken: infisicalToken, FoldersPath: foldersPath})
request := models.GetAllFoldersParameters{
Environment: environmentName,
WorkspaceId: projectId,
FoldersPath: foldersPath,
}
if token != nil && token.Type == "service-token" {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
request.UniversalAuthAccessToken = token.Token
}
folders, err := util.GetAllFolders(request)
if err != nil {
util.HandleError(err, "Unable to get folders")
}

View File

@ -55,95 +55,148 @@ var loginCmd = &cobra.Command{
Short: "Login into your Infisical account",
DisableFlagsInUseLine: true,
Run: func(cmd *cobra.Command, args []string) {
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
log.Debug().Err(err)
} else if err != nil {
loginMethod, err := cmd.Flags().GetString("method")
if err != nil {
util.HandleError(err)
}
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired && len(currentLoggedInUserDetails.UserCredentials.PrivateKey) != 0 {
shouldOverride, err := userLoginMenu(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
if loginMethod != "user" && loginMethod != "universal-auth" {
util.PrintErrorMessageAndExit("Invalid login method. Please use either 'user' or 'universal-auth'")
}
if loginMethod == "user" {
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
log.Debug().Err(err)
} else if err != nil {
util.HandleError(err)
}
if !shouldOverride {
return
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired && len(currentLoggedInUserDetails.UserCredentials.PrivateKey) != 0 {
shouldOverride, err := userLoginMenu(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
util.HandleError(err)
}
if !shouldOverride {
return
}
}
}
//override domain
domainQuery := true
if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
overrideDomain, err := DomainOverridePrompt()
if err != nil {
util.HandleError(err)
//override domain
domainQuery := true
if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
overrideDomain, err := DomainOverridePrompt()
if err != nil {
util.HandleError(err)
}
//if not override set INFISICAL_URL to exported var
//set domainQuery to false
if !overrideDomain {
domainQuery = false
config.INFISICAL_URL = config.INFISICAL_URL_MANUAL_OVERRIDE
}
}
//if not override set INFISICAL_URL to exported var
//set domainQuery to false
if !overrideDomain {
domainQuery = false
config.INFISICAL_URL = config.INFISICAL_URL_MANUAL_OVERRIDE
//prompt user to select domain between Infisical cloud and self hosting
if domainQuery {
err = askForDomain()
if err != nil {
util.HandleError(err, "Unable to parse domain url")
}
}
var userCredentialsToBeStored models.UserCredentials
}
//prompt user to select domain between Infisical cloud and self hosting
if domainQuery {
err = askForDomain()
if err != nil {
util.HandleError(err, "Unable to parse domain url")
}
}
var userCredentialsToBeStored models.UserCredentials
interactiveLogin := false
if cmd.Flags().Changed("interactive") {
interactiveLogin = true
cliDefaultLogin(&userCredentialsToBeStored)
}
//call browser login function
if !interactiveLogin {
fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]")
userCredentialsToBeStored, err = browserCliLogin()
if err != nil {
//default to cli login on error
interactiveLogin := false
if cmd.Flags().Changed("interactive") {
interactiveLogin = true
cliDefaultLogin(&userCredentialsToBeStored)
}
//call browser login function
if !interactiveLogin {
fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]")
userCredentialsToBeStored, err = browserCliLogin()
if err != nil {
//default to cli login on error
cliDefaultLogin(&userCredentialsToBeStored)
}
}
err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
if err != nil {
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
log.Error().Msgf("\nTo trouble shoot further, read https://infisical.com/docs/cli/faq")
log.Debug().Err(err)
//return here
util.HandleError(err)
}
err = util.WriteInitalConfig(&userCredentialsToBeStored)
if err != nil {
util.HandleError(err, "Unable to write write to Infisical Config file. Please try again")
}
// clear backed up secrets from prev account
util.DeleteBackupSecrets()
whilte := color.New(color.FgGreen)
boldWhite := whilte.Add(color.Bold)
time.Sleep(time.Second * 1)
boldWhite.Printf(">>>> Welcome to Infisical!")
boldWhite.Printf(" You are now logged in as %v <<<< \n", userCredentialsToBeStored.Email)
plainBold := color.New(color.Bold)
plainBold.Println("\nQuick links")
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
} else if loginMethod == "universal-auth" {
clientId, err := cmd.Flags().GetString("client-id")
if err != nil {
util.HandleError(err)
}
clientSecret, err := cmd.Flags().GetString("client-secret")
if err != nil {
util.HandleError(err)
}
if clientId == "" {
clientId = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
if clientId == "" {
util.PrintErrorMessageAndExit("Please provide client-id")
}
}
if clientSecret == "" {
clientSecret = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
if clientSecret == "" {
util.PrintErrorMessageAndExit("Please provide client-secret")
}
}
res, err := util.UniversalAuthLogin(clientId, clientSecret)
if err != nil {
util.HandleError(err)
}
boldGreen := color.New(color.FgGreen).Add(color.Bold)
time.Sleep(time.Second * 1)
boldGreen.Printf(">>>> Successfully authenticated with Universal Auth!\n\n")
boldGreen.Printf("Universal Auth Access Token:\n%v", res.AccessToken)
plainBold := color.New(color.Bold)
plainBold.Println("\n\nYou can use this access token to authenticate through other commands in the CLI.")
}
err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
if err != nil {
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
log.Error().Msgf("\nTo trouble shoot further, read https://infisical.com/docs/cli/faq")
log.Debug().Err(err)
//return here
util.HandleError(err)
}
err = util.WriteInitalConfig(&userCredentialsToBeStored)
if err != nil {
util.HandleError(err, "Unable to write write to Infisical Config file. Please try again")
}
// clear backed up secrets from prev account
util.DeleteBackupSecrets()
whilte := color.New(color.FgGreen)
boldWhite := whilte.Add(color.Bold)
time.Sleep(time.Second * 1)
boldWhite.Printf(">>>> Welcome to Infisical!")
boldWhite.Printf(" You are now logged in as %v <<<< \n", userCredentialsToBeStored.Email)
plainBold := color.New(color.Bold)
plainBold.Println("\nQuick links")
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
},
}
@ -313,6 +366,9 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
func init() {
rootCmd.AddCommand(loginCmd)
loginCmd.Flags().BoolP("interactive", "i", false, "login via the command line")
loginCmd.Flags().String("method", "user", "login method [user, universal-auth]")
loginCmd.Flags().String("client-id", "", "client id for universal auth")
loginCmd.Flags().String("client-secret", "", "client secret for universal auth")
}
func DomainOverridePrompt() (bool, error) {

View File

@ -62,8 +62,7 @@ var runCmd = &cobra.Command{
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -73,6 +72,11 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
@ -103,7 +107,22 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports, Recursive: recursive}, projectConfigDir)
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
}
if token != nil && token.Type == "service-token" {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir)
if err != nil {
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
@ -116,9 +135,16 @@ var runCmd = &cobra.Command{
}
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, projectConfigDir)
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == "service-token" {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
authParams.UniversalAuthAccessToken = token.Token
}
secrets = util.ExpandSecrets(secrets, authParams, projectConfigDir)
}
secretsByKey := getSecretsByKeys(secrets)
@ -149,7 +175,15 @@ var runCmd = &cobra.Command{
log.Debug().Msgf("injecting the following environment variables into shell: %v", env)
Telemetry.CaptureEvent("cli-command:run", posthog.NewProperties().Set("secretsCount", len(secrets)).Set("environment", environmentName).Set("isUsingServiceToken", infisicalToken != "").Set("single-command", strings.Join(args, " ")).Set("multi-command", cmd.Flag("command").Value.String()).Set("version", util.CLI_VERSION))
Telemetry.CaptureEvent("cli-command:run",
posthog.NewProperties().
Set("secretsCount", len(secrets)).
Set("environment", environmentName).
Set("isUsingServiceToken", token.Type == "service-token").
Set("isUsingUniversalAuthToken", token.Type == "universal-auth-token").
Set("single-command", strings.Join(args, " ")).
Set("multi-command", cmd.Flag("command").Value.String()).
Set("version", util.CLI_VERSION))
if cmd.Flags().Changed("command") {
command := cmd.Flag("command").Value.String()
@ -204,6 +238,7 @@ func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
runCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("include-imports", true, "Import linked secrets ")

View File

@ -38,12 +38,12 @@ var secretsCmd = &cobra.Command{
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -78,7 +78,22 @@ var secretsCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports, Recursive: recursive}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
}
if token != nil && token.Type == "service-token" {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err)
}
@ -90,9 +105,15 @@ var secretsCmd = &cobra.Command{
}
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, "")
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == "service-token" {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
authParams.UniversalAuthAccessToken = token.Token
}
secrets = util.ExpandSecrets(secrets, authParams, "")
}
visualize.PrintAllSecretDetails(secrets)
@ -402,8 +423,12 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
shouldExpand, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -413,6 +438,11 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
@ -428,7 +458,22 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse path flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: true, Recursive: recursive}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: true,
Recursive: recursive,
}
if token != nil && token.Type == "service-token" {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
@ -449,6 +494,18 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
}
}
if shouldExpand {
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == "service-token" {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
authParams.UniversalAuthAccessToken = token.Token
}
requestedSecrets = util.ExpandSecrets(requestedSecrets, authParams, "")
}
if showOnlyValue && len(requestedSecrets) > 1 {
util.PrintErrorMessageAndExit("--raw-value only works with one secret.")
}
@ -475,8 +532,12 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -486,7 +547,21 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: true}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: true,
}
if token != nil && token.Type == "service-token" {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == "universal-auth-token" {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
@ -686,19 +761,23 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod
func init() {
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
secretsGenerateExampleEnvCmd.Flags().String("path", "/", "Fetch secrets from within a folder path")
secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)
secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.AddCommand(secretsGetCmd)
secretsGetCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
secretsGetCmd.Flags().Bool("raw-value", false, "Returns only the value of secret, only works with one secret")
secretsGetCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
secretsCmd.AddCommand(secretsGetCmd)
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsSetCmd)
secretsSetCmd.Flags().String("path", "/", "set secrets within a folder path")
// Only supports logged in users (JWT auth)
secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
@ -707,6 +786,8 @@ func init() {
secretsDeleteCmd.Flags().String("type", "personal", "the type of secret to delete: personal or shared (default: personal)")
secretsDeleteCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsCmd.AddCommand(secretsDeleteCmd)
// Only supports logged in users (JWT auth)
secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
@ -718,6 +799,7 @@ func init() {
// Add getCmd, createCmd and deleteCmd flags here
getCmd.Flags().StringP("path", "p", "/", "The path from where folders should be fetched from")
getCmd.Flags().String("token", "", "Fetch folders using the infisical token")
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
folderCmd.AddCommand(getCmd)
// Add createCmd flags here
@ -735,6 +817,7 @@ func init() {
// ** End of folders sub command
secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
secretsCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")

63
cli/packages/cmd/token.go Normal file
View File

@ -0,0 +1,63 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"strings"
"time"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/fatih/color"
"github.com/spf13/cobra"
)
var tokenCmd = &cobra.Command{
Use: "token",
Short: "Manage your access tokens",
DisableFlagsInUseLine: true,
Example: "infisical token",
Args: cobra.ExactArgs(0),
PreRun: func(cmd *cobra.Command, args []string) {
util.RequireLogin()
},
Run: func(cmd *cobra.Command, args []string) {
},
}
var tokenRenewCmd = &cobra.Command{
Use: "renew [token]",
Short: "Used to renew your universal auth access token",
DisableFlagsInUseLine: true,
Example: "infisical token renew <access-token>",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
// args[0] will be the <INSERT_TOKEN> from your command call
token := args[0]
if strings.HasPrefix(token, "st.") {
util.PrintErrorMessageAndExit("You are trying to renew a service token. You can only renew universal auth access tokens.")
}
renewedAccessToken, err := util.RenewUniversalAuthAccessToken(token)
if err != nil {
util.HandleError(err, "Unable to renew token")
}
boldGreen := color.New(color.FgGreen).Add(color.Bold)
time.Sleep(time.Second * 1)
boldGreen.Printf(">>>> Successfully renewed token!\n\n")
boldGreen.Printf("Renewed Access Token:\n%v", renewedAccessToken)
plainBold := color.New(color.Bold)
plainBold.Println("\n\nYou can use the new access token to authenticate through other commands in the CLI.")
},
}
func init() {
tokenCmd.AddCommand(tokenRenewCmd)
rootCmd.AddCommand(tokenCmd)
}
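
Alongside the new command, a minimal sketch (assuming it is compiled inside the CLI module so the util package is importable) of the same guard applied programmatically: tokens prefixed with "st." are service tokens and cannot be renewed, anything else is treated as a universal auth access token:
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/Infisical/infisical-merge/packages/util"
)

// renewIfUniversalAuth mirrors the check in `infisical token renew`:
// only universal auth access tokens are accepted for renewal.
func renewIfUniversalAuth(token string) (string, error) {
	if strings.HasPrefix(token, "st.") {
		return "", fmt.Errorf("service tokens cannot be renewed")
	}
	return util.RenewUniversalAuthAccessToken(token)
}

func main() {
	renewed, err := renewIfUniversalAuth(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(renewed)
}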

View File

@ -59,6 +59,11 @@ type DynamicSecretLease struct {
Data map[string]interface{} `json:"data"`
}
type TokenDetails struct {
Type string
Token string
}
type SingleFolder struct {
ID string `json:"_id"`
Name string `json:"name"`
@ -97,10 +102,11 @@ type GetAllSecretsParameters struct {
}
type GetAllFoldersParameters struct {
WorkspaceId string
Environment string
FoldersPath string
InfisicalToken string
WorkspaceId string
Environment string
FoldersPath string
InfisicalToken string
UniversalAuthAccessToken string
}
type CreateFolderParameters struct {
@ -123,3 +129,8 @@ type ExpandSecretsAuthentication struct {
InfisicalToken string
UniversalAuthAccessToken string
}
type MachineIdentityCredentials struct {
ClientId string
ClientSecret string
}

View File

@ -1,17 +1,20 @@
package util
const (
CONFIG_FILE_NAME = "infisical-config.json"
CONFIG_FOLDER_NAME = ".infisical"
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
SECRET_TYPE_PERSONAL = "personal"
SECRET_TYPE_SHARED = "shared"
KEYRING_SERVICE_NAME = "infisical"
PERSONAL_SECRET_TYPE_NAME = "personal"
SHARED_SECRET_TYPE_NAME = "shared"
CONFIG_FILE_NAME = "infisical-config.json"
CONFIG_FOLDER_NAME = ".infisical"
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID"
INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET"
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
SECRET_TYPE_PERSONAL = "personal"
SECRET_TYPE_SHARED = "shared"
KEYRING_SERVICE_NAME = "infisical"
PERSONAL_SECRET_TYPE_NAME = "personal"
SHARED_SECRET_TYPE_NAME = "shared"
)
var (

View File

@ -19,7 +19,7 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
var foldersToReturn []models.SingleFolder
var folderErr error
if params.InfisicalToken == "" {
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")
@ -44,11 +44,21 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
folders, err := GetFoldersViaJTW(loggedInUserDetails.UserCredentials.JTWToken, workspaceFile.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
} else {
} else if params.InfisicalToken != "" {
// get folders via service token
folders, err := GetFoldersViaServiceToken(params.InfisicalToken, params.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
} else if params.UniversalAuthAccessToken != "" {
if params.WorkspaceId == "" {
PrintErrorMessageAndExit("Project ID is required when using machine identity")
}
// get folders via machine identity
folders, err := GetFoldersViaMachineIdentity(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
}
return foldersToReturn, folderErr
}
@ -132,6 +142,34 @@ func GetFoldersViaServiceToken(fullServiceToken string, workspaceId string, envi
return folders, nil
}
func GetFoldersViaMachineIdentity(accessToken string, workspaceId string, envSlug string, foldersPath string) ([]models.SingleFolder, error) {
httpClient := resty.New()
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
getFoldersRequest := api.GetFoldersV1Request{
WorkspaceId: workspaceId,
Environment: envSlug,
FoldersPath: foldersPath,
}
apiResponse, err := api.CallGetFoldersV1(httpClient, getFoldersRequest)
if err != nil {
return nil, err
}
var folders []models.SingleFolder
for _, folder := range apiResponse.Folders {
folders = append(folders, models.SingleFolder{
Name: folder.Name,
ID: folder.ID,
})
}
return folders, nil
}
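
A short usage sketch (assuming it is compiled inside the CLI module; the INFISICAL_PROJECT_ID variable is hypothetical and used here only to source a project ID) of fetching folders with a machine identity token; note that WorkspaceId is mandatory on this path, matching the check in GetAllFolders above:
package main

import (
	"fmt"
	"os"

	"github.com/Infisical/infisical-merge/packages/models"
	"github.com/Infisical/infisical-merge/packages/util"
)

func main() {
	folders, err := util.GetAllFolders(models.GetAllFoldersParameters{
		WorkspaceId:              os.Getenv("INFISICAL_PROJECT_ID"), // hypothetical variable; a project ID is required with machine identities
		Environment:              "dev",
		FoldersPath:              "/",
		UniversalAuthAccessToken: os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME),
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for _, folder := range folders {
		fmt.Println(folder.ID, folder.Name)
	}
}
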
// CreateFolder creates a folder in Infisical
func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, error) {
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()

View File

@ -9,8 +9,11 @@ import (
"os/exec"
"path"
"strings"
"time"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/go-resty/resty/v2"
"github.com/spf13/cobra"
)
@ -64,18 +67,105 @@ func IsSecretTypeValid(s string) bool {
return false
}
func GetInfisicalServiceToken(cmd *cobra.Command) (serviceToken string, err error) {
func GetInfisicalToken(cmd *cobra.Command) (token *models.TokenDetails, err error) {
infisicalToken, err := cmd.Flags().GetString("token")
if infisicalToken == "" {
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
if err != nil {
return nil, err
}
if infisicalToken == "" {
// If no flag is passed, we first check for the universal auth access token env variable.
infisicalToken = os.Getenv(INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME)
// If it's still empty after the first env check, we check for the service token env variable.
if infisicalToken == "" {
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
}
}
// If it's empty, we return nothing at all.
if infisicalToken == "" {
return nil, nil
}
if strings.HasPrefix(infisicalToken, "st.") {
return &models.TokenDetails{
Type: "service-token",
Token: infisicalToken,
}, nil
}
return &models.TokenDetails{
Type: "universal-auth-token",
Token: infisicalToken,
}, nil
}
func GetInfisicalUniversalAuthAccessToken(cmd *cobra.Command) (accessToken string, err error) {
var token string
universalAuthClientId, err := cmd.Flags().GetString("universal-auth-client-id")
if err != nil {
return token, err
}
universalAuthClientSecret, err := cmd.Flags().GetString("universal-auth-client-secret")
if err != nil {
return token, err
}
if universalAuthClientId == "" {
universalAuthClientId = os.Getenv(INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
}
if universalAuthClientSecret == "" {
universalAuthClientSecret = os.Getenv(INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
}
if universalAuthClientId != "" || universalAuthClientSecret != "" {
res, err := UniversalAuthLogin(universalAuthClientId, universalAuthClientSecret)
if err != nil {
return token, err
}
token = res.AccessToken
}
return token, nil
}
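// UniversalAuthLogin exchanges a universal auth client ID and client secret for an access token via the universal auth login endpoint.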
func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuthLoginResponse, error) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)
tokenResponse, err := api.CallUniversalAuthLogin(httpClient, api.UniversalAuthLoginRequest{ClientId: clientId, ClientSecret: clientSecret})
if err != nil {
return api.UniversalAuthLoginResponse{}, err
}
return tokenResponse, nil
}
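// RenewUniversalAuthAccessToken renews the given universal auth access token and returns the refreshed token.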
func RenewUniversalAuthAccessToken(accessToken string) (string, error) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)
request := api.UniversalAuthRefreshRequest{
AccessToken: accessToken,
}
tokenResponse, err := api.CallUniversalAuthRefreshAccessToken(httpClient, request)
if err != nil {
return "", err
}
return infisicalToken, nil
return tokenResponse.AccessToken, nil
}
// Checks if the passed in email already exists in the users slice

View File

@ -159,7 +159,7 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
getSecretsRequest := api.GetEncryptedSecretsV3Request{
getSecretsRequest := api.GetRawSecretsV3Request{
WorkspaceId: workspaceId,
Environment: environmentName,
IncludeImport: includeImports,
@ -171,7 +171,8 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
getSecretsRequest.SecretPath = secretsPath
}
rawSecrets, err := api.CallGetRawSecretsV3(httpClient, api.GetRawSecretsV3Request{WorkspaceId: workspaceId, SecretPath: secretsPath, Environment: environmentName})
rawSecrets, err := api.CallGetRawSecretsV3(httpClient, getSecretsRequest)
if err != nil {
return models.PlaintextSecretResult{}, err
}
@ -182,7 +183,7 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
}
for _, secret := range rawSecrets.Secrets {
plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, WorkspaceId: secret.Workspace})
plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, Type: secret.Type, WorkspaceId: secret.Workspace})
}
// if includeImports {
@ -355,6 +356,11 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
log.Debug().Msg("Trying to fetch secrets using service token")
secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
} else if params.UniversalAuthAccessToken != "" {
if params.WorkspaceId == "" {
PrintErrorMessageAndExit("Project ID is required when using machine identity")
}
log.Debug().Msg("Trying to fetch secrets using universal auth")
res, err := GetPlainTextSecretsViaMachineIdentity(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)

View File

@ -1,4 +0,0 @@
---
title: "List Project Integrations"
openapi: "GET /api/v1/workspace/{workspaceId}/integrations"
---

View File

@ -0,0 +1,180 @@
---
title: "E2EE Disabled"
---
Using Infisical's API to read/write secrets with E2EE disabled allows you to create, update, and retrieve secrets
in plaintext. Effectively, this means each such secret operation only requires 1 HTTP call.
<AccordionGroup>
<Accordion title="Retrieve secrets">
Retrieve all secrets for an Infisical project and environment.
<Tabs>
<Tab title="cURL">
```bash
curl --location --request GET 'https://app.infisical.com/api/v3/secrets/raw?environment=environment&workspaceId=workspaceId' \
--header 'Authorization: Bearer serviceToken'
```
</Tab>
</Tabs>
####
<Info>
When using a [service token](../../../documentation/platform/token) with access to a single environment and path, you don't need to provide request parameters because the server will automatically scope the request to the defined environment/secrets path of the service token used.
For all other cases, request parameters are required.
</Info>
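For example, with a service token scoped to a single environment and path, the same request can omit the query parameters entirely (a minimal sketch; `serviceToken` is a placeholder):
```bash
curl --location --request GET 'https://app.infisical.com/api/v3/secrets/raw' \
--header 'Authorization: Bearer serviceToken'
```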
####
<ParamField query="workspaceId" type="string" required>
The ID of the workspace
</ParamField>
<ParamField query="environment" type="string" required>
The environment slug
</ParamField>
<ParamField query="secretPath" type="string" default="/" optional>
Path to secrets in workspace
</ParamField>
</Accordion>
<Accordion title="Create secret">
Create a secret in Infisical.
<Tabs>
<Tab title="cURL">
```bash
curl --location --request POST 'https://app.infisical.com/api/v3/secrets/raw/secretName' \
--header 'Authorization: Bearer serviceToken' \
--header 'Content-Type: application/json' \
--data-raw '{
"workspaceId": "workspaceId",
"environment": "environment",
"type": "shared",
"secretValue": "secretValue",
"secretPath": "/"
}'
```
</Tab>
</Tabs>
<ParamField path="secretName" type="string" required>
Name of secret to create
</ParamField>
<ParamField body="workspaceId" type="string" required>
The ID of the workspace
</ParamField>
<ParamField body="environment" type="string" required>
The environment slug
</ParamField>
<ParamField body="secretValue" type="string" required>
Value of secret
</ParamField>
<ParamField body="secretComment" type="string" optional>
Comment of secret
</ParamField>
<ParamField body="secretPath" type="string" default="/" optional>
Path to secret in workspace
</ParamField>
<ParamField query="type" type="string" optional default="shared">
The type of the secret. Valid options are “shared” or “personal”
</ParamField>
</Accordion>
<Accordion title="Retrieve secret">
Retrieve a secret from Infisical.
<Tabs>
<Tab title="cURL">
```bash
curl --location --request GET 'https://app.infisical.com/api/v3/secrets/raw/secretName?workspaceId=workspaceId&environment=environment' \
--header 'Authorization: Bearer serviceToken'
```
</Tab>
</Tabs>
<ParamField path="secretName" type="string" required>
Name of secret to retrieve
</ParamField>
<ParamField query="workspaceId" type="string" required>
The ID of the workspace
</ParamField>
<ParamField query="environment" type="string" required>
The environment slug
</ParamField>
<ParamField query="secretPath" type="string" default="/" optional>
Path to secrets in workspace
</ParamField>
<ParamField query="type" type="string" optional default="personal">
The type of the secret. Valid options are “shared” or “personal”
</ParamField>
</Accordion>
<Accordion title="Update secret">
Update an existing secret in Infisical.
<Tabs>
<Tab title="cURL">
```bash
curl --location --request PATCH 'https://app.infisical.com/api/v3/secrets/raw/secretName' \
--header 'Authorization: Bearer serviceToken' \
--header 'Content-Type: application/json' \
--data-raw '{
"workspaceId": "workspaceId",
"environment": "environment",
"type": "shared",
"secretValue": "secretValue",
"secretPath": "/"
}'
```
</Tab>
</Tabs>
<ParamField path="secretName" type="string" required>
Name of secret to update
</ParamField>
<ParamField body="workspaceId" type="string" required>
The ID of the workspace
</ParamField>
<ParamField body="environment" type="string" required>
The environment slug
</ParamField>
<ParamField body="secretValue" type="string" required>
Value of secret
</ParamField>
<ParamField body="secretPath" type="string" default="/" optional>
Path to secret in workspace.
</ParamField>
<ParamField query="type" type="string" optional default="shared">
The type of the secret. Valid options are “shared” or “personal”
</ParamField>
</Accordion>
<Accordion title="Delete secret">
Delete a secret in Infisical.
<Tabs>
<Tab title="cURL">
```bash
curl --location --request DELETE 'https://app.infisical.com/api/v3/secrets/raw/secretName' \
--header 'Authorization: Bearer serviceToken' \
--header 'Content-Type: application/json' \
--data-raw '{
"workspaceId": "workspaceId",
"environment": "environment",
"type": "shared",
"secretPath": "/"
}'
```
</Tab>
</Tabs>
<ParamField path="secretName" type="string" required>
Name of secret to update
</ParamField>
<ParamField body="workspaceId" type="string" required>
The ID of the workspace
</ParamField>
<ParamField body="environment" type="string" required>
The environment slug
</ParamField>
<ParamField body="secretPath" type="string" default="/" optional>
Path to secret in workspace.
</ParamField>
<ParamField query="type" type="string" optional default="personal">
The type of the secret. Valid options are “shared” or “personal”
</ParamField>
</Accordion>
</AccordionGroup>

View File

@ -0,0 +1,862 @@
---
title: "E2EE Enabled"
---
<Note>
E2EE enabled mode only works with [Service Tokens](/documentation/platform/token) and cannot be used with [Identities](/documentation/platform/identities/overview).
</Note>
Using Infisical's API to read/write secrets with E2EE enabled allows you to create, update, and retrieve secrets
but requires you to perform client-side encryption/decryption operations. For this reason, we recommend using one of the available
SDKs instead.
<AccordionGroup>
<Accordion title="Retrieve secrets">
<Tabs>
<Tab title="Javascript">
Retrieve all secrets for an Infisical project and environment.
```js
const crypto = require('crypto');
const axios = require('axios');
const BASE_URL = 'https://app.infisical.com';
const ALGORITHM = 'aes-256-gcm';
const decrypt = ({ ciphertext, iv, tag, secret}) => {
const decipher = crypto.createDecipheriv(
ALGORITHM,
secret,
Buffer.from(iv, 'base64')
);
decipher.setAuthTag(Buffer.from(tag, 'base64'));
let cleartext = decipher.update(ciphertext, 'base64', 'utf8');
cleartext += decipher.final('utf8');
return cleartext;
}
const getSecrets = async () => {
const serviceToken = 'your_service_token';
const serviceTokenSecret = serviceToken.substring(serviceToken.lastIndexOf('.') + 1);
// 1. Get your Infisical Token data
const { data: serviceTokenData } = await axios.get(
`${BASE_URL}/api/v2/service-token`,
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
// 2. Get secrets for your project and environment
const { data } = await axios.get(
`${BASE_URL}/api/v3/secrets?${new URLSearchParams({
environment: serviceTokenData.environment,
workspaceId: serviceTokenData.workspace
})}`,
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
const encryptedSecrets = data.secrets;
// 3. Decrypt the (encrypted) project key with the key from your Infisical Token
const projectKey = decrypt({
ciphertext: serviceTokenData.encryptedKey,
iv: serviceTokenData.iv,
tag: serviceTokenData.tag,
secret: serviceTokenSecret
});
// 4. Decrypt the (encrypted) secrets
const secrets = encryptedSecrets.map((secret) => {
const secretKey = decrypt({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
secret: projectKey
});
const secretValue = decrypt({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
secret: projectKey
});
return ({
secretKey,
secretValue
});
});
console.log('secrets: ', secrets);
}
getSecrets();
```
</Tab>
<Tab title="Python">
```Python
import requests
import base64
from Cryptodome.Cipher import AES
BASE_URL = "http://app.infisical.com"
def decrypt(ciphertext, iv, tag, secret):
secret = bytes(secret, "utf-8")
iv = base64.standard_b64decode(iv)
tag = base64.standard_b64decode(tag)
ciphertext = base64.standard_b64decode(ciphertext)
cipher = AES.new(secret, AES.MODE_GCM, iv)
cipher.update(tag)
cleartext = cipher.decrypt(ciphertext).decode("utf-8")
return cleartext
def get_secrets():
service_token = "your_service_token"
service_token_secret = service_token[service_token.rindex(".") + 1 :]
# 1. Get your Infisical Token data
service_token_data = requests.get(
f"{BASE_URL}/api/v2/service-token",
headers={"Authorization": f"Bearer {service_token}"},
).json()
# 2. Get secrets for your project and environment
data = requests.get(
f"{BASE_URL}/api/v3/secrets",
params={
"environment": service_token_data["environment"],
"workspaceId": service_token_data["workspace"],
},
headers={"Authorization": f"Bearer {service_token}"},
).json()
encrypted_secrets = data["secrets"]
# 3. Decrypt the (encrypted) project key with the key from your Infisical Token
project_key = decrypt(
ciphertext=service_token_data["encryptedKey"],
iv=service_token_data["iv"],
tag=service_token_data["tag"],
secret=service_token_secret,
)
# 4. Decrypt the (encrypted) secrets
secrets = []
for secret in encrypted_secrets:
secret_key = decrypt(
ciphertext=secret["secretKeyCiphertext"],
iv=secret["secretKeyIV"],
tag=secret["secretKeyTag"],
secret=project_key,
)
secret_value = decrypt(
ciphertext=secret["secretValueCiphertext"],
iv=secret["secretValueIV"],
tag=secret["secretValueTag"],
secret=project_key,
)
secrets.append(
{
"secret_key": secret_key,
"secret_value": secret_value,
}
)
print("secrets:", secrets)
get_secrets()
```
</Tab>
</Tabs>
</Accordion>
<Accordion title="Create secret">
<Tabs>
<Tab title="Javascript">
Create a secret in Infisical.
```js
const crypto = require('crypto');
const axios = require('axios');
const BASE_URL = 'https://app.infisical.com';
const ALGORITHM = 'aes-256-gcm';
const BLOCK_SIZE_BYTES = 16;
const encrypt = ({ text, secret }) => {
const iv = crypto.randomBytes(BLOCK_SIZE_BYTES);
const cipher = crypto.createCipheriv(ALGORITHM, secret, iv);
let ciphertext = cipher.update(text, 'utf8', 'base64');
ciphertext += cipher.final('base64');
return {
ciphertext,
iv: iv.toString('base64'),
tag: cipher.getAuthTag().toString('base64')
};
}
const decrypt = ({ ciphertext, iv, tag, secret}) => {
const decipher = crypto.createDecipheriv(
ALGORITHM,
secret,
Buffer.from(iv, 'base64')
);
decipher.setAuthTag(Buffer.from(tag, 'base64'));
let cleartext = decipher.update(ciphertext, 'base64', 'utf8');
cleartext += decipher.final('utf8');
return cleartext;
}
const createSecrets = async () => {
const serviceToken = 'your_service_token';
const serviceTokenSecret = serviceToken.substring(serviceToken.lastIndexOf('.') + 1);
const secretType = 'shared'; // 'shared' or 'personal'
const secretKey = 'some_key';
const secretValue = 'some_value';
const secretComment = 'some_comment';
// 1. Get your Infisical Token data
const { data: serviceTokenData } = await axios.get(
`${BASE_URL}/api/v2/service-token`,
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
// 2. Decrypt the (encrypted) project key with the key from your Infisical Token
const projectKey = decrypt({
ciphertext: serviceTokenData.encryptedKey,
iv: serviceTokenData.iv,
tag: serviceTokenData.tag,
secret: serviceTokenSecret
});
// 3. Encrypt your secret with the project key
const {
ciphertext: secretKeyCiphertext,
iv: secretKeyIV,
tag: secretKeyTag
} = encrypt({
text: secretKey,
secret: projectKey
});
const {
ciphertext: secretValueCiphertext,
iv: secretValueIV,
tag: secretValueTag
} = encrypt({
text: secretValue,
secret: projectKey
});
const {
ciphertext: secretCommentCiphertext,
iv: secretCommentIV,
tag: secretCommentTag
} = encrypt({
text: secretComment,
secret: projectKey
});
// 4. Send (encrypted) secret to Infisical
await axios.post(
`${BASE_URL}/api/v3/secrets/${secretKey}`,
{
workspaceId: serviceTokenData.workspace,
environment: serviceTokenData.environment,
type: secretType,
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag
},
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
}
createSecrets();
```
</Tab>
<Tab title="Python">
```Python
import base64
import requests
from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes
BASE_URL = "https://app.infisical.com"
BLOCK_SIZE_BYTES = 16
def encrypt(text, secret):
iv = get_random_bytes(BLOCK_SIZE_BYTES)
secret = bytes(secret, "utf-8")
cipher = AES.new(secret, AES.MODE_GCM, iv)
ciphertext, tag = cipher.encrypt_and_digest(text.encode("utf-8"))
return {
"ciphertext": base64.standard_b64encode(ciphertext).decode("utf-8"),
"tag": base64.standard_b64encode(tag).decode("utf-8"),
"iv": base64.standard_b64encode(iv).decode("utf-8"),
}
def decrypt(ciphertext, iv, tag, secret):
secret = bytes(secret, "utf-8")
iv = base64.standard_b64decode(iv)
tag = base64.standard_b64decode(tag)
ciphertext = base64.standard_b64decode(ciphertext)
cipher = AES.new(secret, AES.MODE_GCM, iv)
cipher.update(tag)
cleartext = cipher.decrypt(ciphertext).decode("utf-8")
return cleartext
def create_secrets():
service_token = "your_service_token"
service_token_secret = service_token[service_token.rindex(".") + 1 :]
secret_type = "shared" # "shared or "personal"
secret_key = "some_key"
secret_value = "some_value"
secret_comment = "some_comment"
# 1. Get your Infisical Token data
service_token_data = requests.get(
f"{BASE_URL}/api/v2/service-token",
headers={"Authorization": f"Bearer {service_token}"},
).json()
# 2. Decrypt the (encrypted) project key with the key from your Infisical Token
project_key = decrypt(
ciphertext=service_token_data["encryptedKey"],
iv=service_token_data["iv"],
tag=service_token_data["tag"],
secret=service_token_secret,
)
# 3. Encrypt your secret with the project key
encrypted_key_data = encrypt(text=secret_key, secret=project_key)
encrypted_value_data = encrypt(text=secret_value, secret=project_key)
encrypted_comment_data = encrypt(text=secret_comment, secret=project_key)
# 4. Send (encrypted) secret to Infisical
requests.post(
f"{BASE_URL}/api/v3/secrets/{secret_key}",
json={
"workspaceId": service_token_data["workspace"],
"environment": service_token_data["environment"],
"type": secret_type,
"secretKeyCiphertext": encrypted_key_data["ciphertext"],
"secretKeyIV": encrypted_key_data["iv"],
"secretKeyTag": encrypted_key_data["tag"],
"secretValueCiphertext": encrypted_value_data["ciphertext"],
"secretValueIV": encrypted_value_data["iv"],
"secretValueTag": encrypted_value_data["tag"],
"secretCommentCiphertext": encrypted_comment_data["ciphertext"],
"secretCommentIV": encrypted_comment_data["iv"],
"secretCommentTag": encrypted_comment_data["tag"]
},
headers={"Authorization": f"Bearer {service_token}"},
)
create_secrets()
```
</Tab>
</Tabs>
</Accordion>
<Accordion title="Retrieve secret">
<Tabs>
<Tab title="Javascript">
Retrieve a secret from Infisical.
```js
const crypto = require('crypto');
const axios = require('axios');
const BASE_URL = 'https://app.infisical.com';
const ALGORITHM = 'aes-256-gcm';
const decrypt = ({ ciphertext, iv, tag, secret}) => {
const decipher = crypto.createDecipheriv(
ALGORITHM,
secret,
Buffer.from(iv, 'base64')
);
decipher.setAuthTag(Buffer.from(tag, 'base64'));
let cleartext = decipher.update(ciphertext, 'base64', 'utf8');
cleartext += decipher.final('utf8');
return cleartext;
}
const getSecret = async () => {
const serviceToken = 'your_service_token';
const serviceTokenSecret = serviceToken.substring(serviceToken.lastIndexOf('.') + 1);
const secretType = 'shared' // 'shared' or 'personal'
const secretKey = 'some_key';
// 1. Get your Infisical Token data
const { data: serviceTokenData } = await axios.get(
`${BASE_URL}/api/v2/service-token`,
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
// 2. Get the secret from your project and environment
const { data } = await axios.get(
`${BASE_URL}/api/v3/secrets/${secretKey}?${new URLSearchParams({
environment: serviceTokenData.environment,
workspaceId: serviceTokenData.workspace,
type: secretType // optional, defaults to 'shared'
})}`,
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
const encryptedSecret = data.secret;
// 3. Decrypt the (encrypted) project key with the key from your Infisical Token
const projectKey = decrypt({
ciphertext: serviceTokenData.encryptedKey,
iv: serviceTokenData.iv,
tag: serviceTokenData.tag,
secret: serviceTokenSecret
});
// 4. Decrypt the (encrypted) secret value
const secretValue = decrypt({
ciphertext: encryptedSecret.secretValueCiphertext,
iv: encryptedSecret.secretValueIV,
tag: encryptedSecret.secretValueTag,
secret: projectKey
});
console.log('secret: ', ({
secretKey,
secretValue
}));
}
getSecret();
```
</Tab>
<Tab title="Python">
```Python
import requests
import base64
from Cryptodome.Cipher import AES
BASE_URL = "http://app.infisical.com"
def decrypt(ciphertext, iv, tag, secret):
secret = bytes(secret, "utf-8")
iv = base64.standard_b64decode(iv)
tag = base64.standard_b64decode(tag)
ciphertext = base64.standard_b64decode(ciphertext)
cipher = AES.new(secret, AES.MODE_GCM, iv)
cipher.update(tag)
cleartext = cipher.decrypt(ciphertext).decode("utf-8")
return cleartext
def get_secret():
service_token = "your_service_token"
service_token_secret = service_token[service_token.rindex(".") + 1 :]
secret_type = "shared" # "shared" or "personal"
secret_key = "some_key"
# 1. Get your Infisical Token data
service_token_data = requests.get(
f"{BASE_URL}/api/v2/service-token",
headers={"Authorization": f"Bearer {service_token}"},
).json()
# 2. Get secret from your project and environment
data = requests.get(
f"{BASE_URL}/api/v3/secrets/{secret_key}",
params={
"environment": service_token_data["environment"],
"workspaceId": service_token_data["workspace"],
"type": secret_type # optional, defaults to "shared"
},
headers={"Authorization": f"Bearer {service_token}"},
).json()
encrypted_secret = data["secret"]
# 3. Decrypt the (encrypted) project key with the key from your Infisical Token
project_key = decrypt(
ciphertext=service_token_data["encryptedKey"],
iv=service_token_data["iv"],
tag=service_token_data["tag"],
secret=service_token_secret,
)
# 4. Decrypt the (encrypted) secret value
secret_value = decrypt(
ciphertext=encrypted_secret["secretValueCiphertext"],
iv=encrypted_secret["secretValueIV"],
tag=encrypted_secret["secretValueTag"],
secret=project_key,
)
print("secret: ", {
"secret_key": secret_key,
"secret_value": secret_value
})
get_secret()
```
</Tab>
</Tabs>
</Accordion>
<Accordion title="Update secret">
<Tabs>
<Tab title="Javascript">
Update an existing secret in Infisical.
```js
const crypto = require('crypto');
const axios = require('axios');
const BASE_URL = 'https://app.infisical.com';
const ALGORITHM = 'aes-256-gcm';
const BLOCK_SIZE_BYTES = 16;
const encrypt = ({ text, secret }) => {
const iv = crypto.randomBytes(BLOCK_SIZE_BYTES);
const cipher = crypto.createCipheriv(ALGORITHM, secret, iv);
let ciphertext = cipher.update(text, 'utf8', 'base64');
ciphertext += cipher.final('base64');
return {
ciphertext,
iv: iv.toString('base64'),
tag: cipher.getAuthTag().toString('base64')
};
}
const decrypt = ({ ciphertext, iv, tag, secret}) => {
const decipher = crypto.createDecipheriv(
ALGORITHM,
secret,
Buffer.from(iv, 'base64')
);
decipher.setAuthTag(Buffer.from(tag, 'base64'));
let cleartext = decipher.update(ciphertext, 'base64', 'utf8');
cleartext += decipher.final('utf8');
return cleartext;
}
const updateSecrets = async () => {
const serviceToken = 'your_service_token';
const serviceTokenSecret = serviceToken.substring(serviceToken.lastIndexOf('.') + 1);
const secretType = 'shared' // 'shared' or 'personal'
const secretKey = 'some_key';
const secretValue = 'updated_value';
const secretComment = 'updated_comment';
// 1. Get your Infisical Token data
const { data: serviceTokenData } = await axios.get(
`${BASE_URL}/api/v2/service-token`,
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
// 2. Decrypt the (encrypted) project key with the key from your Infisical Token
const projectKey = decrypt({
ciphertext: serviceTokenData.encryptedKey,
iv: serviceTokenData.iv,
tag: serviceTokenData.tag,
secret: serviceTokenSecret
});
// 3. Encrypt your updated secret with the project key
const {
ciphertext: secretKeyCiphertext,
iv: secretKeyIV,
tag: secretKeyTag
} = encrypt({
text: secretKey,
secret: projectKey
});
const {
ciphertext: secretValueCiphertext,
iv: secretValueIV,
tag: secretValueTag
} = encrypt({
text: secretValue,
secret: projectKey
});
const {
ciphertext: secretCommentCiphertext,
iv: secretCommentIV,
tag: secretCommentTag
} = encrypt({
text: secretComment,
secret: projectKey
});
// 4. Send (encrypted) updated secret to Infisical
await axios.patch(
`${BASE_URL}/api/v3/secrets/${secretKey}`,
{
workspaceId: serviceTokenData.workspace,
environment: serviceTokenData.environment,
type: secretType,
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag
},
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
}
updateSecrets();
```
</Tab>
<Tab title="Python">
```Python
import base64
import requests
from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes
BASE_URL = "https://app.infisical.com"
BLOCK_SIZE_BYTES = 16
def encrypt(text, secret):
iv = get_random_bytes(BLOCK_SIZE_BYTES)
secret = bytes(secret, "utf-8")
cipher = AES.new(secret, AES.MODE_GCM, iv)
ciphertext, tag = cipher.encrypt_and_digest(text.encode("utf-8"))
return {
"ciphertext": base64.standard_b64encode(ciphertext).decode("utf-8"),
"tag": base64.standard_b64encode(tag).decode("utf-8"),
"iv": base64.standard_b64encode(iv).decode("utf-8"),
}
def decrypt(ciphertext, iv, tag, secret):
secret = bytes(secret, "utf-8")
iv = base64.standard_b64decode(iv)
tag = base64.standard_b64decode(tag)
ciphertext = base64.standard_b64decode(ciphertext)
cipher = AES.new(secret, AES.MODE_GCM, iv)
cipher.update(tag)
cleartext = cipher.decrypt(ciphertext).decode("utf-8")
return cleartext
def update_secret():
service_token = "your_service_token"
service_token_secret = service_token[service_token.rindex(".") + 1 :]
secret_type = "shared" # "shared" or "personal"
secret_key = "some_key"
secret_value = "updated_value"
secret_comment = "updated_comment"
# 1. Get your Infisical Token data
service_token_data = requests.get(
f"{BASE_URL}/api/v2/service-token",
headers={"Authorization": f"Bearer {service_token}"},
).json()
# 2. Decrypt the (encrypted) project key with the key from your Infisical Token
project_key = decrypt(
ciphertext=service_token_data["encryptedKey"],
iv=service_token_data["iv"],
tag=service_token_data["tag"],
secret=service_token_secret,
)
# 3. Encrypt your updated secret with the project key
encrypted_key_data = encrypt(text=secret_key, secret=project_key)
encrypted_value_data = encrypt(text=secret_value, secret=project_key)
encrypted_comment_data = encrypt(text=secret_comment, secret=project_key)
# 4. Send (encrypted) updated secret to Infisical
requests.patch(
f"{BASE_URL}/api/v3/secrets/{secret_key}",
json={
"workspaceId": service_token_data["workspace"],
"environment": service_token_data["environment"],
"type": secret_type,
"secretKeyCiphertext": encrypted_key_data["ciphertext"],
"secretKeyIV": encrypted_key_data["iv"],
"secretKeyTag": encrypted_key_data["tag"],
"secretValueCiphertext": encrypted_value_data["ciphertext"],
"secretValueIV": encrypted_value_data["iv"],
"secretValueTag": encrypted_value_data["tag"],
"secretCommentCiphertext": encrypted_comment_data["ciphertext"],
"secretCommentIV": encrypted_comment_data["iv"],
"secretCommentTag": encrypted_comment_data["tag"]
},
headers={"Authorization": f"Bearer {service_token}"},
)
update_secret()
```
</Tab>
</Tabs>
</Accordion>
<Accordion title="Delete secret">
<Tabs>
<Tab title="Javascript">
Delete a secret in Infisical.
```js
const axios = require('axios');
const BASE_URL = 'https://app.infisical.com';
const deleteSecrets = async () => {
const serviceToken = 'your_service_token';
const secretType = 'shared' // 'shared' or 'personal'
const secretKey = 'some_key'
// 1. Get your Infisical Token data
const { data: serviceTokenData } = await axios.get(
`${BASE_URL}/api/v2/service-token`,
{
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
// 2. Delete secret from Infisical
await axios.delete(
`${BASE_URL}/api/v3/secrets/${secretKey}`,
{
// axios.delete accepts a single config object; the request body goes under `data`
data: {
workspaceId: serviceTokenData.workspace,
environment: serviceTokenData.environment,
type: secretType
},
headers: {
Authorization: `Bearer ${serviceToken}`
}
}
);
};
deleteSecrets();
```
</Tab>
<Tab title="Python">
```Python
import requests
BASE_URL = "https://app.infisical.com"
def delete_secrets():
service_token = "<your_service_token>"
secret_type = "shared" # "shared" or "personal"
secret_key = "some_key"
# 1. Get your Infisical Token data
service_token_data = requests.get(
f"{BASE_URL}/api/v2/service-token",
headers={"Authorization": f"Bearer {service_token}"},
).json()
# 2. Delete secret from Infisical
requests.delete(
f"{BASE_URL}/api/v2/secrets/{secret_key}",
json={
"workspaceId": service_token_data["workspace"],
"environment": service_token_data["environment"],
"type": secret_type
},
headers={"Authorization": f"Bearer {service_token}"},
)
delete_secrets()
```
</Tab>
</Tabs>
<Info>
If using an `API_KEY` to authenticate with the Infisical API, then you should include it in the `X_API_KEY` header.
</Info>
</Accordion>
</AccordionGroup>

View File

@ -1,90 +0,0 @@
---
title: "Configure native integrations via API"
description: "How to use Infisical API to sync secrets to external secret managers"
---
The Infisical API allows you to create programmatic integrations that connect with third-party secret managers to synchronize secrets from Infisical.
This guide will primarily demonstrate the process using AWS Secret Store Manager (AWS SSM), but the steps are generally applicable to other secret management integrations.
<Info>
For details on setting up AWS SSM synchronization and understanding its prerequisites, refer to the [AWS SSM integration setup documentation](../../../integrations/cloud/aws-secret-manager).
</Info>
<Steps>
<Step title="Authenticate with AWS SSM">
Authentication is required for all integrations. Use the [Integration Auth API](../../endpoints/integrations/create-auth) with the following parameters to authenticate.
<ParamField body="integration" type="string" initialValue="aws-secret-manager" required>
Set this parameter to **aws-secret-manager**.
</ParamField>
<ParamField body="workspaceId" type="string" required>
The Infisical project ID for the integration.
</ParamField>
<ParamField body="accessId" type="string" required>
The AWS IAM User Access ID.
</ParamField>
<ParamField body="accessToken" type="string" required>
The AWS IAM User Access Secret Key.
</ParamField>
```bash Request
curl --request POST \
--url https://app.infisical.com/api/v1/integration-auth/access-token \
--header 'Authorization: <authorization>' \
--header 'Content-Type: application/json' \
--data '{
"workspaceId": "<workspaceid>",
"integration": "aws-secret-manager",
"accessId": "<aws iam user access id>",
"accessToken": "<aws iam user access secret key>"
}'
```
</Step>
<Step title="Configure the Synchronization Setup">
Once authentication between AWS SSM and Infisical is established, you can configure the synchronization behavior.
This involves specifying the source (environment and secret path in Infisical) and the destination in SSM to which the secrets will be synchronized.
Use the [integration API](../../endpoints/integrations/create) with the following parameters to configure the sync source and destination.
<ParamField body="integrationAuthId" type="string" required>
The ID of the integration authentication object used with AWS, obtained from the previous API response.
</ParamField>
<ParamField body="isActive" type="boolean">
Indicates whether the integration should be active or inactive.
</ParamField>
<ParamField body="app" type="string" required>
The secret name for saving in AWS SSM, which can be arbitrarily chosen.
</ParamField>
<ParamField body="region" type="string" required>
The AWS region where the SSM is located, e.g., `us-east-1`.
</ParamField>
<ParamField body="sourceEnvironment" type="string" required>
The Infisical environment slug from which secrets will be synchronized, e.g., `dev`.
</ParamField>
<ParamField body="secretPath" type="string" required>
The Infisical folder path from which secrets will be synchronized, e.g., `/some/path`. The root path is `/`.
</ParamField>
```bash Request
curl --request POST \
--url https://app.infisical.com/api/v1/integration \
--header 'Authorization: <authorization>' \
--header 'Content-Type: application/json' \
--data '{
"integrationAuthId": "<integrationauthid>",
"sourceEnvironment": "<sourceenvironment>",
"secretPath": "<secret-path, default is '/' >",
"app": "<app>",
"region": "<aws-ssm-region>"
}'
```
</Step>
</Steps>
<Check>
Congratulations! You have successfully set up an integration to synchronize secrets from Infisical with AWS SSM.
For more information, [view the integration API reference](../../endpoints/integrations).
</Check>

View File

@ -0,0 +1,54 @@
---
title: "Note on E2EE"
---
Each project in Infisical can have **End-to-End Encryption (E2EE)** enabled or disabled.
By default, all projects have **E2EE** enabled, which means the server cannot decrypt any values because all secret encryption/decryption operations occur on the client side; this can optionally be disabled. Keeping E2EE enabled, however, comes with limitations around functionality and ease of use:
- You cannot make HTTP calls to Infisical to read/write secrets in plaintext.
- You cannot leverage non-E2EE features like native integrations and in-platform automations like dynamic secrets and secret rotation.
<CardGroup cols={2}>
<Card
title="E2EE Disabled"
href="/api-reference/overview/examples/e2ee-disabled"
icon="shield-halved"
color="#3c8639"
>
Example read/write secrets without client-side encryption/decryption
</Card>
<Card
href="/api-reference/overview/examples/e2ee-enabled"
title="E2EE Enabled"
icon="shield"
color="#3775a9"
>
Example read/write secrets with client-side encryption/decryption
</Card>
</CardGroup>
## FAQ
<AccordionGroup>
<Accordion title="Should I have E2EE enabled or disabled?">
We recommend starting with **E2EE** enabled and disabling it if:
- You're self-hosting Infisical, so your own instance being able to read your secrets isn't a concern.
- You want an easier way to read/write secrets with Infisical.
- You need more power out of non-E2EE features such as secret rotation, dynamic secrets, etc.
</Accordion>
<Accordion title="How can I enable/disable E2EE?">
You can enable/disable E2EE for your project in Infisical in the Project Settings.
</Accordion>
<Accordion title="Is disabling E2EE secure?">
It is secure and is, in fact, how most vendors in our industry are able to offer features like secret rotation. In this mode, secrets are encrypted at rest by
a series of keys, secured ultimately by a top-level `ROOT_ENCRYPTION_KEY` located on the server.
If you're concerned about Infisical Cloud's ability to read your secrets, then you may wish to
use it with **E2EE** enabled or self-host Infisical on your own infrastructure and disable E2EE there.
As an organization, we do not read any customer secrets without explicit permission; access to the `ROOT_ENCRYPTION_KEY` is restricted to one individual in the organization.
</Accordion>
</AccordionGroup>

View File

@ -12,4 +12,50 @@ The CLI uses authentication to verify your identity. When you enter the correct
To change where the login credentials are stored, visit the [vaults command](./vault).
If you have added multiple users, you can switch between the users by using the [user command](./user).
If you have added multiple users, you can switch between the users by using the [user command](./user).
### Flags
<Accordion title="--method">
```bash
infisical login --method=<auth-method> # Optional, will default to 'user'.
```
#### Valid values for the `method` flag are:
- `user`: Login using email and password.
- `universal-auth`: Login using a universal auth client ID and client secret.
<Info>
When `method` is set to `universal-auth`, the `client-id` and `client-secret` flags are required. Optionally you can set the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` and `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variables instead of using the flags.
When you authenticate with universal auth, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it in the `--token` flag when applicable.
</Info>
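For example, a machine identity can log in non-interactively by combining these flags (a sketch; the client ID and secret values are placeholders):
```bash
infisical login --method=universal-auth --client-id=<your-client-id> --client-secret=<your-client-secret>
```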
</Accordion>
<Accordion title="--client-id">
```bash
infisical login --client-id=<client-id> # Optional, required if --method=universal-auth.
```
#### Description
The client ID of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.
<Tip>
The `client-id` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` environment variable.
</Tip>
</Accordion>
<Accordion title="--client-secret">
```bash
infisical login --client-secret=<client-secret> # Optional, required if --method=universal-auth.
```
#### Description
The client secret of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.
<Tip>
The `client-secret` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variable.
</Tip>
</Accordion>
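The same login can also be driven entirely through environment variables instead of flags (a sketch using the variable names documented above):
```bash
export INFISICAL_UNIVERSAL_AUTH_CLIENT_ID=<your-client-id>
export INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET=<your-client-secret>
infisical login --method=universal-auth
```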

View File

@ -0,0 +1,21 @@
---
title: "infisical token"
description: "Manage your Infisical identity access tokens"
---
```bash
infisical token renew <ua-access-token>
```
## Description
The Infisical `token` command allows you to manage your universal auth access tokens.
With this command, you can renew your access tokens. In the future, more subcommands will be added to help you better manage your tokens through the CLI.
<Accordion title="token renew <access-token>" defaultOpen="true">
Use this command to renew your universal auth access token; the renewed token is printed to the console.
```bash
$ infisical token renew <ua-access-token>
```
</Accordion>

View File

@ -38,7 +38,7 @@ Infisical helps developers achieve secure centralized secret management and prov
- Simple secret management inside **[CI/CD pipelines](/integrations/cicd/githubactions)** and staging environments.
- Secure and compliant secret management practices in **[production environments](/sdks/overview)**.
- **Facilitated workflows** around [secret change management](/documentation/platform/pr-workflows), [access requests](/documentation/platform/access-controls/access-requests), [temporary access provisioning](/documentation/platform/access-controls/temporary-access), and more.
- **Improved security posture** thanks to [secret scanning](/cli/scanning-overview), [granular access control policies](/documentation/platform/access-controls/overview), [automated secret rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview), and [dynamic secrets](/documentation/platform/dynamic-secrets/overview) capabilities.
- **Improved security posture** thanks to [secret scanning](/cli/scanning-overview), [granular access control policies](/documentation/platform/access-controls/overview), [automated secret rotation](https://infisical.com/docs/documentation/platform/secret-rotation/overview), and [dynamic secrets](/documentation/platform/dynamic-secrets/overview) capabilities.
## How does Infisical work?

View File

@ -1,67 +0,0 @@
---
title: "User Groups"
description: "Manage user groups in Infisical."
---
<Info>
User Groups is a paid feature.
If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact team@infisical.com to purchase an enterprise license to use it.
</Info>
## Concept
A (user) group is a collection of users that you can create in an Infisical organization to more efficiently manage permissions and access control for multiple users together. For example, you can have a group called `Developers` with the `Developer` role containing all the developers in your organization.
User groups have the following properties:
- If a group is added to a project under specific role(s), all users in the group will be provisioned access to the project with the role(s). Conversely, if a group is removed from a project, all users in the group will lose access to the project.
- If a user is added to a group, they will inherit the access control properties of the group including access to project(s) under the role(s) assigned to the group. Conversely, if a user is removed from a group, they will lose access to project(s) that the group has access to.
- If a user was previously added to a project under a role and is later added to a group that has access to the same project under a different role, then the user will now have access to the project under the composite permissions of the two roles. If the group is subsequently removed from the project, the user will not lose access to the project as they were previously added to the project separately.
- A user can be part of multiple groups. If a user is part of multiple groups, they will inherit the composite permissions of all the groups that they are part of.
## Workflow
In the following steps, we explore how to create and use user groups to provision user access to projects in Infisical.
<Steps>
<Step title="Creating a group">
To create a group, head to your Organization Settings > Access Control > Groups and press **Create group**.
![groups org](/images/platform/groups/groups-org.png)
When creating a group, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.
![groups org create](/images/platform/groups/groups-org-create.png)
Now input a few details for your new group. Here's some guidance for each field:
- Name (required): A friendly name for the group like `Engineering`.
- Slug (required): A unique identifier for the group like `engineering`.
- Role (required): A role from the Organization Roles tab for the group to assume. The organization role assigned will determine what organization level resources this group can have access to.
</Step>
<Step title="Adding users to the group">
Next, you'll want to assign users to the group. To do this, press on the users icon on the group and start assigning users to the group.
![groups org users](/images/platform/groups/groups-org-users.png)
In this example, we're assigning **Alan Turing** and **Ada Lovelace** to the group **Engineering**.
![groups org assign users](/images/platform/groups/groups-org-users-assign.png)
</Step>
<Step title="Adding the group to a project">
To enable the group to access project-level resources such as secrets within a specific project, you should add it to that project.
To do this, head over to the project you want to add the group to and go to Project Settings > Access Control > Groups and press **Add group**.
![groups project](/images/platform/groups/groups-project.png)
Next, select the group you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this group can have access to.
![groups project add](/images/platform/groups/groups-project-create.png)
That's it!
The users of the group now have access to the project under the role you assigned to the group.
</Step>
</Steps>

View File

@ -16,7 +16,7 @@ Prerequisites:
<Steps>
<Step title="Create a SCIM token in Infisical">
In Infisical, head to your Organization Settings > Authentication > SCIM Configuration and
press the **Enable SCIM provisioning** toggle to allow JumpCloud to provision/deprovision users and user groups for your organization.
press the **Enable SCIM provisioning** toggle to allow JumpCloud to provision/deprovision users for your organization.
![SCIM enable provisioning](/images/platform/scim/scim-enable-provisioning.png)
@ -49,7 +49,7 @@ Prerequisites:
![SCIM JumpCloud](/images/platform/scim/jumpcloud/scim-jumpcloud-test-connection.png)
Now JumpCloud can provision/deprovision users and user groups to/from your organization in Infisical.
Now JumpCloud can provision/deprovision users to/from your organization in Infisical.
</Step>
</Steps>

View File

@ -16,7 +16,7 @@ Prerequisites:
<Steps>
<Step title="Create a SCIM token in Infisical">
In Infisical, head to your Organization Settings > Authentication > SCIM Configuration and
press the **Enable SCIM provisioning** toggle to allow Okta to provision/deprovision users and user groups for your organization.
press the **Enable SCIM provisioning** toggle to allow Okta to provision/deprovision users for your organization.
![SCIM enable provisioning](/images/platform/scim/scim-enable-provisioning.png)
@ -38,7 +38,7 @@ Prerequisites:
- SCIM connector base URL: Input the **SCIM URL** from Step 1.
- Unique identifier field for users: Input `email`.
- Supported provisioning actions: Select **Push New Users**, **Push Profile Updates**, and **Push Groups**.
- Supported provisioning actions: Select **Push New Users** and **Push Profile Updates**.
- Authentication Mode: `HTTP Header`.
![SCIM Okta](/images/platform/scim/okta/scim-okta-config.png)
@ -55,7 +55,7 @@ Prerequisites:
![SCIM Okta](/images/platform/scim/okta/scim-okta-app-settings.png)
Now Okta can provision/deprovision users and user groups to/from your organization in Infisical.
Now Okta can provision/deprovision users to/from your organization in Infisical.
</Step>
</Steps>

View File

@ -10,7 +10,7 @@ description: "Learn how to provision users for Infisical via SCIM."
then you should contact sales@infisical.com to purchase an enterprise license to use it.
</Info>
You can configure your organization in Infisical to have users and user groups be provisioned/deprovisioned using [SCIM](https://scim.cloud/#Implementations2) via providers like Okta, Azure, JumpCloud, etc.
You can configure your organization in Infisical to have members be provisioned/deprovisioned using [SCIM](https://scim.cloud/#Implementations2) via providers like Okta, Azure, JumpCloud, etc.
- Provisioning: The SCIM provider pushes user information to Infisical. If the user exists in Infisical, Infisical sends an email invitation to add them to the relevant organization in Infisical; if not, Infisical initializes a new user and sends them an email invitation to finish setting up their account in the organization.
- Deprovisioning: The SCIM provider instructs Infisical to remove user(s) from an organization in Infisical.

View File

@ -44,7 +44,7 @@ In the above screenshot, you can see that we are creating a token with `re
of the `/common` path within the development environment of the project; the token expires in 6 months and can be used from any IP address.
<Note>
For a deeper understanding of service tokens, it is recommended to read [this guide](https://infisical.com/docs/internals/service-tokens).
For a deeper understanding of service tokens, it is recommended to read [this guide](https://infisical.com/docs/internals/service-tokens).
</Note>
**FAQ**

Binary image files changed: 7 removed, 1 replaced (binary content not shown).
View File

@ -144,8 +144,7 @@
"documentation/platform/dynamic-secrets/overview",
"documentation/platform/dynamic-secrets/postgresql"
]
},
"documentation/platform/groups"
}
]
},
{
@ -226,6 +225,7 @@
"cli/commands/run",
"cli/commands/secrets",
"cli/commands/export",
"cli/commands/token",
"cli/commands/service-token",
"cli/commands/vault",
"cli/commands/user",
@ -388,7 +388,9 @@
{
"group": "Examples",
"pages": [
"api-reference/overview/examples/integration"
"api-reference/overview/examples/note",
"api-reference/overview/examples/e2ee-disabled",
"api-reference/overview/examples/e2ee-enabled"
]
}
]
@ -520,8 +522,7 @@
"api-reference/endpoints/integrations/delete-auth-by-id",
"api-reference/endpoints/integrations/create",
"api-reference/endpoints/integrations/update",
"api-reference/endpoints/integrations/delete",
"api-reference/endpoints/integrations/list-project-integrations"
"api-reference/endpoints/integrations/delete"
]
},
{

View File

@ -121,35 +121,24 @@ Without email configuration, Infisical's core functions like sign-up/login and s
</Accordion>
<Accordion title="AWS SES">
<Steps>
<Step title="Create a verifed identity">
This will be used to verify the email you are sending from.
![Create SES identity](../../images/self-hosting/configuration/email/ses-create-identity.png)
<Info>
If your AWS SES account is in sandbox mode, you will only be able to send emails to verified identities.
</Info>
</Step>
<Step title="Create an account and configure AWS SES">
Create an IAM user for SMTP authentication and obtain SMTP credentials in SMTP settings > Create SMTP credentials
1. Create an account and [configure AWS SES](https://aws.amazon.com/premiumsupport/knowledge-center/ses-set-up-connect-smtp/) to send emails in the Amazon SES console.
2. Create an IAM user for SMTP authentication and obtain SMTP credentials in SMTP settings > Create SMTP credentials
![opening AWS SES console](../../images/self-hosting/configuration/email/email-aws-ses-console.png)
![opening AWS SES console](../../images/self-hosting/configuration/email/email-aws-ses-console.png)
![creating AWS IAM SES user](../../images/self-hosting/configuration/email/email-aws-ses-user.png)
</Step>
<Step title="Set up your SMTP environment variables">
With your AWS SES SMTP credentials, you can now set up your SMTP environment variables for your Infisical instance.
![creating AWS IAM SES user](../../images/self-hosting/configuration/email/email-aws-ses-user.png)
```
SMTP_HOST=email-smtp.ap-northeast-1.amazonaws.com # SMTP endpoint obtained from SMTP settings
SMTP_USERNAME=xxx # your SMTP username
SMTP_PASSWORD=xxx # your SMTP password
SMTP_PORT=587
SMTP_SECURE=false
SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails
SMTP_FROM_NAME=Infisical
```
</Step>
</Steps>
3. With your AWS SES SMTP credentials, you can now set up your SMTP environment variables:
```
SMTP_HOST=email-smtp.ap-northeast-1.amazonaws.com # SMTP endpoint obtained from SMTP settings
SMTP_USERNAME=xxx # your SMTP username
SMTP_PASSWORD=xxx # your SMTP password
SMTP_PORT=587
SMTP_SECURE=true
SMTP_FROM_ADDRESS=hey@example.com # your email address being used to send out emails
SMTP_FROM_NAME=Infisical
```
<Info>
Remember that you will need to restart Infisical for this to work properly.
@ -346,10 +335,6 @@ To login into Infisical with OAuth providers such as Google, configure the assoc
Requires enterprise license. Please contact team@infisical.com to get more information.
</Accordion>
<ParamField query="NEXT_PUBLIC_SAML_ORG_SLUG" type="string">
Configure SAML organization slug to automatically redirect all users of your Infisical instance to the identity provider.
</ParamField>

View File

@ -52,9 +52,6 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
ARG NEXT_INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION=$NEXT_INFISICAL_PLATFORM_VERSION

View File

@ -4,6 +4,7 @@
"requires": true,
"packages": {
"": {
"name": "frontend",
"dependencies": {
"@casl/ability": "^6.5.0",
"@casl/react": "^3.1.0",

View File

@ -4,8 +4,6 @@ scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_POSTHOG_API_KEY
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTERCOM_ID"
scripts/replace-standalone-build-variable.sh "$BAKED_NEXT_PUBLIC_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG"
if [ "$TELEMETRY_ENABLED" != "false" ]; then
echo "Telemetry is enabled"
scripts/set-standalone-build-telemetry.sh true

View File

@ -4,8 +4,6 @@ scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_POSTHOG_API_KEY" "$NEXT_PUBLIC_P
scripts/replace-variable.sh "$BAKED_NEXT_PUBLIC_INTERCOM_ID" "$NEXT_PUBLIC_INTERCOM_ID"
scripts/replace-variable.sh "$BAKED_NEXT_SAML_ORG_SLUG" "$NEXT_PUBLIC_SAML_ORG_SLUG"
if [ "$TELEMETRY_ENABLED" != "false" ]; then
echo "Telemetry is enabled"
scripts/set-telemetry.sh true

View File

@ -1,29 +0,0 @@
import { useRouter } from "next/router";
import { Button } from "../v2";
interface IProps {
projectId: string;
}
export const NoEnvironmentsBanner = ({ projectId }: IProps) => {
const router = useRouter();
return (
<div className="mt-4 flex w-full flex-row items-center rounded-md border border-primary-600/70 bg-primary/[.07] p-4 text-base text-white">
<div className="flex w-full flex-col text-sm">
<span className="mb-2 text-lg font-semibold">
No environments were found in your project
</span>
<p className="prose">
In order to use integrations, you need to create at least one environment in your project.
</p>
</div>
<div className="my-2">
<Button onClick={() => router.push(`/project/${projectId}/settings#environments`)}>
Add environments
</Button>
</div>
</div>
);
};

Some files were not shown because too many files have changed in this diff.