Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-22 22:22:22 +00:00)

Compare commits: daniel/cli ... groups-pha (1 commit)

Commit SHA1: beb54e8a12
@@ -108,7 +108,7 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.4",
"mysql2": "^3.9.1",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
@@ -21,7 +21,8 @@ export const UsersSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
isGhost: z.boolean().default(false),
username: z.string()
username: z.string(),
isEmailVerified: z.boolean().default(false).nullable().optional()
});

export type TUsers = z.infer<typeof UsersSchema>;

@@ -29,6 +29,7 @@ export async function seed(knex: Knex): Promise<void> {
lastName: "",
authMethods: [AuthMethod.EMAIL],
isAccepted: true,
isEmailVerified: true,
isMfaEnabled: false,
mfaMethods: null,
devices: null
@@ -3,7 +3,7 @@ import { z } from "zod";
import { AuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { AUDIT_LOGS, PROJECTS } from "@app/lib/api-docs";
import { getLastMidnightDateISO, removeTrailingSlash } from "@app/lib/fn";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

@@ -145,7 +145,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
actorAuthMethod: req.permission.authMethod,
projectId: req.params.workspaceId,
...req.query,
startDate: req.query.endDate || getLastMidnightDateISO(),
auditLogActor: req.query.actor,
actor: req.permission.type
});
@@ -220,7 +220,8 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
})
)
.optional(),
// displayName: z.string().trim(),
externalId: z.string().trim(),
displayName: z.string().trim(),
active: z.boolean()
}),
response: {

@@ -249,11 +250,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const primaryEmail = req.body.emails?.find((email) => email.primary)?.value;

const user = await req.server.services.scim.createScimUser({
orgId: req.permission.orgId,
username: req.body.userName,
email: primaryEmail,
firstName: req.body.name.givenName,
lastName: req.body.name.familyName,
orgId: req.permission.orgId
externalId: req.body.externalId
});

return user;

@@ -400,7 +402,12 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
schemas: z.array(z.string()),
id: z.string().trim(),
displayName: z.string().trim(),
members: z.array(z.any()).length(0)
members: z.array(
z.object({
value: z.string(),
display: z.string()
})
)
}),
response: {
200: z.object({

@@ -482,8 +489,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
// console.log("PATCH /Groups/:groupId req.body: ", req.body);
// console.log("PATCH /Groups/:groupId req.body: ", req.body.Operations[0]);
const group = await req.server.services.scim.updateScimGroupNamePatch({
groupId: req.params.groupId,
orgId: req.permission.orgId,
@@ -249,7 +249,7 @@ export const dynamicSecretLeaseServiceFactory = ({
if ((revokeResponse as { error?: Error })?.error) {
const { error } = revokeResponse as { error?: Error };
logger.error(error?.message, "Failed to revoke lease");
logger.error("Failed to revoke lease", { error: error?.message });
const deletedDynamicSecretLease = await dynamicSecretLeaseDAL.updateById(dynamicSecretLease.id, {
status: DynamicSecretLeaseStatus.FailedDeletion,
statusDetails: error?.message?.slice(0, 255)
@@ -1,8 +1,7 @@
import { z } from "zod";

export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2"
Postgres = "postgres"
}

export const DynamicSecretSqlDBSchema = z.object({

@@ -14,7 +13,7 @@ export const DynamicSecretSqlDBSchema = z.object({
password: z.string(),
creationStatement: z.string(),
revocationStatement: z.string(),
renewStatement: z.string().optional(),
renewStatement: z.string(),
ca: z.string().optional()
});

@@ -48,10 +48,10 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
host: providerInputs.host,
user: providerInputs.username,
password: providerInputs.password,
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
ssl,
pool: { min: 0, max: 1 }
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
}
});
return db;
};

@@ -73,25 +73,15 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const username = alphaNumericNanoId(32);
const password = generatePassword();
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();

const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
expiration
});

await db.transaction(async (tx) =>
Promise.all(
creationStatement
.toString()
.split(";")
.filter(Boolean)
.map((query) => tx.raw(query))
)
);
await db.raw(creationStatement.toString());
await db.destroy();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};

@@ -101,18 +91,9 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const db = await getClient(providerInputs);

const username = entityId;
const { database } = providerInputs;

const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
await db.transaction(async (tx) =>
Promise.all(
revokeStatement
.toString()
.split(";")
.filter(Boolean)
.map((query) => tx.raw(query))
)
);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
await db.raw(revokeStatement);

await db.destroy();
return { entityId: username };

@@ -124,19 +105,9 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;

const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
if (renewStatement)
await db.transaction(async (tx) =>
Promise.all(
renewStatement
.toString()
.split(";")
.filter(Boolean)
.map((query) => tx.raw(query))
)
);
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
await db.raw(renewStatement);

await db.destroy();
return { entityId: username };
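One side of the dynamic-secret hunks above runs each compiled handlebars statement as a single db.raw(...) call, while the other splits the statement on ";" and executes every fragment inside a knex transaction, so multi-statement creation, revocation, and renew SQL either fully applies or fully rolls back. A minimal sketch of that split-and-transact pattern, assuming an already-configured knex instance (the helper name here is illustrative, not part of the repository):

import { Knex } from "knex";

// Minimal sketch: execute a multi-statement SQL string as individual raw
// queries inside one transaction. Assumes `db` is a configured Knex instance.
export const runMultiStatement = async (db: Knex, statement: string) => {
  await db.transaction(async (tx) =>
    Promise.all(
      statement
        .split(";")
        .map((fragment) => fragment.trim())
        .filter(Boolean) // drop empty fragments, e.g. after a trailing ";"
        .map((query) => tx.raw(query))
    )
  );
};

If any fragment fails, the transaction callback rejects and knex rolls the whole batch back, which is what makes executing the fragments individually safe.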
@@ -24,10 +24,10 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
customAlerts: false,
auditLogs: false,
auditLogsRetentionDays: 0,
samlSSO: false,
scim: false,
samlSSO: true,
scim: true,
ldap: false,
groups: false,
groups: true,
status: null,
trial_end: null,
has_used_trial: true,

@@ -40,10 +40,10 @@ export type TFeatureSet = {
customAlerts: false;
auditLogs: false;
auditLogsRetentionDays: 0;
samlSSO: false;
scim: false;
samlSSO: true;
scim: true;
ldap: false;
groups: false;
groups: true;
status: null;
trial_end: null;
has_used_trial: true;
@@ -16,6 +16,7 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";

import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";

@@ -43,6 +44,7 @@ import {
type TScimServiceFactoryDep = {
scimDAL: Pick<TScimDALFactory, "create" | "find" | "findById" | "deleteById">;
userDAL: Pick<TUserDALFactory, "findOne" | "create" | "transaction">;
userAliasDAL: Pick<TUserAliasDALFactory, "create">;
orgDAL: Pick<
TOrgDALFactory,
"createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction"

@@ -231,7 +233,7 @@ export const scimServiceFactory = ({
});
};

const createScimUser = async ({ username, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
const createScimUser = async ({ orgId, username, email, firstName, lastName }: TCreateScimUserDTO) => {
const org = await orgDAL.findById(orgId);

if (!org)

@@ -473,7 +475,19 @@ export const scimServiceFactory = ({
};

const listScimGroups = async ({ orgId, offset, limit }: TListScimGroupsDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to list SCIM groups due to plan restriction. Upgrade plan to list SCIM groups."
});

const org = await orgDAL.findById(orgId);
if (!org) {
throw new ScimRequestError({
detail: "Organization Not Found",
status: 404
});
}

if (!org.scimEnabled)
throw new ScimRequestError({

@@ -501,7 +515,19 @@ export const scimServiceFactory = ({
};

const createScimGroup = async ({ displayName, orgId }: TCreateScimGroupDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to create a SCIM group due to plan restriction. Upgrade plan to create a SCIM group."
});

const org = await orgDAL.findById(orgId);
if (!org) {
throw new ScimRequestError({
detail: "Organization Not Found",
status: 404
});
}

if (!org.scimEnabled)
throw new ScimRequestError({

@@ -524,6 +550,12 @@ export const scimServiceFactory = ({
};

const getScimGroup = async ({ groupId, orgId }: TGetScimGroupDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to get SCIM group due to plan restriction. Upgrade plan to get SCIM group."
});

const group = await groupDAL.findOne({
id: groupId,
orgId

@@ -554,6 +586,26 @@ export const scimServiceFactory = ({
};

const updateScimGroupNamePut = async ({ groupId, orgId, displayName }: TUpdateScimGroupNamePutDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
});

const org = await orgDAL.findById(orgId);
if (!org) {
throw new ScimRequestError({
detail: "Organization Not Found",
status: 404
});
}

if (!org.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});

const [group] = await groupDAL.update(
{
id: groupId,

@@ -580,7 +632,19 @@ export const scimServiceFactory = ({
// TODO: add support for add/remove op
const updateScimGroupNamePatch = async ({ groupId, orgId, operations }: TUpdateScimGroupNamePatchDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group."
});

const org = await orgDAL.findById(orgId);
if (!org) {
throw new ScimRequestError({
detail: "Organization Not Found",
status: 404
});
}

if (!org.scimEnabled)
throw new ScimRequestError({

@@ -635,6 +699,26 @@ export const scimServiceFactory = ({
};

const deleteScimGroup = async ({ groupId, orgId }: TDeleteScimGroupDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
message: "Failed to delete SCIM group due to plan restriction. Upgrade plan to delete SCIM group."
});

const org = await orgDAL.findById(orgId);
if (!org) {
throw new ScimRequestError({
detail: "Organization Not Found",
status: 404
});
}

if (!org.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});

const [group] = await groupDAL.delete({
id: groupId,
orgId
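Every SCIM group handler above repeats the same three guards before touching data: the org's plan must include groups, the org must exist, and SCIM must be enabled for it. A hedged sketch of how those guards could be factored into one helper inside scimServiceFactory (the helper name is hypothetical; licenseService, orgDAL, BadRequestError, and ScimRequestError are the same dependencies the handlers above already use):

// Hypothetical helper, defined inside scimServiceFactory so it can reuse the
// factory's licenseService and orgDAL; mirrors the repeated guards above.
const assertScimGroupAccess = async (orgId: string, action: string) => {
  const plan = await licenseService.getPlan(orgId);
  if (!plan.groups)
    throw new BadRequestError({
      message: `Failed to ${action} SCIM group due to plan restriction. Upgrade plan to ${action} SCIM group.`
    });

  const org = await orgDAL.findById(orgId);
  if (!org) throw new ScimRequestError({ detail: "Organization Not Found", status: 404 });
  if (!org.scimEnabled)
    throw new ScimRequestError({ detail: "SCIM is disabled for the organization", status: 403 });

  return org;
};

Each handler would then reduce to a single call such as await assertScimGroupAccess(orgId, "update") followed by its DAL query.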
@@ -37,6 +37,7 @@ export type TCreateScimUserDTO = {
firstName: string;
lastName: string;
orgId: string;
externalId: string;
};

export type TUpdateScimUserDTO = {
@@ -1,2 +0,0 @@
export const getLastMidnightDateISO = (last = 1) =>
`${new Date(new Date().setDate(new Date().getDate() - last)).toISOString().slice(0, 10)}T00:00:00Z`;

@@ -2,6 +2,5 @@
// Full credits goes to https://github.com/rayapps to those functions
// Code taken to keep in in house and to adjust somethings for our needs
export * from "./array";
export * from "./dates";
export * from "./object";
export * from "./string";
@@ -41,8 +41,8 @@ export const secretsLimit: RateLimitOptions = {
};

export const authRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 60,
timeWindow: 5 * 60 * 1000,
max: 30,
keyGenerator: (req) => req.realIp
};
@@ -286,6 +286,7 @@ export const registerRoutes = async (
licenseService,
scimDAL,
userDAL,
userAliasDAL,
orgDAL,
projectDAL,
projectMembershipDAL,

@@ -315,7 +316,14 @@ export const registerRoutes = async (
});

const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL });
const userService = userServiceFactory({ userDAL });
const userService = userServiceFactory({
userDAL,
orgDAL,
projectMembershipDAL,
projectUserMembershipRoleDAL,
tokenService,
smtpService
});
const loginService = authLoginServiceFactory({ userDAL, smtpService, tokenService, orgDAL, tokenDAL: authTokenDAL });
const passwordService = authPaswordServiceFactory({
tokenService,
@@ -2,11 +2,93 @@ import { z } from "zod";
import { AuthTokenSessionsSchema, OrganizationsSchema, UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas";
import { ApiKeysSchema } from "@app/db/schemas/api-keys";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMethod, AuthMode } from "@app/services/auth/auth-type";

export const registerUserRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/me/emails/code",
config: {
rateLimit: authRateLimit
},
schema: {
response: {
200: z.object({})
}
},
preHandler: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
await server.services.user.sendEmailVerificationCode(req.permission.id);
return {};
}
});

server.route({
method: "POST",
url: "/me/emails/verify",
config: {
rateLimit: authRateLimit
},
schema: {
body: z.object({
code: z.string().trim()
}),
response: {
200: z.object({})
}
},
preHandler: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
await server.services.user.verifyEmailVerificationCode(req.permission.id, req.body.code);
return {};
}
});

server.route({
method: "GET",
url: "/me/users/same-email",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
users: UsersSchema.array()
})
}
},
preHandler: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const users = await server.services.user.listUsersWithSameEmail(req.permission.id);
return {
users
};
}
});

// server.route({ // TODO(dangtony98)
// method: "POST",
// url: "/me/users/merge",
// config: {
// rateLimit: authRateLimit
// },
// schema: {
// body: z.object({
// username: z.string().trim()
// }),
// response: {
// 200: z.object({})
// }
// },
// preHandler: verifyAuth([AuthMode.JWT]),
// handler: async (req) => {
// console.log("POST /me/users/merge req.body: ", req.body);
// return {};
// }
// });

server.route({
method: "PATCH",
url: "/me/mfa",
@@ -27,6 +27,11 @@ export const getTokenConfig = (tokenType: TokenType) => {
const expiresAt = new Date(new Date().getTime() + 86400000);
return { token, expiresAt };
}
case TokenType.TOKEN_EMAIL_VERIFICATION: {
const token = crypto.randomBytes(16).toString("hex");
const expiresAt = new Date(new Date().getTime() + 300000);
return { token, expiresAt };
}
case TokenType.TOKEN_EMAIL_MFA: {
// generate random 6-digit code
const token = String(crypto.randomInt(10 ** 5, 10 ** 6 - 1));

@@ -1,5 +1,6 @@
export enum TokenType {
TOKEN_EMAIL_CONFIRMATION = "emailConfirmation",
TOKEN_EMAIL_VERIFICATION = "emailVerification", // unverified -> verified email
TOKEN_EMAIL_MFA = "emailMfa",
TOKEN_EMAIL_ORG_INVITATION = "organizationInvitation",
TOKEN_EMAIL_PASSWORD_RESET = "passwordReset"
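The TOKEN_EMAIL_VERIFICATION branch shown above issues a 32-character hex token (16 random bytes) that expires 300000 ms, i.e. five minutes, after creation. A standalone sketch of the same construction using Node's crypto module (the values mirror the hunk; the variable names are only illustrative):

import crypto from "node:crypto";

// 16 random bytes rendered as hex gives a 32-character verification code.
const token = crypto.randomBytes(16).toString("hex");
// 300000 ms = 5 * 60 * 1000, so the code is valid for five minutes.
const expiresAt = new Date(Date.now() + 300000);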
@@ -60,7 +60,7 @@ export const authSignupServiceFactory = ({
});

await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
template: SmtpTemplates.SignupEmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [email],
substitutions: {

@@ -129,7 +129,16 @@ export const authSignupServiceFactory = ({
}

const updateduser = await authDAL.transaction(async (tx) => {
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
const us = await userDAL.updateById(
user.id,
{
firstName,
lastName,
isAccepted: true,
isEmailVerified: true
},
tx
);
if (!us) throw new Error("User not found");
const userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,

@@ -243,7 +252,16 @@ export const authSignupServiceFactory = ({
});

const updateduser = await authDAL.transaction(async (tx) => {
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
const us = await userDAL.updateById(
user.id,
{
firstName,
lastName,
isAccepted: true,
isEmailVerified: true
},
tx
);
if (!us) throw new Error("User not found");
const userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
@@ -175,7 +175,8 @@ export const orgServiceFactory = ({
authMethods: [AuthMethod.EMAIL],
username: email,
email,
isAccepted: true
isAccepted: true,
isEmailVerified: false
},
tx
);
@@ -17,6 +17,7 @@ export type TSmtpSendMail = {
export type TSmtpService = ReturnType<typeof smtpServiceFactory>;

export enum SmtpTemplates {
SignupEmailVerification = "signupEmailVerification.handlebars",
EmailVerification = "emailVerification.handlebars",
SecretReminder = "secretReminder.handlebars",
EmailMfa = "emailMfa.handlebars",
@@ -9,9 +9,8 @@
<body>
<h2>Confirm your email address</h2>
<p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
<p>Your confirmation code is below — enter it in the browser window where you've started confirming your email.</p>
<h1>{{code}}</h1>
<p>Questions about setting up Infisical? Email us at support@infisical.com</p>
</body>

</html>

@@ -0,0 +1,17 @@
<!DOCTYPE html>
<html>

<head>
<meta charset="utf-8">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<title>Code</title>
</head>

<body>
<h2>Confirm your email address</h2>
<p>Your confirmation code is below — enter it in the browser window where you've started signing up for Infisical.</p>
<h1>{{code}}</h1>
<p>Questions about setting up Infisical? Email us at support@infisical.com</p>
</body>

</html>
@@ -102,7 +102,8 @@ export const superAdminServiceFactory = ({
superAdmin: true,
isGhost: false,
isAccepted: true,
authMethods: [AuthMethod.EMAIL]
authMethods: [AuthMethod.EMAIL],
isEmailVerified: false
},
tx
);
@@ -4,13 +4,17 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TUserDALFactory } from "@app/services/user/user-dal";

export const normalizeUsername = async (username: string, userDAL: Pick<TUserDALFactory, "findOne">) => {
let attempt = slugify(username);
let attempt = slugify(username, {
preserveCharacters: ["@", "."]
});

let user = await userDAL.findOne({ username: attempt });
if (!user) return attempt;

while (true) {
attempt = slugify(`${username}-${alphaNumericNanoId(4)}`);
attempt = slugify(`${username}-${alphaNumericNanoId(4)}`, {
preserveCharacters: ["@", "."]
});
// eslint-disable-next-line no-await-in-loop
user = await userDAL.findOne({ username: attempt });
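One variant of normalizeUsername above slugifies the raw username directly, while the other passes preserveCharacters: ["@", "."] so that email-style usernames keep their "@" and "." rather than having them collapsed into separators. A small illustration, assuming @sindresorhus/slugify semantics (the exact collapsed output depends on which slugify library and version is actually in use):

import slugify from "@sindresorhus/slugify";

const raw = "Jane.Doe@example.com";

// Without preserveCharacters, non-alphanumeric characters become separators:
const collapsed = slugify(raw); // e.g. "jane-doe-example-com"

// With preserveCharacters, the email shape survives normalization:
const preserved = slugify(raw, { preserveCharacters: ["@", "."] }); // e.g. "jane.doe@example.com"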
@@ -1,15 +1,79 @@
import { BadRequestError } from "@app/lib/errors";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TProjectUserMembershipRoleDALFactory } from "@app/services/project-membership/project-user-membership-role-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";

import { AuthMethod } from "../auth/auth-type";
import { TUserDALFactory } from "./user-dal";

type TUserServiceFactoryDep = {
userDAL: TUserDALFactory;
orgDAL: TOrgDALFactory;
projectMembershipDAL: TProjectMembershipDALFactory;
projectUserMembershipRoleDAL: TProjectUserMembershipRoleDALFactory;
tokenService: TAuthTokenServiceFactory;
smtpService: Pick<TSmtpService, "sendMail">;
};

export type TUserServiceFactory = ReturnType<typeof userServiceFactory>;

export const userServiceFactory = ({ userDAL }: TUserServiceFactoryDep) => {
export const userServiceFactory = ({ userDAL, tokenService, smtpService }: TUserServiceFactoryDep) => {
const sendEmailVerificationCode = async (userId: string) => {
const user = await userDAL.findById(userId);
if (!user) throw new BadRequestError({ name: "Failed to find user" });
if (!user.email)
throw new BadRequestError({ name: "Failed to send email verification code due to no email on user" });
if (user.isEmailVerified)
throw new BadRequestError({ name: "Failed to send email verification code due to email already verified" });

const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});

await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
};

const verifyEmailVerificationCode = async (userId: string, code: string) => {
const user = await userDAL.findById(userId);
if (!user) throw new BadRequestError({ name: "Failed to find user" });
if (user.isEmailVerified)
throw new BadRequestError({ name: "Failed to verify email verification code due to email already verified" });

await tokenService.validateTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id,
code
});

await userDAL.updateById(userId, { isEmailVerified: true });
};

// lists users with same verified email only
const listUsersWithSameEmail = async (userId: string) => {
const user = await userDAL.findById(userId);
if (!user) throw new BadRequestError({ name: "Failed to find user" });
if (!user.email)
throw new BadRequestError({ name: "Failed to list users with same email due to no email on user" });

const users = await userDAL.find({
email: user.email,
isEmailVerified: true
});

return users;
};

const toggleUserMfa = async (userId: string, isMfaEnabled: boolean) => {
const user = await userDAL.findById(userId);

@@ -72,6 +136,9 @@ export const userServiceFactory = ({ userDAL }: TUserServiceFactoryDep) => {
};

return {
sendEmailVerificationCode,
verifyEmailVerificationCode,
listUsersWithSameEmail,
toggleUserMfa,
updateUserName,
updateAuthMethods,
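Taken together, the router and service hunks above describe a two-step email verification flow for a logged-in user: request a code (generated via the token service and delivered over SMTP), then submit it back so the backend can flip isEmailVerified. A hedged client-side sketch of that flow; the base URL and route prefix are assumptions for illustration (only the /me/emails/code and /me/emails/verify suffixes come from the router above), and the bearer JWT must be a real session token:

// Assumed prefix for illustration; only the /me/emails/* suffixes are defined above.
const baseUrl = "http://localhost:8080/api/v1/user";

const authHeaders = (jwt: string) => ({
  Authorization: `Bearer ${jwt}`,
  "Content-Type": "application/json"
});

// Step 1: ask the backend to email a verification code to the current user.
export const requestEmailVerificationCode = async (jwt: string) => {
  await fetch(`${baseUrl}/me/emails/code`, { method: "POST", headers: authHeaders(jwt) });
};

// Step 2: submit the code the user received; on success the backend marks
// the user's email as verified (isEmailVerified = true).
export const verifyEmailCode = async (jwt: string, code: string) => {
  await fetch(`${baseUrl}/me/emails/verify`, {
    method: "POST",
    headers: authHeaders(jwt),
    body: JSON.stringify({ code })
  });
};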
@@ -528,7 +528,6 @@ type GetRawSecretsV3Request struct {
WorkspaceId string `json:"workspaceId"`
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
}

type GetRawSecretsV3Response struct {
@@ -479,7 +479,7 @@ func (tm *AgentManager) GetToken() string {
// Fetches a new access token using client credentials
func (tm *AgentManager) FetchNewAccessToken() error {
clientID := os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
clientID := os.Getenv("INFISICAL_UNIVERSAL_AUTH_CLIENT_ID")
if clientID == "" {
clientIDAsByte, err := ReadFile(tm.clientIdPath)
if err != nil {

@@ -509,7 +509,7 @@ func (tm *AgentManager) FetchNewAccessToken() error {
// save as cache in memory
tm.cachedClientSecret = clientSecret

loginResponse, err := util.UniversalAuthLogin(clientID, clientSecret)
err, loginResponse := universalAuthLogin(clientID, clientSecret)
if err != nil {
return err
}

@@ -725,6 +725,20 @@ func (tm *AgentManager) MonitorSecretChanges(secretTemplate Template, templateId
}
}

func universalAuthLogin(clientId string, clientSecret string) (error, api.UniversalAuthLoginResponse) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)

tokenResponse, err := api.CallUniversalAuthLogin(httpClient, api.UniversalAuthLoginRequest{ClientId: clientId, ClientSecret: clientSecret})
if err != nil {
return err, api.UniversalAuthLoginResponse{}
}

return nil, tokenResponse
}

// runCmd represents the run command
var agentCmd = &cobra.Command{
Example: `
@@ -44,11 +44,6 @@ var exportCmd = &cobra.Command{
util.HandleError(err)
}

includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err)
}

projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err)

@@ -64,7 +59,8 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}

token, err := util.GetInfisicalToken(cmd)
infisicalToken, err := util.GetInfisicalServiceToken(cmd)

if err != nil {
util.HandleError(err, "Unable to parse flag")
}

@@ -79,21 +75,7 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}

request := models.GetAllSecretsParameters{
Environment: environmentName,
TagSlugs: tagSlugs,
WorkspaceId: projectId,
SecretsPath: secretsPath,
IncludeImport: includeImports,
}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

secrets, err := util.GetAllEnvironmentVariables(request, "")
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, WorkspaceId: projectId, SecretsPath: secretsPath}, "")
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}

@@ -106,16 +88,9 @@ var exportCmd = &cobra.Command{
var output string
if shouldExpandSecrets {

authParams := models.ExpandSecretsAuthentication{}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}

secrets = util.ExpandSecrets(secrets, authParams, "")
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, "")
}
secrets = util.FilterSecretsByTag(secrets, tagSlugs)
output, err = formatEnvs(secrets, format)

@@ -135,7 +110,6 @@ func init() {
exportCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().Bool("include-imports", true, "Imported linked secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from")
@@ -36,33 +36,18 @@ var getCmd = &cobra.Command{
}
}

projectId, err := cmd.Flags().GetString("projectId")
infisicalToken, err := util.GetInfisicalServiceToken(cmd)

if err != nil {
util.HandleError(err, "Unable to parse flag")
}

token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
foldersPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

request := models.GetAllFoldersParameters{
Environment: environmentName,
WorkspaceId: projectId,
FoldersPath: foldersPath,
}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

folders, err := util.GetAllFolders(request)
folders, err := util.GetAllFolders(models.GetAllFoldersParameters{Environment: environmentName, InfisicalToken: infisicalToken, FoldersPath: foldersPath})
if err != nil {
util.HandleError(err, "Unable to get folders")
}
@@ -55,157 +55,95 @@ var loginCmd = &cobra.Command{
Short: "Login into your Infisical account",
DisableFlagsInUseLine: true,
Run: func(cmd *cobra.Command, args []string) {

loginMethod, err := cmd.Flags().GetString("method")
if err != nil {
util.HandleError(err)
}
plainOutput, err := cmd.Flags().GetBool("plain")
if err != nil {
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
log.Debug().Err(err)
} else if err != nil {
util.HandleError(err)
}

if loginMethod != "user" && loginMethod != "universal-auth" {
util.PrintErrorMessageAndExit("Invalid login method. Please use either 'user' or 'universal-auth'")
}

if loginMethod == "user" {

currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
log.Debug().Err(err)
} else if err != nil {
util.HandleError(err)
}

if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired && len(currentLoggedInUserDetails.UserCredentials.PrivateKey) != 0 {
shouldOverride, err := userLoginMenu(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
util.HandleError(err)
}

if !shouldOverride {
return
}
}
//override domain
domainQuery := true
if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
overrideDomain, err := DomainOverridePrompt()
if err != nil {
util.HandleError(err)
}

//if not override set INFISICAL_URL to exported var
//set domainQuery to false
if !overrideDomain {
domainQuery = false
config.INFISICAL_URL = config.INFISICAL_URL_MANUAL_OVERRIDE
}

}

//prompt user to select domain between Infisical cloud and self hosting
if domainQuery {
err = askForDomain()
if err != nil {
util.HandleError(err, "Unable to parse domain url")
}
}
var userCredentialsToBeStored models.UserCredentials

interactiveLogin := false
if cmd.Flags().Changed("interactive") {
interactiveLogin = true
cliDefaultLogin(&userCredentialsToBeStored)
}

//call browser login function
if !interactiveLogin {
fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]")
userCredentialsToBeStored, err = browserCliLogin()
if err != nil {
//default to cli login on error
cliDefaultLogin(&userCredentialsToBeStored)
}
}

err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
if err != nil {
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
log.Error().Msgf("\nTo trouble shoot further, read https://infisical.com/docs/cli/faq")
log.Debug().Err(err)
//return here
util.HandleError(err)
}

err = util.WriteInitalConfig(&userCredentialsToBeStored)
if err != nil {
util.HandleError(err, "Unable to write write to Infisical Config file. Please try again")
}

// clear backed up secrets from prev account
util.DeleteBackupSecrets()

whilte := color.New(color.FgGreen)
boldWhite := whilte.Add(color.Bold)
time.Sleep(time.Second * 1)
boldWhite.Printf(">>>> Welcome to Infisical!")
boldWhite.Printf(" You are now logged in as %v <<<< \n", userCredentialsToBeStored.Email)

plainBold := color.New(color.Bold)

plainBold.Println("\nQuick links")
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
} else if loginMethod == "universal-auth" {

clientId, err := cmd.Flags().GetString("client-id")
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired && len(currentLoggedInUserDetails.UserCredentials.PrivateKey) != 0 {
shouldOverride, err := userLoginMenu(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
util.HandleError(err)
}

clientSecret, err := cmd.Flags().GetString("client-secret")
if err != nil {
util.HandleError(err)
}

if clientId == "" {
clientId = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
if clientId == "" {
util.PrintErrorMessageAndExit("Please provide client-id")
}
}
if clientSecret == "" {
clientSecret = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
if clientSecret == "" {
util.PrintErrorMessageAndExit("Please provide client-secret")
}
}

res, err := util.UniversalAuthLogin(clientId, clientSecret)

if err != nil {
util.HandleError(err)
}

if plainOutput {
fmt.Println(res.AccessToken)
if !shouldOverride {
return
}
}
//override domain
domainQuery := true
if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
overrideDomain, err := DomainOverridePrompt()
if err != nil {
util.HandleError(err)
}

boldGreen := color.New(color.FgGreen).Add(color.Bold)
boldPlain := color.New(color.Bold)
time.Sleep(time.Second * 1)
boldGreen.Printf(">>>> Successfully authenticated with Universal Auth!\n\n")
boldPlain.Printf("Universal Auth Access Token:\n%v", res.AccessToken)

plainBold := color.New(color.Bold)
plainBold.Println("\n\nYou can use this access token to authenticate through other commands in the CLI.")
//if not override set INFISICAL_URL to exported var
//set domainQuery to false
if !overrideDomain {
domainQuery = false
config.INFISICAL_URL = config.INFISICAL_URL_MANUAL_OVERRIDE
}

}

//prompt user to select domain between Infisical cloud and self hosting
if domainQuery {
err = askForDomain()
if err != nil {
util.HandleError(err, "Unable to parse domain url")
}
}
var userCredentialsToBeStored models.UserCredentials

interactiveLogin := false
if cmd.Flags().Changed("interactive") {
interactiveLogin = true
cliDefaultLogin(&userCredentialsToBeStored)
}

//call browser login function
if !interactiveLogin {
fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]")
userCredentialsToBeStored, err = browserCliLogin()
if err != nil {
//default to cli login on error
cliDefaultLogin(&userCredentialsToBeStored)
}
}

err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
if err != nil {
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
log.Error().Msgf("\nTo trouble shoot further, read https://infisical.com/docs/cli/faq")
log.Debug().Err(err)
//return here
util.HandleError(err)
}

err = util.WriteInitalConfig(&userCredentialsToBeStored)
if err != nil {
util.HandleError(err, "Unable to write write to Infisical Config file. Please try again")
}

// clear backed up secrets from prev account
util.DeleteBackupSecrets()

whilte := color.New(color.FgGreen)
boldWhite := whilte.Add(color.Bold)
time.Sleep(time.Second * 1)
boldWhite.Printf(">>>> Welcome to Infisical!")
boldWhite.Printf(" You are now logged in as %v <<<< \n", userCredentialsToBeStored.Email)

plainBold := color.New(color.Bold)

plainBold.Println("\nQuick links")
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
},
}

@@ -375,10 +313,6 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
func init() {
rootCmd.AddCommand(loginCmd)
loginCmd.Flags().BoolP("interactive", "i", false, "login via the command line")
loginCmd.Flags().String("method", "user", "login method [user, universal-auth]")
loginCmd.Flags().String("client-id", "", "client id for universal auth")
loginCmd.Flags().Bool("plain", false, "only output the token without any formatting")
loginCmd.Flags().String("client-secret", "", "client secret for universal auth")
}

func DomainOverridePrompt() (bool, error) {
@@ -40,14 +40,8 @@ func init() {
rootCmd.PersistentFlags().StringP("log-level", "l", "info", "log level (trace, debug, info, warn, error, fatal)")
rootCmd.PersistentFlags().Bool("telemetry", true, "Infisical collects non-sensitive telemetry data to enhance features and improve user experience. Participation is voluntary")
rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", util.INFISICAL_DEFAULT_API_URL, "Point the CLI to your own backend [can also set via environment variable name: INFISICAL_API_URL]")
rootCmd.PersistentFlags().Bool("silent", false, "Disable output of tip/info messages. Useful when running in scripts or CI/CD pipelines.")
rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
silent, err := cmd.Flags().GetBool("silent")
if err != nil {
util.HandleError(err)
}

if !util.IsRunningInDocker() && !silent {
if !util.IsRunningInDocker() {
util.CheckForUpdate()
}
}
@@ -62,7 +62,8 @@ var runCmd = &cobra.Command{
}
}

token, err := util.GetInfisicalToken(cmd)
infisicalToken, err := util.GetInfisicalServiceToken(cmd)

if err != nil {
util.HandleError(err, "Unable to parse flag")
}

@@ -72,11 +73,6 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}

projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")

@@ -107,22 +103,7 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}

request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir)
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports, Recursive: recursive}, projectConfigDir)

if err != nil {
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")

@@ -135,16 +116,9 @@ var runCmd = &cobra.Command{
}

if shouldExpandSecrets {

authParams := models.ExpandSecretsAuthentication{}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}

secrets = util.ExpandSecrets(secrets, authParams, projectConfigDir)
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, projectConfigDir)
}

secretsByKey := getSecretsByKeys(secrets)

@@ -175,15 +149,7 @@ var runCmd = &cobra.Command{
log.Debug().Msgf("injecting the following environment variables into shell: %v", env)

Telemetry.CaptureEvent("cli-command:run",
posthog.NewProperties().
Set("secretsCount", len(secrets)).
Set("environment", environmentName).
Set("isUsingServiceToken", token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER).
Set("isUsingUniversalAuthToken", token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER).
Set("single-command", strings.Join(args, " ")).
Set("multi-command", cmd.Flag("command").Value.String()).
Set("version", util.CLI_VERSION))
Telemetry.CaptureEvent("cli-command:run", posthog.NewProperties().Set("secretsCount", len(secrets)).Set("environment", environmentName).Set("isUsingServiceToken", infisicalToken != "").Set("single-command", strings.Join(args, " ")).Set("multi-command", cmd.Flag("command").Value.String()).Set("version", util.CLI_VERSION))

if cmd.Flags().Changed("command") {
command := cmd.Flag("command").Value.String()

@@ -238,7 +204,6 @@ func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
runCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("include-imports", true, "Import linked secrets ")
@@ -38,12 +38,12 @@ var secretsCmd = &cobra.Command{
}
}

token, err := util.GetInfisicalToken(cmd)
infisicalToken, err := util.GetInfisicalServiceToken(cmd)

if err != nil {
util.HandleError(err, "Unable to parse flag")
}

projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

@@ -78,22 +78,7 @@ var secretsCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}

request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

secrets, err := util.GetAllEnvironmentVariables(request, "")
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports, Recursive: recursive}, "")
if err != nil {
util.HandleError(err)
}

@@ -105,15 +90,9 @@ var secretsCmd = &cobra.Command{
}

if shouldExpandSecrets {

authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}

secrets = util.ExpandSecrets(secrets, authParams, "")
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, "")
}

visualize.PrintAllSecretDetails(secrets)

@@ -423,12 +402,8 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
}
}

token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)

shouldExpand, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

@@ -438,11 +413,6 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}

projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")

@@ -458,37 +428,11 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse path flag")
}

request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: true,
Recursive: recursive,
}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

secrets, err := util.GetAllEnvironmentVariables(request, "")
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: true, Recursive: recursive}, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}

if shouldExpand {
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}

secrets = util.ExpandSecrets(secrets, authParams, "")
}

requestedSecrets := []models.SingleEnvironmentVariable{}

secretsMap := getSecretsByKeys(secrets)

@@ -531,12 +475,8 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}

token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)

projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}

@@ -546,21 +486,7 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}

request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: true,
}

if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}

secrets, err := util.GetAllEnvironmentVariables(request, "")
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: true}, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}

@@ -760,23 +686,19 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod
func init() {
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
secretsGenerateExampleEnvCmd.Flags().String("path", "/", "Fetch secrets from within a folder path")
secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)

secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsGetCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
secretsCmd.AddCommand(secretsGetCmd)
secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
secretsGetCmd.Flags().Bool("raw-value", false, "Returns only the value of secret, only works with one secret")
secretsGetCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
secretsCmd.AddCommand(secretsGetCmd)

secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsSetCmd)
secretsSetCmd.Flags().String("path", "/", "set secrets within a folder path")

// Only supports logged in users (JWT auth)
secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()

@@ -785,8 +707,6 @@ func init() {
secretsDeleteCmd.Flags().String("type", "personal", "the type of secret to delete: personal or shared (default: personal)")
secretsDeleteCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsCmd.AddCommand(secretsDeleteCmd)

// Only supports logged in users (JWT auth)
secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()

@@ -798,7 +718,6 @@ func init() {
// Add getCmd, createCmd and deleteCmd flags here
getCmd.Flags().StringP("path", "p", "/", "The path from where folders should be fetched from")
getCmd.Flags().String("token", "", "Fetch folders using the infisical token")
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
folderCmd.AddCommand(getCmd)

// Add createCmd flags here

@@ -816,7 +735,6 @@ func init() {
// ** End of folders sub command

secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
secretsCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
@ -1,63 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd

import (
"strings"
"time"

"github.com/Infisical/infisical-merge/packages/util"
"github.com/fatih/color"
"github.com/spf13/cobra"
)

var tokenCmd = &cobra.Command{
Use: "token",
Short: "Manage your access tokens",
DisableFlagsInUseLine: true,
Example: "infisical token",
Args: cobra.ExactArgs(0),
PreRun: func(cmd *cobra.Command, args []string) {
util.RequireLogin()
},
Run: func(cmd *cobra.Command, args []string) {
},
}

var tokenRenewCmd = &cobra.Command{
Use: "renew [token]",
Short: "Used to renew your universal auth access token",
DisableFlagsInUseLine: true,
Example: "infisical token renew <access-token>",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
// args[0] will be the <INSERT_TOKEN> from your command call
token := args[0]

if strings.HasPrefix(token, "st.") {
util.PrintErrorMessageAndExit("You are trying to renew a service token. You can only renew universal auth access tokens.")
}

renewedAccessToken, err := util.RenewUniversalAuthAccessToken(token)

if err != nil {
util.HandleError(err, "Unable to renew token")
}

boldGreen := color.New(color.FgGreen).Add(color.Bold)
time.Sleep(time.Second * 1)
boldGreen.Printf(">>>> Successfully renewed token!\n\n")
boldGreen.Printf("Renewed Access Token:\n%v", renewedAccessToken)

plainBold := color.New(color.Bold)
plainBold.Println("\n\nYou can use the new access token to authenticate through other commands in the CLI.")

},
}

func init() {
tokenCmd.AddCommand(tokenRenewCmd)

rootCmd.AddCommand(tokenCmd)
}
@ -59,11 +59,6 @@ type DynamicSecretLease struct {
Data map[string]interface{} `json:"data"`
}

type TokenDetails struct {
Type string
Token string
}

type SingleFolder struct {
ID string `json:"_id"`
Name string `json:"name"`
@ -102,11 +97,10 @@ type GetAllSecretsParameters struct {
}

type GetAllFoldersParameters struct {
WorkspaceId string
Environment string
FoldersPath string
InfisicalToken string
UniversalAuthAccessToken string
WorkspaceId string
Environment string
FoldersPath string
InfisicalToken string
}

type CreateFolderParameters struct {
@ -129,8 +123,3 @@ type ExpandSecretsAuthentication struct {
InfisicalToken string
UniversalAuthAccessToken string
}

type MachineIdentityCredentials struct {
ClientId string
ClientSecret string
}
@ -1,23 +1,17 @@
package util

const (
CONFIG_FILE_NAME = "infisical-config.json"
CONFIG_FOLDER_NAME = ".infisical"
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID"
INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET"
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
SECRET_TYPE_PERSONAL = "personal"
SECRET_TYPE_SHARED = "shared"
KEYRING_SERVICE_NAME = "infisical"
PERSONAL_SECRET_TYPE_NAME = "personal"
SHARED_SECRET_TYPE_NAME = "shared"

SERVICE_TOKEN_IDENTIFIER = "service-token"
UNIVERSAL_AUTH_TOKEN_IDENTIFIER = "universal-auth-token"
CONFIG_FILE_NAME = "infisical-config.json"
CONFIG_FOLDER_NAME = ".infisical"
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
SECRET_TYPE_PERSONAL = "personal"
SECRET_TYPE_SHARED = "shared"
KEYRING_SERVICE_NAME = "infisical"
PERSONAL_SECRET_TYPE_NAME = "personal"
SHARED_SECRET_TYPE_NAME = "shared"
)

var (
@ -19,7 +19,7 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder

var foldersToReturn []models.SingleFolder
var folderErr error
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
if params.InfisicalToken == "" {

log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")

@ -44,24 +44,11 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
folders, err := GetFoldersViaJTW(loggedInUserDetails.UserCredentials.JTWToken, workspaceFile.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
} else if params.InfisicalToken != "" {
log.Debug().Msg("GetAllFolders: Trying to fetch folders using service token")

} else {
// get folders via service token
folders, err := GetFoldersViaServiceToken(params.InfisicalToken, params.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
} else if params.UniversalAuthAccessToken != "" {
log.Debug().Msg("GetAllFolders: Trying to fetch folders using universal auth")

if params.WorkspaceId == "" {
PrintErrorMessageAndExit("Project ID is required when using machine identity")
}

// get folders via machine identity
folders, err := GetFoldersViaMachineIdentity(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
}
return foldersToReturn, folderErr
}
@ -145,34 +132,6 @@ func GetFoldersViaServiceToken(fullServiceToken string, workspaceId string, envi
return folders, nil
}

func GetFoldersViaMachineIdentity(accessToken string, workspaceId string, envSlug string, foldersPath string) ([]models.SingleFolder, error) {
httpClient := resty.New()
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")

getFoldersRequest := api.GetFoldersV1Request{
WorkspaceId: workspaceId,
Environment: envSlug,
FoldersPath: foldersPath,
}

apiResponse, err := api.CallGetFoldersV1(httpClient, getFoldersRequest)
if err != nil {
return nil, err
}

var folders []models.SingleFolder

for _, folder := range apiResponse.Folders {
folders = append(folders, models.SingleFolder{
Name: folder.Name,
ID: folder.ID,
})
}

return folders, nil
}

// CreateFolder creates a folder in Infisical
func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, error) {
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()
@ -9,11 +9,8 @@ import (
"os/exec"
"path"
"strings"
"time"

"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/go-resty/resty/v2"
"github.com/spf13/cobra"
)

@ -67,70 +64,18 @@ func IsSecretTypeValid(s string) bool {
return false
}

func GetInfisicalToken(cmd *cobra.Command) (token *models.TokenDetails, err error) {
func GetInfisicalServiceToken(cmd *cobra.Command) (serviceToken string, err error) {
infisicalToken, err := cmd.Flags().GetString("token")

if err != nil {
return nil, err
if infisicalToken == "" {
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
}

if infisicalToken == "" { // If no flag is passed, we first check for the universal auth access token env variable.
infisicalToken = os.Getenv(INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME)

if infisicalToken == "" { // If it's still empty after the first env check, we check for the service token env variable.
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
}
}

if infisicalToken == "" { // If it's empty, we return nothing at all.
return nil, nil
}

if strings.HasPrefix(infisicalToken, "st.") {
return &models.TokenDetails{
Type: SERVICE_TOKEN_IDENTIFIER,
Token: infisicalToken,
}, nil
}

return &models.TokenDetails{
Type: UNIVERSAL_AUTH_TOKEN_IDENTIFIER,
Token: infisicalToken,
}, nil

}

func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuthLoginResponse, error) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)

tokenResponse, err := api.CallUniversalAuthLogin(httpClient, api.UniversalAuthLoginRequest{ClientId: clientId, ClientSecret: clientSecret})
if err != nil {
return api.UniversalAuthLoginResponse{}, err
}

return tokenResponse, nil
}

func RenewUniversalAuthAccessToken(accessToken string) (string, error) {

httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)

request := api.UniversalAuthRefreshRequest{
AccessToken: accessToken,
}

tokenResponse, err := api.CallUniversalAuthRefreshAccessToken(httpClient, request)
if err != nil {
return "", err
}

return tokenResponse.AccessToken, nil
return infisicalToken, nil
}

// Checks if the passed in email already exists in the users slice
@ -159,7 +159,7 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")

getSecretsRequest := api.GetRawSecretsV3Request{
getSecretsRequest := api.GetEncryptedSecretsV3Request{
WorkspaceId: workspaceId,
Environment: environmentName,
IncludeImport: includeImports,
@ -171,8 +171,7 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
getSecretsRequest.SecretPath = secretsPath
}

rawSecrets, err := api.CallGetRawSecretsV3(httpClient, getSecretsRequest)

rawSecrets, err := api.CallGetRawSecretsV3(httpClient, api.GetRawSecretsV3Request{WorkspaceId: workspaceId, SecretPath: secretsPath, Environment: environmentName})
if err != nil {
return models.PlaintextSecretResult{}, err
}
@ -183,7 +182,7 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
}

for _, secret := range rawSecrets.Secrets {
plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, Type: secret.Type, WorkspaceId: secret.Workspace})
plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, WorkspaceId: secret.Workspace})
}

// if includeImports {
@ -356,11 +355,6 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
log.Debug().Msg("Trying to fetch secrets using service token")
secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
} else if params.UniversalAuthAccessToken != "" {

if params.WorkspaceId == "" {
PrintErrorMessageAndExit("Project ID is required when using machine identity")
}

log.Debug().Msg("Trying to fetch secrets using universal auth")
res, err := GetPlainTextSecretsViaMachineIdentity(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
@ -12,53 +12,4 @@ The CLI uses authentication to verify your identity. When you enter the correct

To change where the login credentials are stored, visit the [vaults command](./vault).

If you have added multiple users, you can switch between the users by using the [user command](./user).


### Flags
<Accordion title="--method">
```bash
infisical login --method=<auth-method> # Optional, will default to 'user'.
```

#### Valid values for the `method` flag are:
- `user`: Login using email and password.
- `universal-auth`: Login using a universal auth client ID and client secret.

<Info>
When `method` is set to `universal-auth`, the `client-id` and `client-secret` flags are required. Optionally you can set the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` and `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variables instead of using the flags.

When you authenticate with universal auth, an access token will be printed to the console upon successful login. This token can be used to authenticate with the Infisical API and the CLI by passing it in the `--token` flag when applicable.

Use the `--plain` flag along with `--silent` to print only the token in plain text when using the `universal-auth` method. An end-to-end example is shown below.

</Info>

</Accordion>
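Taken together, the flags above allow a fully non-interactive login. The following is a minimal sketch of that flow and is not part of the original page; the client ID, client secret, and project ID are placeholder values you would substitute with your own:

```bash
# Log in with universal auth and capture only the access token
# (--plain with --silent prints just the token in plain text).
UA_ACCESS_TOKEN=$(infisical login --method=universal-auth --client-id=<client-id> --client-secret=<client-secret> --plain --silent)

# Pass the access token to other CLI commands via the --token flag, e.g. to list secrets.
infisical secrets --token "$UA_ACCESS_TOKEN" --projectId=<project-id> --env=dev
```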
<Accordion title="--client-id">
```bash
infisical login --client-id=<client-id> # Optional, required if --method=universal-auth.
```

#### Description
The client ID of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.

<Tip>
The `client-id` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID` environment variable.
</Tip>
</Accordion>
<Accordion title="--client-secret">
```bash
infisical login --client-secret=<client-secret> # Optional, required if --method=universal-auth.
```
#### Description
The client secret of the universal auth client. This is required if the `--method` flag is set to `universal-auth`.

<Tip>
The `client-secret` flag can be substituted with the `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET` environment variable.
</Tip>

</Accordion>


If you have added multiple users, you can switch between the users by using the [user command](./user).
@ -1,21 +0,0 @@
---
title: "infisical token"
description: "Manage your Infisical identity access tokens"
---

```bash
infisical service-token renew <ua-access-token>
```

## Description
The Infisical `token` command allows you to manage your universal auth access tokens.
With this command, you can renew your access tokens. In the future more subcommands will be added to better help you manage your tokens through the CLI.

<Accordion title="token renew <access-token>" defaultOpen="true">
Use this command to renew your access token. This command will renew your access token and output a renewed access token to the console.

```bash
$ infisical token renew <ua-access-token>
```

</Accordion>
Binary file not shown. (Before: 94 KiB)
Binary file not shown. (Before: 120 KiB)
Binary file not shown. (Before: 524 KiB)
@ -1,76 +0,0 @@
---
title: "AWS Amplify"
description: "Learn how to sync secrets from Infisical to AWS Amplify."
---

Prerequisites:
- Infisical Cloud account
- Add the secrets you wish to sync to Amplify to [Infisical Cloud](https://app.infisical.com)

There are many approaches to sync secrets stored within Infisical to AWS Amplify. This guide describes two such approaches below.

## Access Infisical secrets at Amplify build time

This approach enables you to fetch secrets from Infisical during Amplify build time.

<Steps>
<Step title="Generate a service token">
Go to your project settings in the Infisical dashboard to generate a [service token](/documentation/platform/token). This service token will allow you to authenticate and fetch secrets from Infisical. Once you have created a service token with the required permissions, you'll need to provide the token to the CLI installed in your Docker container.
</Step>
<Step title="Set the service token as an Amplify environment variable">

1. In the Amplify console, choose App Settings, and then select Environment variables.
2. In the Environment variables section, select Manage variables.
3. Under Variable, enter the key **INFISICAL_TOKEN**. For the value, enter the generated service token from the previous step.
4. Click save.
</Step>
<Step title="Install Infisical CLI to the Amplify build step">
In the prebuild phase, add the command in AWS Amplify to install the Infisical CLI.

```yaml
build:
phases:
preBuild:
commands:
- sudo curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.rpm.sh' | sudo -E bash
- sudo yum install infisical
```
</Step>
<Step title="Modify the build command">
You can now pull secrets from Infisical using the CLI and save them as a `.env` file. To do this, modify the build commands.

```yaml
build:
phases:
build:
commands:
- INFISICAL_TOKEN=${INFISICAL_TOKEN}
- infisical export --format=dotenv > .env
- <rest of the commands>
```
</Step>
</Steps>

## Sync Secrets Using AWS SSM Parameter Store

Another approach to use secrets from Infisical in AWS Amplify is to utilize AWS Parameter Store.
At a high level, you begin by using Infisical's AWS SSM Parameter Store integration to sync secrets from Infisical to AWS SSM Parameter Store. You then instruct AWS Amplify to consume those secrets from AWS SSM Parameter Store as [environment secrets](https://docs.aws.amazon.com/amplify/latest/userguide/environment-variables.html#environment-secrets).

<Steps>
<Step title="Follow the AWS SSM Parameter Store Integration guide">
Follow the [Infisical AWS SSM Parameter Store Integration Guide](./aws-parameter-store) to set up the integration. Pause once you reach the step where it asks you to select the path you would like to sync.
</Step>
<Step title="Find your Amplify App ID">

1. Open your AWS Amplify App console.
2. Go to **Actions >> View App Settings**
3. The App ID will be the last part of the App ARN field after the slash.
</Step>
<Step title="Set AWS SSM Parameter Store path">
You need to set the path in the format `/amplify/[amplify_app_id]/[your-amplify-environment-name]` as the path option in the Infisical AWS SSM Parameter Store integration (see the example below).
</Step>
</Steps>
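To make the path format from the last step concrete, here is a hypothetical example; the App ID and environment name are placeholders, and the AWS CLI call is only an optional way to verify what the integration synced, not part of the original guide:

```bash
# Hypothetical App ID "d1a2b3c4d5e6f7" and Amplify environment/branch "main":
# secrets synced by the Infisical integration would live under /amplify/d1a2b3c4d5e6f7/main

# Optional sanity check that the parameters arrived (requires AWS CLI credentials):
aws ssm get-parameters-by-path --path "/amplify/d1a2b3c4d5e6f7/main" --with-decryption
```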

<Info>
Accessing an environment secret during a build is similar to accessing environment variables, except that environment secrets are stored in `process.env.secrets` as a JSON string.
</Info>
@ -209,12 +209,6 @@
"self-hosting/guides/mongo-to-postgres"
]
},
{
"group": "Reference architectures",
"pages": [
"self-hosting/reference-architectures/aws-ecs"
]
},
"self-hosting/ee",
"self-hosting/faq"
]
@ -232,7 +226,6 @@
"cli/commands/run",
"cli/commands/secrets",
"cli/commands/export",
"cli/commands/token",
"cli/commands/service-token",
"cli/commands/vault",
"cli/commands/user",
@ -284,8 +277,7 @@
"group": "AWS",
"pages": [
"integrations/cloud/aws-parameter-store",
"integrations/cloud/aws-secret-manager",
"integrations/cloud/aws-amplify"
"integrations/cloud/aws-secret-manager"
]
},
"integrations/cloud/vercel",
@ -5,7 +5,7 @@ description: "Learn how to run Postgres schema migrations."

Running schema migrations is a requirement before deploying Infisical.
Each time you decide to upgrade your version of Infisical, it's necessary to run schema migrations for that specific version.
The guide below outlines a step-by-step guide to help you manually run schema migrations for Infisical.
The guide below outlines a step-by-step guide to help you through this process.

### Prerequisites
- Docker installed on your machine
@ -1,56 +0,0 @@
---
title: "AWS ECS"
description: "Reference architecture for self-hosting Infisical on AWS ECS"
---

This guide will provide a high-level architecture design for deploying Infisical on AWS ECS and give insights into the core components, high availability strategies, and secure credential management for Infisical's root secrets.

## Overview

In this guide, we'll focus on running Infisical on AWS Elastic Container Service (ECS) across multiple Availability Zones (AZs), ensuring high availability and resilience.
The architecture utilizes Amazon Relational Database Service (RDS) for persistent storage, ElastiCache for Redis as an in-memory data store for caching, and Amazon Simple Email Service (SES) to handle email-based communications from Infisical.

### Core Components

- **ECS Fargate:** In this architecture, Infisical is deployed on ECS using the Fargate launch type. The ECS services are deployed across multiple Availability Zones to ensure high availability.

- **Amazon RDS:** Infisical uses Postgres as its persistent layer. As such, RDS for PostgreSQL is used as the database engine. The setup includes a primary instance in one AZ and a read replica in another AZ.
This ensures that if there is a failure in one availability zone, the working replica will become the primary and continue processing workloads.

- **Amazon ElastiCache for Redis:** To enhance performance, Infisical requires Redis. In this architecture, Redis is set up with a primary and standby replication group across two AZs to increase availability.

- **Amazon Simple Email Service (SES):** Infisical requires an email service to facilitate outbound communication. AWS SES is integrated into the architecture to handle such communication.

### Network Setup

- **Public Subnets:** Each Availability Zone contains a public subnet. There are two main reasons you might need internet access. First, if you intend to use Infisical to communicate with external secrets managers not located within your virtual private network, enabling internet access is necessary. Second, downloading the Docker image from Docker Hub requires internet access, though this can be avoided by utilizing AWS ECR with VPC Endpoints through AWS Private Link.

- **NAT Gateway:** This is used to route outbound requests from Infisical to the internet and is only used to communicate with external secrets managers and/or to download container images.

### Securing Infisical's root credential

- **Parameter Store:** To secure Infisical's root credentials (database connection string, encryption key, etc), we highly recommend that you use AWS Parameter Store and only allow the tasks running Infisical to access them; an example follows below.
- **AWS Secrets Manager:** We strongly advise securing the master credentials for RDS by utilizing the latest AWS RDS integration with AWS Secrets Manager. This integration automatically stores the master database user's credentials in AWS Secrets Manager, thereby reducing the risk of misplacing the root RDS credential.
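To illustrate the Parameter Store recommendation above, the following is a hypothetical sketch and not part of the original guide; the parameter names and values are placeholders, and the ECS task role would be granted read access (for example `ssm:GetParameters`) scoped to this path only:

```bash
# Hypothetical example: store Infisical's root credentials as SecureString parameters.
aws ssm put-parameter --name "/infisical/prod/ENCRYPTION_KEY" --type SecureString --value "<random-hex-key>"
aws ssm put-parameter --name "/infisical/prod/DB_CONNECTION_URI" --type SecureString --value "<postgres-connection-string>"
```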

### High Availability (HA) and Scalability

- **Multi-AZ Deployment:** By spreading resources across multiple Availability Zones, we ensure that if one AZ experiences issues, traffic can be redirected to the remaining healthy AZ without service interruption.

- **Auto Scaling:** AWS Auto Scaling is in place to adjust capacity to maintain steady and predictable performance at the lowest possible cost.

- **Cross-Region Deployment:** For even greater high availability, you may deploy Infisical across multiple regions. This extends the HA capabilities of the architecture and protects against regional service disruptions.


### Frequently asked questions
<Accordion title="Can Infisical run in an air-gapped environment without any internet access?" defaultOpen >
Yes, Infisical can function in an air-gapped environment. To do so, update your ECS task to use the publicly available AWS Elastic Container Registry (ECR) image instead of the default Docker Hub image. Additionally, it's necessary to configure VPC endpoints, which allows your system to access AWS ECR via a private network route instead of the internet, ensuring all connectivity remains within the secure, private network.
</Accordion>
<Accordion title="Since RDS is in a private subnet, how do you run the Postgres schema migrations?">
Since the Amazon RDS instance is housed within a private network to enhance security, it is not directly accessible from the internet. This means that in order to run the required [Postgres schema migrations](/self-hosting/configuration/schema-migrations), you need to connect to this instance of RDS. There are many approaches you can take:
- To automate schema migrations, you may set up a CI/CD pipeline with access to the same RDS network to run the schema migrations before deploying to ECS. This ensures that if migrations fail, your Infisical instances continue to run.
- If you would like to run the migrations manually, consider using AWS Systems Manager Session Manager to access RDS within the VPC from your local machine.
- If your organization already has mechanisms in place for secure access to the VPC, such as VPNs or Direct Connect, these can also be utilized for performing database migrations manually.
</Accordion>
@ -20,8 +20,7 @@ export enum DynamicSecretProviders {
}

export enum SqlProviders {
Postgres = "postgres",
MySql = "mysql2"
Postgres = "postgres"
}

export type TDynamicSecretProvider = {
@ -35,7 +34,7 @@ export type TDynamicSecretProvider = {
password: string;
creationStatement: string;
revocationStatement: string;
renewStatement?: string;
renewStatement: string;
ca?: string | undefined;
};
};
@ -22,8 +22,7 @@ export const LogsSection = () => {
resolver: yupResolver(auditLogFilterFormSchema),
defaultValues: {
page: 1,
perPage: 10,
startDate: new Date(new Date().setDate(new Date().getDate() - 1))
perPage: 10
}
});
@ -31,7 +31,7 @@ const formSchema = z.object({
password: z.string().min(1),
creationStatement: z.string().min(1),
revocationStatement: z.string().min(1),
renewStatement: z.string().optional(),
renewStatement: z.string().min(1),
ca: z.string().optional()
}),
defaultTTL: z.string().superRefine((val, ctx) => {
@ -66,26 +66,6 @@ type Props = {
environment: string;
};

const getSqlStatements = (provider: SqlProviders) => {
if (provider === SqlProviders.MySql) {
return {
creationStatement:
"CREATE USER \"{{username}}\"@'%' IDENTIFIED BY '{{password}}';\nGRANT ALL ON \"{{database}}\".* TO \"{{username}}\"@'%';",
renewStatement: "",
revocationStatement:
'REVOKE ALL PRIVILEGES ON "{{database}}".* FROM "{{username}}"@\'%\';\nDROP USER "{{username}}"@\'%\';'
};
}

return {
creationStatement:
"CREATE USER \"{{username}}\" WITH ENCRYPTED PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';\nGRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{username}}\";",
renewStatement: "ALTER ROLE \"{{username}}\" VALID UNTIL '{{expiration}}';",
revocationStatement:
'REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM "{{username}}";\nDROP ROLE "{{username}}";'
};
};

export const SqlDatabaseInputForm = ({
onCompleted,
onCancel,
@ -95,13 +75,18 @@ export const SqlDatabaseInputForm = ({
}: Props) => {
const {
control,
setValue,
formState: { isSubmitting },
handleSubmit
} = useForm<TForm>({
resolver: zodResolver(formSchema),
defaultValues: {
provider: getSqlStatements(SqlProviders.Postgres)
provider: {
creationStatement:
"CREATE USER \"{{username}}\" WITH ENCRYPTED PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';\nGRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{username}}\";",
renewStatement: "ALTER ROLE \"{{username}}\" VALID UNTIL '{{expiration}}';",
revocationStatement:
'REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM "{{username}}";\nDROP ROLE "{{username}}";'
}
}
});

@ -197,17 +182,10 @@ export const SqlDatabaseInputForm = ({
<FormControl isError={Boolean(error?.message)} errorText={error?.message}>
<Select
value={value}
onValueChange={(val) => {
onChange(val);
const sqlStatment = getSqlStatements(val as SqlProviders);
setValue("provider.creationStatement", sqlStatment.creationStatement);
setValue("provider.renewStatement", sqlStatment.renewStatement);
setValue("provider.revocationStatement", sqlStatment.revocationStatement);
}}
onValueChange={(val) => onChange(val)}
className="w-full border border-mineshaft-500"
>
<SelectItem value={SqlProviders.Postgres}>PostgreSQL</SelectItem>
<SelectItem value={SqlProviders.MySql}>MySQL</SelectItem>
</Select>
</FormControl>
)}
@ -32,7 +32,7 @@ const formSchema = z.object({
password: z.string().min(1),
creationStatement: z.string().min(1),
revocationStatement: z.string().min(1),
renewStatement: z.string().optional(),
renewStatement: z.string().min(1),
ca: z.string().optional()
})
.partial(),
@ -94,7 +94,7 @@ export const EditDynamicSecretSqlProviderForm = ({
}
}
});

const updateDynamicSecret = useUpdateDynamicSecret();

const handleUpdateDynamicSecret = async ({ inputs, maxTTL, defaultTTL, newName }: TForm) => {
@ -186,13 +186,11 @@ export const EditDynamicSecretSqlProviderForm = ({
render={({ field: { value, onChange }, fieldState: { error } }) => (
<FormControl isError={Boolean(error?.message)} errorText={error?.message}>
<Select
isDisabled
value={value}
onValueChange={(val) => onChange(val)}
className="w-full border border-mineshaft-500"
>
<SelectItem value={SqlProviders.Postgres}>PostgreSQL</SelectItem>
<SelectItem value={SqlProviders.MySql}>MySQL</SelectItem>
</Select>
</FormControl>
)}
@ -223,11 +221,7 @@ export const EditDynamicSecretSqlProviderForm = ({
isError={Boolean(error?.message)}
errorText={error?.message}
>
<Input
{...field}
type="number"
onChange={(el) => field.onChange(parseInt(el.target.value, 10))}
/>
<Input {...field} type="number" onChange={(el) => field.onChange(parseInt(el.target.value, 10))} />
</FormControl>
)}
/>