mirror of
https://github.com/Infisical/infisical.git
synced 2025-07-25 14:07:47 +00:00
Compare commits
108 Commits
fix/postgr
...
fix/confir
Author | SHA1 | Date | |
---|---|---|---|
|
d4bdf04061 | ||
|
4dcb3938e0 | ||
|
f992535812 | ||
|
0c855f3bd4 | ||
|
fa4b133a87 | ||
|
553389af33 | ||
|
4a6e4a90ee | ||
|
ceae1ed0e1 | ||
|
a290ae7767 | ||
|
bdd51d9baf | ||
|
f29593eb60 | ||
|
51d4444c77 | ||
|
1fc217798e | ||
|
7b95d37466 | ||
|
b53504444c | ||
|
193bbf2bf3 | ||
|
adb04737e0 | ||
|
42b039af3e | ||
|
2725e4d9dd | ||
|
b719f2d6ba | ||
|
b413f0f49e | ||
|
058dbc144d | ||
|
56eadb25e7 | ||
|
57ce1be0c7 | ||
|
40c1d32621 | ||
|
8399181e3d | ||
|
3c50291cd3 | ||
|
7884f312cd | ||
|
0dba359f96 | ||
|
de2df991d7 | ||
|
38b9d1f5a5 | ||
|
80743997e1 | ||
|
f025509938 | ||
|
b7b059bb50 | ||
|
f3a8e30548 | ||
|
b0c93e5c4c | ||
|
4ab0da6b03 | ||
|
9674b71df8 | ||
|
be04d3cf3a | ||
|
b7d7b555b2 | ||
|
8f77a3ae0b | ||
|
a064fae94e | ||
|
954ca58e15 | ||
|
cf6b9d8905 | ||
|
e4a28ab0f4 | ||
|
4ab8d680c4 | ||
|
a3b0d86996 | ||
|
1baa40ac8e | ||
|
277b92ddec | ||
|
0080d5f291 | ||
|
d321f6386d | ||
|
a99e7e24cc | ||
|
a276d27451 | ||
|
cec15d6d51 | ||
|
007e10d409 | ||
|
a8b448be0f | ||
|
bfda3776ee | ||
|
e71911c2de | ||
|
f2513b0f17 | ||
|
d0e7af721e | ||
|
c5c2e2619e | ||
|
bc98c42c79 | ||
|
e6bfb6ce2b | ||
|
1c20e4fef0 | ||
|
b560cdb0f8 | ||
|
144143b43a | ||
|
b9a05688cd | ||
|
c06c6c6c61 | ||
|
350afee45e | ||
|
5ae18a691d | ||
|
8187b1da91 | ||
|
0174d36136 | ||
|
968d7420c6 | ||
|
fd761df8e5 | ||
|
61ca617616 | ||
|
6ce6c276cd | ||
|
32b2f7b0fe | ||
|
4c2823c480 | ||
|
60438694e4 | ||
|
fdaf8f9a87 | ||
|
3fe41f81fe | ||
|
c1798d37be | ||
|
01c6d3192d | ||
|
621bfe3e60 | ||
|
67ec00d46b | ||
|
d6c2789d46 | ||
|
58ba0c8ed4 | ||
|
f38c574030 | ||
|
c330d8ca8a | ||
|
2cb0ecc768 | ||
|
ecc15bb432 | ||
|
59c0f1ff08 | ||
|
5110d59bea | ||
|
0e07ebae7b | ||
|
cd84d57025 | ||
|
19cb220107 | ||
|
fce6738562 | ||
|
aab204a68a | ||
|
49afaa4d2d | ||
|
a94a26263a | ||
|
2f9baee210 | ||
|
bd7947c04e | ||
|
7ff8a19518 | ||
|
08dfaaa8b0 | ||
|
221de8beb4 | ||
|
6bb634f5ed | ||
|
706447d5c6 | ||
|
246fe81134 |
11
README.md
11
README.md
@@ -149,11 +149,8 @@ Not sure where to get started? You can:
|
||||
|
||||
- Join our <a href="https://infisical.com/slack">Slack</a>, and ask us any questions there.
|
||||
|
||||
## Resources
|
||||
## We are hiring!
|
||||
|
||||
- [Docs](https://infisical.com/docs/documentation/getting-started/introduction) for comprehensive documentation and guides
|
||||
- [Slack](https://infisical.com/slack) for discussion with the community and Infisical team.
|
||||
- [GitHub](https://github.com/Infisical/infisical) for code, issues, and pull requests
|
||||
- [Twitter](https://twitter.com/infisical) for fast news
|
||||
- [YouTube](https://www.youtube.com/@infisical_os) for videos on secret management
|
||||
- [Blog](https://infisical.com/blog) for secret management insights, articles, tutorials, and updates
|
||||
If you're reading this, there is a strong chance you like the products we created.
|
||||
|
||||
You might also make a great addition to our team. We're growing fast and would love for you to [join us](https://infisical.com/careers).
|
||||
|
@@ -0,0 +1,19 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasColumn(TableName.AppConnection, "gatewayId"))) {
|
||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
||||
t.uuid("gatewayId").nullable();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasColumn(TableName.AppConnection, "gatewayId")) {
|
||||
await knex.schema.alterTable(TableName.AppConnection, (t) => {
|
||||
t.dropColumn("gatewayId");
|
||||
});
|
||||
}
|
||||
}
|
@@ -0,0 +1,21 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
|
||||
if (hasColumn) {
|
||||
await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
|
||||
t.string("allowedPrincipalArns", 4096).notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
|
||||
if (hasColumn) {
|
||||
await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
|
||||
t.string("allowedPrincipalArns", 2048).notNullable().alter();
|
||||
});
|
||||
}
|
||||
}
|
@@ -20,7 +20,8 @@ export const AppConnectionsSchema = z.object({
|
||||
orgId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
isPlatformManagedCredentials: z.boolean().default(false).nullable().optional()
|
||||
isPlatformManagedCredentials: z.boolean().default(false).nullable().optional(),
|
||||
gatewayId: z.string().uuid().nullable().optional()
|
||||
});
|
||||
|
||||
export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
|
||||
|
@@ -3,11 +3,14 @@ import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { isValidFolderName } from "@app/lib/validator";
|
||||
import { readLimit, secretsLimit } from "@app/server/config/rateLimiter";
|
||||
import { SecretNameSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
|
||||
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
|
||||
|
||||
const commitHistoryItemSchema = z.object({
|
||||
id: z.string(),
|
||||
@@ -413,4 +416,166 @@ export const registerPITRouter = async (server: FastifyZodProvider) => {
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/batch/commit",
|
||||
config: {
|
||||
rateLimit: secretsLimit
|
||||
},
|
||||
schema: {
|
||||
hide: true,
|
||||
description: "Commit changes",
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
],
|
||||
body: z.object({
|
||||
projectId: z.string().trim(),
|
||||
environment: z.string().trim(),
|
||||
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
|
||||
message: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1)
|
||||
.max(255)
|
||||
.refine((message) => message.trim() !== "", {
|
||||
message: "Commit message cannot be empty"
|
||||
}),
|
||||
changes: z.object({
|
||||
secrets: z.object({
|
||||
create: z
|
||||
.array(
|
||||
z.object({
|
||||
secretKey: SecretNameSchema,
|
||||
secretValue: z.string().transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim())),
|
||||
secretComment: z.string().trim().optional().default(""),
|
||||
skipMultilineEncoding: z.boolean().optional(),
|
||||
metadata: z.record(z.string()).optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
tagIds: z.string().array().optional()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
update: z
|
||||
.array(
|
||||
z.object({
|
||||
secretKey: SecretNameSchema,
|
||||
newSecretName: SecretNameSchema.optional(),
|
||||
secretValue: z
|
||||
.string()
|
||||
.transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
|
||||
.optional(),
|
||||
secretComment: z.string().trim().optional().default(""),
|
||||
skipMultilineEncoding: z.boolean().optional(),
|
||||
metadata: z.record(z.string()).optional(),
|
||||
secretMetadata: ResourceMetadataSchema.optional(),
|
||||
tagIds: z.string().array().optional()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
delete: z
|
||||
.array(
|
||||
z.object({
|
||||
secretKey: SecretNameSchema
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
}),
|
||||
folders: z.object({
|
||||
create: z
|
||||
.array(
|
||||
z.object({
|
||||
folderName: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((name) => isValidFolderName(name), {
|
||||
message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
|
||||
}),
|
||||
description: z.string().optional()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
update: z
|
||||
.array(
|
||||
z.object({
|
||||
folderName: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((name) => isValidFolderName(name), {
|
||||
message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
|
||||
}),
|
||||
description: z.string().nullable().optional(),
|
||||
id: z.string()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
delete: z
|
||||
.array(
|
||||
z.object({
|
||||
folderName: z
|
||||
.string()
|
||||
.trim()
|
||||
.refine((name) => isValidFolderName(name), {
|
||||
message: "Invalid folder name. Only alphanumeric characters, dashes, and underscores are allowed."
|
||||
}),
|
||||
id: z.string()
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
})
|
||||
})
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const result = await server.services.pit.processNewCommitRaw({
|
||||
actorId: req.permission.id,
|
||||
actor: req.permission.type,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
projectId: req.body.projectId,
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
message: req.body.message,
|
||||
changes: {
|
||||
secrets: req.body.changes.secrets,
|
||||
folders: req.body.changes.folders
|
||||
}
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: req.body.projectId,
|
||||
event: {
|
||||
type: EventType.PIT_PROCESS_NEW_COMMIT_RAW,
|
||||
metadata: {
|
||||
commitId: result.commitId,
|
||||
approvalId: result.approvalId,
|
||||
projectId: req.body.projectId,
|
||||
environment: req.body.environment,
|
||||
secretPath: req.body.secretPath,
|
||||
message: req.body.message
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
for await (const event of result.secretMutationEvents) {
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: req.permission.orgId,
|
||||
projectId: req.body.projectId,
|
||||
event
|
||||
});
|
||||
}
|
||||
|
||||
return { message: "success" };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@@ -6,6 +6,7 @@ import { registerAzureClientSecretRotationRouter } from "./azure-client-secret-r
|
||||
import { registerLdapPasswordRotationRouter } from "./ldap-password-rotation-router";
|
||||
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
|
||||
import { registerMySqlCredentialsRotationRouter } from "./mysql-credentials-rotation-router";
|
||||
import { registerOktaClientSecretRotationRouter } from "./okta-client-secret-rotation-router";
|
||||
import { registerOracleDBCredentialsRotationRouter } from "./oracledb-credentials-rotation-router";
|
||||
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";
|
||||
|
||||
@@ -22,5 +23,6 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
|
||||
[SecretRotation.Auth0ClientSecret]: registerAuth0ClientSecretRotationRouter,
|
||||
[SecretRotation.AzureClientSecret]: registerAzureClientSecretRotationRouter,
|
||||
[SecretRotation.AwsIamUserSecret]: registerAwsIamUserSecretRotationRouter,
|
||||
[SecretRotation.LdapPassword]: registerLdapPasswordRotationRouter
|
||||
[SecretRotation.LdapPassword]: registerLdapPasswordRotationRouter,
|
||||
[SecretRotation.OktaClientSecret]: registerOktaClientSecretRotationRouter
|
||||
};
|
||||
|
@@ -0,0 +1,19 @@
|
||||
import {
|
||||
CreateOktaClientSecretRotationSchema,
|
||||
OktaClientSecretRotationGeneratedCredentialsSchema,
|
||||
OktaClientSecretRotationSchema,
|
||||
UpdateOktaClientSecretRotationSchema
|
||||
} from "@app/ee/services/secret-rotation-v2/okta-client-secret";
|
||||
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
||||
|
||||
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
|
||||
|
||||
export const registerOktaClientSecretRotationRouter = async (server: FastifyZodProvider) =>
|
||||
registerSecretRotationEndpoints({
|
||||
type: SecretRotation.OktaClientSecret,
|
||||
server,
|
||||
responseSchema: OktaClientSecretRotationSchema,
|
||||
createSchema: CreateOktaClientSecretRotationSchema,
|
||||
updateSchema: UpdateOktaClientSecretRotationSchema,
|
||||
generatedCredentialsSchema: OktaClientSecretRotationGeneratedCredentialsSchema
|
||||
});
|
@@ -7,6 +7,7 @@ import { AzureClientSecretRotationListItemSchema } from "@app/ee/services/secret
|
||||
import { LdapPasswordRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
|
||||
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
|
||||
import { MySqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
|
||||
import { OktaClientSecretRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/okta-client-secret";
|
||||
import { OracleDBCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
|
||||
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
|
||||
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
|
||||
@@ -23,7 +24,8 @@ const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
|
||||
Auth0ClientSecretRotationListItemSchema,
|
||||
AzureClientSecretRotationListItemSchema,
|
||||
AwsIamUserSecretRotationListItemSchema,
|
||||
LdapPasswordRotationListItemSchema
|
||||
LdapPasswordRotationListItemSchema,
|
||||
OktaClientSecretRotationListItemSchema
|
||||
]);
|
||||
|
||||
export const registerSecretRotationV2Router = async (server: FastifyZodProvider) => {
|
||||
|
@@ -45,7 +45,10 @@ export const ValidateOracleDBConnectionCredentialsSchema = z.discriminatedUnion(
|
||||
]);
|
||||
|
||||
export const CreateOracleDBConnectionSchema = ValidateOracleDBConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true })
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.OracleDB, {
|
||||
supportsPlatformManagedCredentials: true,
|
||||
supportsGateways: true
|
||||
})
|
||||
);
|
||||
|
||||
export const UpdateOracleDBConnectionSchema = z
|
||||
@@ -54,7 +57,12 @@ export const UpdateOracleDBConnectionSchema = z
|
||||
AppConnections.UPDATE(AppConnection.OracleDB).credentials
|
||||
)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true }));
|
||||
.and(
|
||||
GenericUpdateAppConnectionFieldsSchema(AppConnection.OracleDB, {
|
||||
supportsPlatformManagedCredentials: true,
|
||||
supportsGateways: true
|
||||
})
|
||||
);
|
||||
|
||||
export const OracleDBConnectionListItemSchema = z.object({
|
||||
name: z.literal("OracleDB"),
|
||||
|
@@ -449,6 +449,7 @@ export enum EventType {
|
||||
PIT_REVERT_COMMIT = "pit-revert-commit",
|
||||
PIT_GET_FOLDER_STATE = "pit-get-folder-state",
|
||||
PIT_COMPARE_FOLDER_STATES = "pit-compare-folder-states",
|
||||
PIT_PROCESS_NEW_COMMIT_RAW = "pit-process-new-commit-raw",
|
||||
SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
|
||||
SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
|
||||
SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
|
||||
@@ -1546,8 +1547,9 @@ interface UpdateFolderEvent {
|
||||
metadata: {
|
||||
environment: string;
|
||||
folderId: string;
|
||||
oldFolderName: string;
|
||||
oldFolderName?: string;
|
||||
newFolderName: string;
|
||||
newFolderDescription?: string;
|
||||
folderPath: string;
|
||||
};
|
||||
}
|
||||
@@ -3222,6 +3224,18 @@ interface PitCompareFolderStatesEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface PitProcessNewCommitRawEvent {
|
||||
type: EventType.PIT_PROCESS_NEW_COMMIT_RAW;
|
||||
metadata: {
|
||||
projectId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
message: string;
|
||||
approvalId?: string;
|
||||
commitId?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface SecretScanningDataSourceListEvent {
|
||||
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
|
||||
metadata: {
|
||||
@@ -3658,6 +3672,7 @@ export type Event =
|
||||
| PitRevertCommitEvent
|
||||
| PitCompareFolderStatesEvent
|
||||
| PitGetFolderStateEvent
|
||||
| PitProcessNewCommitRawEvent
|
||||
| SecretScanningDataSourceListEvent
|
||||
| SecretScanningDataSourceGetEvent
|
||||
| SecretScanningDataSourceCreateEvent
|
||||
|
@@ -1,29 +1,52 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { ProjectPermissionCommitsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { NotFoundError } from "@app/lib/errors";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { logger } from "@app/lib/logger";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { ResourceType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
|
||||
import { TFolderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
|
||||
import {
|
||||
ResourceType,
|
||||
TCommitResourceChangeDTO,
|
||||
TFolderCommitServiceFactory
|
||||
} from "@app/services/folder-commit/folder-commit-service";
|
||||
import {
|
||||
isFolderCommitChange,
|
||||
isSecretCommitChange
|
||||
} from "@app/services/folder-commit-changes/folder-commit-changes-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
|
||||
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
|
||||
import { TProcessNewCommitRawDTO } from "@app/services/secret/secret-types";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
|
||||
import { TSecretV2BridgeServiceFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-service";
|
||||
import { SecretOperations, SecretUpdateMode } from "@app/services/secret-v2-bridge/secret-v2-bridge-types";
|
||||
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service-types";
|
||||
import { TSecretApprovalPolicyServiceFactory } from "../secret-approval-policy/secret-approval-policy-service";
|
||||
import { TSecretApprovalRequestServiceFactory } from "../secret-approval-request/secret-approval-request-service";
|
||||
|
||||
type TPitServiceFactoryDep = {
|
||||
folderCommitService: TFolderCommitServiceFactory;
|
||||
secretService: Pick<TSecretServiceFactory, "getSecretVersionsV2ByIds" | "getChangeVersions">;
|
||||
folderService: Pick<TSecretFolderServiceFactory, "getFolderById" | "getFolderVersions">;
|
||||
folderService: Pick<
|
||||
TSecretFolderServiceFactory,
|
||||
"getFolderById" | "getFolderVersions" | "createManyFolders" | "updateManyFolders" | "deleteManyFolders"
|
||||
>;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds">;
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds" | "findBySecretPath">;
|
||||
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
|
||||
secretApprovalRequestService: Pick<
|
||||
TSecretApprovalRequestServiceFactory,
|
||||
"generateSecretApprovalRequest" | "generateSecretApprovalRequestV2Bridge"
|
||||
>;
|
||||
secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
|
||||
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus" | "findProjectBySlug" | "findById">;
|
||||
secretV2BridgeService: TSecretV2BridgeServiceFactory;
|
||||
folderCommitDAL: Pick<TFolderCommitDALFactory, "transaction">;
|
||||
};
|
||||
|
||||
export type TPitServiceFactory = ReturnType<typeof pitServiceFactory>;
|
||||
@@ -34,7 +57,12 @@ export const pitServiceFactory = ({
|
||||
folderService,
|
||||
permissionService,
|
||||
folderDAL,
|
||||
projectEnvDAL
|
||||
projectEnvDAL,
|
||||
secretApprovalRequestService,
|
||||
secretApprovalPolicyService,
|
||||
projectDAL,
|
||||
secretV2BridgeService,
|
||||
folderCommitDAL
|
||||
}: TPitServiceFactoryDep) => {
|
||||
const getCommitsCount = async ({
|
||||
actor,
|
||||
@@ -471,6 +499,347 @@ export const pitServiceFactory = ({
|
||||
});
|
||||
};
|
||||
|
||||
const processNewCommitRaw = async ({
|
||||
actorId,
|
||||
projectId,
|
||||
environment,
|
||||
actor,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
secretPath,
|
||||
message,
|
||||
changes = {
|
||||
secrets: {
|
||||
create: [],
|
||||
update: [],
|
||||
delete: []
|
||||
},
|
||||
folders: {
|
||||
create: [],
|
||||
update: [],
|
||||
delete: []
|
||||
}
|
||||
}
|
||||
}: {
|
||||
actorId: string;
|
||||
projectId: string;
|
||||
environment: string;
|
||||
actor: ActorType;
|
||||
actorOrgId: string;
|
||||
actorAuthMethod: ActorAuthMethod;
|
||||
secretPath: string;
|
||||
message: string;
|
||||
changes: TProcessNewCommitRawDTO;
|
||||
}) => {
|
||||
const policy =
|
||||
actor === ActorType.USER
|
||||
? await secretApprovalPolicyService.getSecretApprovalPolicy(projectId, environment, secretPath)
|
||||
: undefined;
|
||||
const secretMutationEvents: Event[] = [];
|
||||
|
||||
const project = await projectDAL.findById(projectId);
|
||||
if (project.enforceCapitalization) {
|
||||
const caseViolatingSecretKeys = [
|
||||
// Check create operations
|
||||
...(changes.secrets?.create
|
||||
?.filter((sec) => sec.secretKey !== sec.secretKey.toUpperCase())
|
||||
.map((sec) => sec.secretKey) ?? []),
|
||||
|
||||
// Check update operations
|
||||
...(changes.secrets?.update
|
||||
?.filter((sec) => sec.newSecretName && sec.newSecretName !== sec.newSecretName.toUpperCase())
|
||||
.map((sec) => sec.secretKey) ?? [])
|
||||
];
|
||||
|
||||
if (caseViolatingSecretKeys.length) {
|
||||
throw new BadRequestError({
|
||||
message: `Secret names must be in UPPERCASE per project requirements: ${caseViolatingSecretKeys.join(
|
||||
", "
|
||||
)}. You can disable this requirement in project settings`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const response = await folderCommitDAL.transaction(async (trx) => {
|
||||
const targetFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath, trx);
|
||||
if (!targetFolder)
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
|
||||
name: "CreateManySecret"
|
||||
});
|
||||
const commitChanges: TCommitResourceChangeDTO[] = [];
|
||||
const folderChanges: { create: string[]; update: string[]; delete: string[] } = {
|
||||
create: [],
|
||||
update: [],
|
||||
delete: []
|
||||
};
|
||||
|
||||
if ((changes.folders?.create?.length ?? 0) > 0) {
|
||||
const createdFolders = await folderService.createManyFolders({
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
folders:
|
||||
changes.folders?.create?.map((folder) => ({
|
||||
name: folder.folderName,
|
||||
environment,
|
||||
path: secretPath,
|
||||
description: folder.description
|
||||
})) ?? [],
|
||||
tx: trx,
|
||||
commitChanges
|
||||
});
|
||||
const newFolderEvents = createdFolders.folders.map(
|
||||
(folder) =>
|
||||
({
|
||||
type: EventType.CREATE_FOLDER,
|
||||
metadata: {
|
||||
environment,
|
||||
folderId: folder.id,
|
||||
folderName: folder.name,
|
||||
folderPath: secretPath,
|
||||
...(folder.description ? { description: folder.description } : {})
|
||||
}
|
||||
}) as Event
|
||||
);
|
||||
secretMutationEvents.push(...newFolderEvents);
|
||||
folderChanges.create.push(...createdFolders.folders.map((folder) => folder.id));
|
||||
}
|
||||
|
||||
if ((changes.folders?.update?.length ?? 0) > 0) {
|
||||
const updatedFolders = await folderService.updateManyFolders({
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
folders:
|
||||
changes.folders?.update?.map((folder) => ({
|
||||
environment,
|
||||
path: secretPath,
|
||||
id: folder.id,
|
||||
name: folder.folderName,
|
||||
description: folder.description
|
||||
})) ?? [],
|
||||
tx: trx,
|
||||
commitChanges
|
||||
});
|
||||
|
||||
const updatedFolderEvents = updatedFolders.newFolders.map(
|
||||
(folder) =>
|
||||
({
|
||||
type: EventType.UPDATE_FOLDER,
|
||||
metadata: {
|
||||
environment,
|
||||
folderId: folder.id,
|
||||
folderPath: secretPath,
|
||||
newFolderName: folder.name,
|
||||
newFolderDescription: folder.description
|
||||
}
|
||||
}) as Event
|
||||
);
|
||||
secretMutationEvents.push(...updatedFolderEvents);
|
||||
folderChanges.update.push(...updatedFolders.newFolders.map((folder) => folder.id));
|
||||
}
|
||||
|
||||
if ((changes.folders?.delete?.length ?? 0) > 0) {
|
||||
const deletedFolders = await folderService.deleteManyFolders({
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
folders:
|
||||
changes.folders?.delete?.map((folder) => ({
|
||||
environment,
|
||||
path: secretPath,
|
||||
idOrName: folder.id
|
||||
})) ?? [],
|
||||
tx: trx,
|
||||
commitChanges
|
||||
});
|
||||
const deletedFolderEvents = deletedFolders.folders.map(
|
||||
(folder) =>
|
||||
({
|
||||
type: EventType.DELETE_FOLDER,
|
||||
metadata: {
|
||||
environment,
|
||||
folderId: folder.id,
|
||||
folderPath: secretPath,
|
||||
folderName: folder.name
|
||||
}
|
||||
}) as Event
|
||||
);
|
||||
secretMutationEvents.push(...deletedFolderEvents);
|
||||
folderChanges.delete.push(...deletedFolders.folders.map((folder) => folder.id));
|
||||
}
|
||||
|
||||
if (policy) {
|
||||
if (
|
||||
(changes.secrets?.create?.length ?? 0) > 0 ||
|
||||
(changes.secrets?.update?.length ?? 0) > 0 ||
|
||||
(changes.secrets?.delete?.length ?? 0) > 0
|
||||
) {
|
||||
const approval = await secretApprovalRequestService.generateSecretApprovalRequestV2Bridge({
|
||||
policy,
|
||||
secretPath,
|
||||
environment,
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
data: {
|
||||
[SecretOperations.Create]:
|
||||
changes.secrets?.create?.map((el) => ({
|
||||
tagIds: el.tagIds,
|
||||
secretValue: el.secretValue,
|
||||
secretComment: el.secretComment,
|
||||
metadata: el.metadata,
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
secretKey: el.secretKey,
|
||||
secretMetadata: el.secretMetadata
|
||||
})) ?? [],
|
||||
[SecretOperations.Update]:
|
||||
changes.secrets?.update?.map((el) => ({
|
||||
tagIds: el.tagIds,
|
||||
newSecretName: el.newSecretName,
|
||||
secretValue: el.secretValue,
|
||||
secretComment: el.secretComment,
|
||||
metadata: el.metadata,
|
||||
skipMultilineEncoding: el.skipMultilineEncoding,
|
||||
secretKey: el.secretKey,
|
||||
secretMetadata: el.secretMetadata
|
||||
})) ?? [],
|
||||
[SecretOperations.Delete]:
|
||||
changes.secrets?.delete?.map((el) => ({
|
||||
secretKey: el.secretKey
|
||||
})) ?? []
|
||||
}
|
||||
});
|
||||
return {
|
||||
approvalId: approval.id,
|
||||
folderChanges,
|
||||
secretMutationEvents
|
||||
};
|
||||
}
|
||||
return {
|
||||
folderChanges,
|
||||
secretMutationEvents
|
||||
};
|
||||
}
|
||||
|
||||
if ((changes.secrets?.create?.length ?? 0) > 0) {
|
||||
const newSecrets = await secretV2BridgeService.createManySecret({
|
||||
secretPath,
|
||||
environment,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actor,
|
||||
actorId,
|
||||
secrets: changes.secrets?.create ?? [],
|
||||
tx: trx,
|
||||
commitChanges
|
||||
});
|
||||
secretMutationEvents.push({
|
||||
type: EventType.CREATE_SECRETS,
|
||||
metadata: {
|
||||
environment,
|
||||
secretPath,
|
||||
secrets: newSecrets.map((secret) => ({
|
||||
secretId: secret.id,
|
||||
secretKey: secret.secretKey,
|
||||
secretVersion: secret.version
|
||||
}))
|
||||
}
|
||||
});
|
||||
}
|
||||
if ((changes.secrets?.update?.length ?? 0) > 0) {
|
||||
const updatedSecrets = await secretV2BridgeService.updateManySecret({
|
||||
secretPath,
|
||||
environment,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actor,
|
||||
actorId,
|
||||
secrets: changes.secrets?.update ?? [],
|
||||
mode: SecretUpdateMode.FailOnNotFound,
|
||||
tx: trx,
|
||||
commitChanges
|
||||
});
|
||||
secretMutationEvents.push({
|
||||
type: EventType.UPDATE_SECRETS,
|
||||
metadata: {
|
||||
environment,
|
||||
secretPath,
|
||||
secrets: updatedSecrets.map((secret) => ({
|
||||
secretId: secret.id,
|
||||
secretKey: secret.secretKey,
|
||||
secretVersion: secret.version
|
||||
}))
|
||||
}
|
||||
});
|
||||
}
|
||||
if ((changes.secrets?.delete?.length ?? 0) > 0) {
|
||||
const deletedSecrets = await secretV2BridgeService.deleteManySecret({
|
||||
secretPath,
|
||||
environment,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actor,
|
||||
actorId,
|
||||
secrets: changes.secrets?.delete ?? [],
|
||||
tx: trx,
|
||||
commitChanges
|
||||
});
|
||||
secretMutationEvents.push({
|
||||
type: EventType.DELETE_SECRETS,
|
||||
metadata: {
|
||||
environment,
|
||||
secretPath,
|
||||
secrets: deletedSecrets.map((secret) => ({
|
||||
secretId: secret.id,
|
||||
secretKey: secret.secretKey,
|
||||
secretVersion: secret.version
|
||||
}))
|
||||
}
|
||||
});
|
||||
}
|
||||
if (commitChanges?.length > 0) {
|
||||
const commit = await folderCommitService.createCommit(
|
||||
{
|
||||
actor: {
|
||||
type: actor || ActorType.PLATFORM,
|
||||
metadata: {
|
||||
id: actorId
|
||||
}
|
||||
},
|
||||
message,
|
||||
folderId: targetFolder.id,
|
||||
changes: commitChanges
|
||||
},
|
||||
trx
|
||||
);
|
||||
return {
|
||||
folderChanges,
|
||||
commitId: commit?.id,
|
||||
secretMutationEvents
|
||||
};
|
||||
}
|
||||
return {
|
||||
folderChanges,
|
||||
secretMutationEvents
|
||||
};
|
||||
});
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
return {
|
||||
getCommitsCount,
|
||||
getCommitsForFolder,
|
||||
@@ -478,6 +847,7 @@ export const pitServiceFactory = ({
|
||||
compareCommitChanges,
|
||||
rollbackToCommit,
|
||||
revertCommit,
|
||||
getFolderStateAtCommit
|
||||
getFolderStateAtCommit,
|
||||
processNewCommitRaw
|
||||
};
|
||||
};
|
||||
|
@@ -410,7 +410,7 @@ export const samlConfigServiceFactory = ({
|
||||
}
|
||||
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
|
||||
|
||||
const isUserCompleted = Boolean(user.isAccepted);
|
||||
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
|
||||
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
|
||||
const providerAuthToken = crypto.jwt().sign(
|
||||
{
|
||||
|
@@ -1,5 +1,6 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { Knex } from "knex";
|
||||
|
||||
import {
|
||||
ProjectMembershipRole,
|
||||
@@ -1368,8 +1369,9 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
policy,
|
||||
projectId,
|
||||
secretPath,
|
||||
environment
|
||||
}: TGenerateSecretApprovalRequestV2BridgeDTO) => {
|
||||
environment,
|
||||
trx: providedTx
|
||||
}: TGenerateSecretApprovalRequestV2BridgeDTO & { trx?: Knex }) => {
|
||||
if (actor === ActorType.SERVICE || actor === ActorType.Machine)
|
||||
throw new BadRequestError({ message: "Cannot use service token or machine token over protected branches" });
|
||||
|
||||
@@ -1595,7 +1597,7 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
);
|
||||
});
|
||||
|
||||
const secretApprovalRequest = await secretApprovalRequestDAL.transaction(async (tx) => {
|
||||
const executeApprovalRequestCreation = async (tx: Knex) => {
|
||||
const doc = await secretApprovalRequestDAL.create(
|
||||
{
|
||||
folderId,
|
||||
@@ -1657,7 +1659,11 @@ export const secretApprovalRequestServiceFactory = ({
|
||||
}
|
||||
|
||||
return { ...doc, commits: approvalCommits };
|
||||
});
|
||||
};
|
||||
|
||||
const secretApprovalRequest = providedTx
|
||||
? await executeApprovalRequestCreation(providedTx)
|
||||
: await secretApprovalRequestDAL.transaction(executeApprovalRequestCreation);
|
||||
|
||||
const user = await userDAL.findById(actorId);
|
||||
const env = await projectEnvDAL.findOne({ id: policy.envId });
|
||||
|
@@ -0,0 +1,3 @@
|
||||
export * from "./okta-client-secret-rotation-constants";
|
||||
export * from "./okta-client-secret-rotation-schemas";
|
||||
export * from "./okta-client-secret-rotation-types";
|
@@ -0,0 +1,15 @@
|
||||
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
||||
import { TSecretRotationV2ListItem } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
|
||||
export const OKTA_CLIENT_SECRET_ROTATION_LIST_OPTION: TSecretRotationV2ListItem = {
|
||||
name: "Okta Client Secret",
|
||||
type: SecretRotation.OktaClientSecret,
|
||||
connection: AppConnection.Okta,
|
||||
template: {
|
||||
secretsMapping: {
|
||||
clientId: "OKTA_CLIENT_ID",
|
||||
clientSecret: "OKTA_CLIENT_SECRET"
|
||||
}
|
||||
}
|
||||
};
|
@@ -0,0 +1,273 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { AxiosError } from "axios";
|
||||
|
||||
import {
|
||||
TRotationFactory,
|
||||
TRotationFactoryGetSecretsPayload,
|
||||
TRotationFactoryIssueCredentials,
|
||||
TRotationFactoryRevokeCredentials,
|
||||
TRotationFactoryRotateCredentials
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
|
||||
import { request } from "@app/lib/config/request";
|
||||
import { delay as delayMs } from "@app/lib/delay";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { getOktaInstanceUrl } from "@app/services/app-connection/okta";
|
||||
|
||||
import {
|
||||
TOktaClientSecret,
|
||||
TOktaClientSecretRotationGeneratedCredentials,
|
||||
TOktaClientSecretRotationWithConnection
|
||||
} from "./okta-client-secret-rotation-types";
|
||||
|
||||
type OktaErrorResponse = { errorCode: string; errorSummary: string; errorCauses?: { errorSummary: string }[] };
|
||||
|
||||
const isOktaErrorResponse = (data: unknown): data is OktaErrorResponse => {
|
||||
return (
|
||||
typeof data === "object" &&
|
||||
data !== null &&
|
||||
"errorSummary" in data &&
|
||||
typeof (data as OktaErrorResponse).errorSummary === "string"
|
||||
);
|
||||
};
|
||||
|
||||
const createErrorMessage = (error: unknown) => {
|
||||
if (error instanceof AxiosError) {
|
||||
if (error.response?.data && isOktaErrorResponse(error.response.data)) {
|
||||
const oktaError = error.response.data;
|
||||
if (oktaError.errorCauses && oktaError.errorCauses.length > 0) {
|
||||
return oktaError.errorCauses[0].errorSummary;
|
||||
}
|
||||
return oktaError.errorSummary;
|
||||
}
|
||||
if (error.message) {
|
||||
return error.message;
|
||||
}
|
||||
}
|
||||
return "Unknown error";
|
||||
};
|
||||
|
||||
// Delay between each revocation call in revokeCredentials
|
||||
const DELAY_MS = 1000;
|
||||
|
||||
export const oktaClientSecretRotationFactory: TRotationFactory<
|
||||
TOktaClientSecretRotationWithConnection,
|
||||
TOktaClientSecretRotationGeneratedCredentials
|
||||
> = (secretRotation) => {
|
||||
const {
|
||||
connection,
|
||||
parameters: { clientId },
|
||||
secretsMapping
|
||||
} = secretRotation;
|
||||
|
||||
/**
|
||||
* Creates a new client secret for the Okta app.
|
||||
*/
|
||||
const $rotateClientSecret = async () => {
|
||||
const instanceUrl = await getOktaInstanceUrl(connection);
|
||||
|
||||
try {
|
||||
const { data } = await request.post<TOktaClientSecret>(
|
||||
`${instanceUrl}/api/v1/apps/${clientId}/credentials/secrets`,
|
||||
{},
|
||||
{
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
Authorization: `SSWS ${connection.credentials.apiToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
if (!data.client_secret || !data.id) {
|
||||
throw new Error("Invalid response from Okta: missing 'client_secret' or secret 'id'.");
|
||||
}
|
||||
|
||||
return {
|
||||
clientSecret: data.client_secret,
|
||||
secretId: data.id,
|
||||
clientId
|
||||
};
|
||||
} catch (error: unknown) {
|
||||
if (
|
||||
error instanceof AxiosError &&
|
||||
error.response?.data &&
|
||||
isOktaErrorResponse(error.response.data) &&
|
||||
error.response.data.errorCode === "E0000001"
|
||||
) {
|
||||
// Okta has a maximum of 2 secrets per app, thus we must warn the users in case they already have 2
|
||||
throw new BadRequestError({
|
||||
message: `Failed to add client secret to Okta app ${clientId}: You must have only a single secret for the Okta app prior to creating this secret rotation.`
|
||||
});
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to add client secret to Okta app ${clientId}: ${createErrorMessage(error)}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* List client secrets.
|
||||
*/
|
||||
const $listClientSecrets = async () => {
|
||||
const instanceUrl = await getOktaInstanceUrl(connection);
|
||||
|
||||
try {
|
||||
const { data } = await request.get<TOktaClientSecret[]>(
|
||||
`${instanceUrl}/api/v1/apps/${clientId}/credentials/secrets`,
|
||||
{
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
Authorization: `SSWS ${connection.credentials.apiToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return data;
|
||||
} catch (error: unknown) {
|
||||
throw new BadRequestError({
|
||||
message: `Failed to list client secrets for Okta app ${clientId}: ${createErrorMessage(error)}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if a credential with the given secretId exists.
|
||||
*/
|
||||
const credentialExists = async (secretId: string): Promise<boolean> => {
|
||||
const instanceUrl = await getOktaInstanceUrl(connection);
|
||||
|
||||
try {
|
||||
const { data } = await request.get<TOktaClientSecret>(
|
||||
`${instanceUrl}/api/v1/apps/${clientId}/credentials/secrets/${secretId}`,
|
||||
{
|
||||
headers: {
|
||||
Accept: "application/json",
|
||||
Authorization: `SSWS ${connection.credentials.apiToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return data.id === secretId;
|
||||
} catch (_) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Revokes a client secret from the Okta app using its secretId.
|
||||
* First checks if the credential exists before attempting revocation.
|
||||
*/
|
||||
const revokeCredential = async (secretId: string) => {
|
||||
// Check if credential exists before attempting revocation
|
||||
const exists = await credentialExists(secretId);
|
||||
if (!exists) {
|
||||
return; // Credential doesn't exist, nothing to revoke
|
||||
}
|
||||
|
||||
const instanceUrl = await getOktaInstanceUrl(connection);
|
||||
|
||||
try {
|
||||
// First deactivate the secret
|
||||
await request.post(
|
||||
`${instanceUrl}/api/v1/apps/${clientId}/credentials/secrets/${secretId}/lifecycle/deactivate`,
|
||||
undefined,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `SSWS ${connection.credentials.apiToken}`
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
// Then delete it
|
||||
await request.delete(`${instanceUrl}/api/v1/apps/${clientId}/credentials/secrets/${secretId}`, {
|
||||
headers: {
|
||||
Authorization: `SSWS ${connection.credentials.apiToken}`
|
||||
}
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
if (
|
||||
error instanceof AxiosError &&
|
||||
error.response?.data &&
|
||||
isOktaErrorResponse(error.response.data) &&
|
||||
error.response.data.errorCode === "E0000001"
|
||||
) {
|
||||
// If this is the last secret, we cannot revoke it
|
||||
return;
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Failed to remove client secret with secretId ${secretId} from app ${clientId}: ${createErrorMessage(error)}`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Issues a new set of credentials.
|
||||
*/
|
||||
const issueCredentials: TRotationFactoryIssueCredentials<TOktaClientSecretRotationGeneratedCredentials> = async (
|
||||
callback
|
||||
) => {
|
||||
const credentials = await $rotateClientSecret();
|
||||
return callback(credentials);
|
||||
};
|
||||
|
||||
/**
|
||||
* Revokes a list of credentials.
|
||||
*/
|
||||
const revokeCredentials: TRotationFactoryRevokeCredentials<TOktaClientSecretRotationGeneratedCredentials> = async (
|
||||
credentials,
|
||||
callback
|
||||
) => {
|
||||
if (!credentials?.length) return callback();
|
||||
|
||||
for (const { secretId } of credentials) {
|
||||
await revokeCredential(secretId);
|
||||
await delayMs(DELAY_MS);
|
||||
}
|
||||
return callback();
|
||||
};
|
||||
|
||||
/**
|
||||
* Rotates credentials by issuing new ones and revoking the old.
|
||||
*/
|
||||
const rotateCredentials: TRotationFactoryRotateCredentials<TOktaClientSecretRotationGeneratedCredentials> = async (
|
||||
oldCredentials,
|
||||
callback,
|
||||
activeCredentials
|
||||
) => {
|
||||
// Since in Okta you can only have a maximum of 2 secrets at a time, we must delete any other secret besides the current one PRIOR to generating the second secret
|
||||
if (oldCredentials?.secretId) {
|
||||
await revokeCredential(oldCredentials.secretId);
|
||||
} else if (activeCredentials) {
|
||||
// On the first rotation oldCredentials won't be set so we must find the second secret manually
|
||||
const secrets = await $listClientSecrets();
|
||||
|
||||
if (secrets.length > 1) {
|
||||
const nonActiveSecret = secrets.find((secret) => secret.id !== activeCredentials.secretId);
|
||||
if (nonActiveSecret) {
|
||||
await revokeCredential(nonActiveSecret.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const newCredentials = await $rotateClientSecret();
|
||||
return callback(newCredentials);
|
||||
};
|
||||
|
||||
/**
|
||||
* Maps the generated credentials into the secret payload format.
|
||||
*/
|
||||
const getSecretsPayload: TRotationFactoryGetSecretsPayload<TOktaClientSecretRotationGeneratedCredentials> = ({
|
||||
clientSecret
|
||||
}) => [
|
||||
{ key: secretsMapping.clientId, value: clientId },
|
||||
{ key: secretsMapping.clientSecret, value: clientSecret }
|
||||
];
|
||||
|
||||
return {
|
||||
issueCredentials,
|
||||
revokeCredentials,
|
||||
rotateCredentials,
|
||||
getSecretsPayload
|
||||
};
|
||||
};
|
@@ -0,0 +1,68 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
|
||||
import {
|
||||
BaseCreateSecretRotationSchema,
|
||||
BaseSecretRotationSchema,
|
||||
BaseUpdateSecretRotationSchema
|
||||
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-schemas";
|
||||
import { SecretRotations } from "@app/lib/api-docs";
|
||||
import { SecretNameSchema } from "@app/server/lib/schemas";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
|
||||
export const OktaClientSecretRotationGeneratedCredentialsSchema = z
|
||||
.object({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string(),
|
||||
secretId: z.string()
|
||||
})
|
||||
.array()
|
||||
.min(1)
|
||||
.max(2);
|
||||
|
||||
const OktaClientSecretRotationParametersSchema = z.object({
|
||||
clientId: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Client ID Required")
|
||||
.describe(SecretRotations.PARAMETERS.OKTA_CLIENT_SECRET.clientId)
|
||||
});
|
||||
|
||||
const OktaClientSecretRotationSecretsMappingSchema = z.object({
|
||||
clientId: SecretNameSchema.describe(SecretRotations.SECRETS_MAPPING.OKTA_CLIENT_SECRET.clientId),
|
||||
clientSecret: SecretNameSchema.describe(SecretRotations.SECRETS_MAPPING.OKTA_CLIENT_SECRET.clientSecret)
|
||||
});
|
||||
|
||||
export const OktaClientSecretRotationTemplateSchema = z.object({
|
||||
secretsMapping: z.object({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string()
|
||||
})
|
||||
});
|
||||
|
||||
export const OktaClientSecretRotationSchema = BaseSecretRotationSchema(SecretRotation.OktaClientSecret).extend({
|
||||
type: z.literal(SecretRotation.OktaClientSecret),
|
||||
parameters: OktaClientSecretRotationParametersSchema,
|
||||
secretsMapping: OktaClientSecretRotationSecretsMappingSchema
|
||||
});
|
||||
|
||||
export const CreateOktaClientSecretRotationSchema = BaseCreateSecretRotationSchema(
|
||||
SecretRotation.OktaClientSecret
|
||||
).extend({
|
||||
parameters: OktaClientSecretRotationParametersSchema,
|
||||
secretsMapping: OktaClientSecretRotationSecretsMappingSchema
|
||||
});
|
||||
|
||||
export const UpdateOktaClientSecretRotationSchema = BaseUpdateSecretRotationSchema(
|
||||
SecretRotation.OktaClientSecret
|
||||
).extend({
|
||||
parameters: OktaClientSecretRotationParametersSchema.optional(),
|
||||
secretsMapping: OktaClientSecretRotationSecretsMappingSchema.optional()
|
||||
});
|
||||
|
||||
export const OktaClientSecretRotationListItemSchema = z.object({
|
||||
name: z.literal("Okta Client Secret"),
|
||||
connection: z.literal(AppConnection.Okta),
|
||||
type: z.literal(SecretRotation.OktaClientSecret),
|
||||
template: OktaClientSecretRotationTemplateSchema
|
||||
});
|
@@ -0,0 +1,40 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { TOktaConnection } from "@app/services/app-connection/okta";
|
||||
|
||||
import {
|
||||
CreateOktaClientSecretRotationSchema,
|
||||
OktaClientSecretRotationGeneratedCredentialsSchema,
|
||||
OktaClientSecretRotationListItemSchema,
|
||||
OktaClientSecretRotationSchema
|
||||
} from "./okta-client-secret-rotation-schemas";
|
||||
|
||||
export type TOktaClientSecretRotation = z.infer<typeof OktaClientSecretRotationSchema>;
|
||||
|
||||
export type TOktaClientSecretRotationInput = z.infer<typeof CreateOktaClientSecretRotationSchema>;
|
||||
|
||||
export type TOktaClientSecretRotationListItem = z.infer<typeof OktaClientSecretRotationListItemSchema>;
|
||||
|
||||
export type TOktaClientSecretRotationWithConnection = TOktaClientSecretRotation & {
|
||||
connection: TOktaConnection;
|
||||
};
|
||||
|
||||
export type TOktaClientSecretRotationGeneratedCredentials = z.infer<
|
||||
typeof OktaClientSecretRotationGeneratedCredentialsSchema
|
||||
>;
|
||||
|
||||
export interface TOktaClientSecretRotationParameters {
|
||||
clientId: string;
|
||||
secretId: string;
|
||||
}
|
||||
|
||||
export interface TOktaClientSecretRotationSecretsMapping {
|
||||
clientId: string;
|
||||
clientSecret: string;
|
||||
secretId: string;
|
||||
}
|
||||
|
||||
export interface TOktaClientSecret {
|
||||
id: string;
|
||||
client_secret: string;
|
||||
}
|
@@ -6,7 +6,8 @@ export enum SecretRotation {
|
||||
Auth0ClientSecret = "auth0-client-secret",
|
||||
AzureClientSecret = "azure-client-secret",
|
||||
AwsIamUserSecret = "aws-iam-user-secret",
|
||||
LdapPassword = "ldap-password"
|
||||
LdapPassword = "ldap-password",
|
||||
OktaClientSecret = "okta-client-secret"
|
||||
}
|
||||
|
||||
export enum SecretRotationStatus {
|
||||
|
@@ -10,6 +10,7 @@ import { AZURE_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./azure-client-secret"
|
||||
import { LDAP_PASSWORD_ROTATION_LIST_OPTION, TLdapPasswordRotation } from "./ldap-password";
|
||||
import { MSSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mssql-credentials";
|
||||
import { MYSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mysql-credentials";
|
||||
import { OKTA_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./okta-client-secret";
|
||||
import { ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION } from "./oracledb-credentials";
|
||||
import { POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION } from "./postgres-credentials";
|
||||
import { SecretRotation, SecretRotationStatus } from "./secret-rotation-v2-enums";
|
||||
@@ -30,7 +31,8 @@ const SECRET_ROTATION_LIST_OPTIONS: Record<SecretRotation, TSecretRotationV2List
|
||||
[SecretRotation.Auth0ClientSecret]: AUTH0_CLIENT_SECRET_ROTATION_LIST_OPTION,
|
||||
[SecretRotation.AzureClientSecret]: AZURE_CLIENT_SECRET_ROTATION_LIST_OPTION,
|
||||
[SecretRotation.AwsIamUserSecret]: AWS_IAM_USER_SECRET_ROTATION_LIST_OPTION,
|
||||
[SecretRotation.LdapPassword]: LDAP_PASSWORD_ROTATION_LIST_OPTION
|
||||
[SecretRotation.LdapPassword]: LDAP_PASSWORD_ROTATION_LIST_OPTION,
|
||||
[SecretRotation.OktaClientSecret]: OKTA_CLIENT_SECRET_ROTATION_LIST_OPTION
|
||||
};
|
||||
|
||||
export const listSecretRotationOptions = () => {
|
||||
|
@@ -9,7 +9,8 @@ export const SECRET_ROTATION_NAME_MAP: Record<SecretRotation, string> = {
|
||||
[SecretRotation.Auth0ClientSecret]: "Auth0 Client Secret",
|
||||
[SecretRotation.AzureClientSecret]: "Azure Client Secret",
|
||||
[SecretRotation.AwsIamUserSecret]: "AWS IAM User Secret",
|
||||
[SecretRotation.LdapPassword]: "LDAP Password"
|
||||
[SecretRotation.LdapPassword]: "LDAP Password",
|
||||
[SecretRotation.OktaClientSecret]: "Okta Client Secret"
|
||||
};
|
||||
|
||||
export const SECRET_ROTATION_CONNECTION_MAP: Record<SecretRotation, AppConnection> = {
|
||||
@@ -20,5 +21,6 @@ export const SECRET_ROTATION_CONNECTION_MAP: Record<SecretRotation, AppConnectio
|
||||
[SecretRotation.Auth0ClientSecret]: AppConnection.Auth0,
|
||||
[SecretRotation.AzureClientSecret]: AppConnection.AzureClientSecrets,
|
||||
[SecretRotation.AwsIamUserSecret]: AppConnection.AWS,
|
||||
[SecretRotation.LdapPassword]: AppConnection.LDAP
|
||||
[SecretRotation.LdapPassword]: AppConnection.LDAP,
|
||||
[SecretRotation.OktaClientSecret]: AppConnection.Okta
|
||||
};
|
||||
|
@@ -4,6 +4,7 @@ import isEqual from "lodash.isequal";
|
||||
|
||||
import { SecretType, TableName } from "@app/db/schemas";
|
||||
import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { hasSecretReadValueOrDescribePermission } from "@app/ee/services/permission/permission-fns";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
||||
@@ -82,6 +83,7 @@ import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secre
|
||||
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
|
||||
|
||||
import { awsIamUserSecretRotationFactory } from "./aws-iam-user-secret/aws-iam-user-secret-rotation-fns";
|
||||
import { oktaClientSecretRotationFactory } from "./okta-client-secret/okta-client-secret-rotation-fns";
|
||||
import { TSecretRotationV2DALFactory } from "./secret-rotation-v2-dal";
|
||||
|
||||
export type TSecretRotationV2ServiceFactoryDep = {
|
||||
@@ -107,6 +109,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
|
||||
queueService: Pick<TQueueServiceFactory, "queuePg">;
|
||||
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
|
||||
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
|
||||
};
|
||||
|
||||
export type TSecretRotationV2ServiceFactory = ReturnType<typeof secretRotationV2ServiceFactory>;
|
||||
@@ -126,7 +129,8 @@ const SECRET_ROTATION_FACTORY_MAP: Record<SecretRotation, TRotationFactoryImplem
|
||||
[SecretRotation.Auth0ClientSecret]: auth0ClientSecretRotationFactory as TRotationFactoryImplementation,
|
||||
[SecretRotation.AzureClientSecret]: azureClientSecretRotationFactory as TRotationFactoryImplementation,
|
||||
[SecretRotation.AwsIamUserSecret]: awsIamUserSecretRotationFactory as TRotationFactoryImplementation,
|
||||
[SecretRotation.LdapPassword]: ldapPasswordRotationFactory as TRotationFactoryImplementation
|
||||
[SecretRotation.LdapPassword]: ldapPasswordRotationFactory as TRotationFactoryImplementation,
|
||||
[SecretRotation.OktaClientSecret]: oktaClientSecretRotationFactory as TRotationFactoryImplementation
|
||||
};
|
||||
|
||||
export const secretRotationV2ServiceFactory = ({
|
||||
@@ -148,7 +152,8 @@ export const secretRotationV2ServiceFactory = ({
|
||||
keyStore,
|
||||
queueService,
|
||||
folderCommitService,
appConnectionDAL
appConnectionDAL,
gatewayService
}: TSecretRotationV2ServiceFactoryDep) => {
const $queueSendSecretRotationStatusNotification = async (secretRotation: TSecretRotationV2Raw) => {
const appCfg = getConfig();
@@ -461,7 +466,8 @@ export const secretRotationV2ServiceFactory = ({
rotationInterval: payload.rotationInterval
} as TSecretRotationV2WithConnection,
appConnectionDAL,
kmsService
kmsService,
gatewayService
);

// even though we have a db constraint we want to check before any rotation of credentials is attempted
@@ -824,7 +830,8 @@ export const secretRotationV2ServiceFactory = ({
connection: appConnection
} as TSecretRotationV2WithConnection,
appConnectionDAL,
kmsService
kmsService,
gatewayService
);

const generatedCredentials = await decryptSecretRotationCredentials({
@@ -907,7 +914,8 @@ export const secretRotationV2ServiceFactory = ({
connection: appConnection
} as TSecretRotationV2WithConnection,
appConnectionDAL,
kmsService
kmsService,
gatewayService
);

const updatedRotation = await rotationFactory.rotateCredentials(

@@ -1,4 +1,5 @@
import { AuditLogInfo } from "@app/ee/services/audit-log/audit-log-types";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TSqlCredentialsRotationGeneratedCredentials } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials/sql-credentials-rotation-types";
import { OrderByDirection } from "@app/lib/types";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
@@ -45,6 +46,13 @@ import {
TMySqlCredentialsRotationListItem,
TMySqlCredentialsRotationWithConnection
} from "./mysql-credentials";
import {
TOktaClientSecretRotation,
TOktaClientSecretRotationGeneratedCredentials,
TOktaClientSecretRotationInput,
TOktaClientSecretRotationListItem,
TOktaClientSecretRotationWithConnection
} from "./okta-client-secret";
import {
TOracleDBCredentialsRotation,
TOracleDBCredentialsRotationInput,
@@ -68,7 +76,8 @@ export type TSecretRotationV2 =
| TAuth0ClientSecretRotation
| TAzureClientSecretRotation
| TLdapPasswordRotation
| TAwsIamUserSecretRotation;
| TAwsIamUserSecretRotation
| TOktaClientSecretRotation;

export type TSecretRotationV2WithConnection =
| TPostgresCredentialsRotationWithConnection
@@ -78,14 +87,16 @@ export type TSecretRotationV2WithConnection =
| TAuth0ClientSecretRotationWithConnection
| TAzureClientSecretRotationWithConnection
| TLdapPasswordRotationWithConnection
| TAwsIamUserSecretRotationWithConnection;
| TAwsIamUserSecretRotationWithConnection
| TOktaClientSecretRotationWithConnection;

export type TSecretRotationV2GeneratedCredentials =
| TSqlCredentialsRotationGeneratedCredentials
| TAuth0ClientSecretRotationGeneratedCredentials
| TAzureClientSecretRotationGeneratedCredentials
| TLdapPasswordRotationGeneratedCredentials
| TAwsIamUserSecretRotationGeneratedCredentials;
| TAwsIamUserSecretRotationGeneratedCredentials
| TOktaClientSecretRotationGeneratedCredentials;

export type TSecretRotationV2Input =
| TPostgresCredentialsRotationInput
@@ -95,7 +106,8 @@ export type TSecretRotationV2Input =
| TAuth0ClientSecretRotationInput
| TAzureClientSecretRotationInput
| TLdapPasswordRotationInput
| TAwsIamUserSecretRotationInput;
| TAwsIamUserSecretRotationInput
| TOktaClientSecretRotationInput;

export type TSecretRotationV2ListItem =
| TPostgresCredentialsRotationListItem
@@ -105,7 +117,8 @@ export type TSecretRotationV2ListItem =
| TAuth0ClientSecretRotationListItem
| TAzureClientSecretRotationListItem
| TLdapPasswordRotationListItem
| TAwsIamUserSecretRotationListItem;
| TAwsIamUserSecretRotationListItem
| TOktaClientSecretRotationListItem;

export type TSecretRotationV2TemporaryParameters = TLdapPasswordRotationInput["temporaryParameters"] | undefined;

@@ -239,7 +252,8 @@ export type TRotationFactory<
> = (
secretRotation: T,
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
issueCredentials: TRotationFactoryIssueCredentials<C, P>;
revokeCredentials: TRotationFactoryRevokeCredentials<C>;

@@ -6,6 +6,7 @@ import { AzureClientSecretRotationSchema } from "@app/ee/services/secret-rotatio
import { LdapPasswordRotationSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MsSqlCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OktaClientSecretRotationSchema } from "@app/ee/services/secret-rotation-v2/okta-client-secret";
import { OracleDBCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { PostgresCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";

@@ -17,5 +18,6 @@ export const SecretRotationV2Schema = z.discriminatedUnion("type", [
Auth0ClientSecretRotationSchema,
AzureClientSecretRotationSchema,
LdapPasswordRotationSchema,
AwsIamUserSecretRotationSchema
AwsIamUserSecretRotationSchema,
OktaClientSecretRotationSchema
]);

@@ -1,3 +1,5 @@
import { Knex } from "knex";

import {
TRotationFactory,
TRotationFactoryGetSecretsPayload,
@@ -5,7 +7,10 @@ import {
TRotationFactoryRevokeCredentials,
TRotationFactoryRotateCredentials
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { getSqlConnectionClient, SQL_CONNECTION_ALTER_LOGIN_STATEMENT } from "@app/services/app-connection/shared/sql";
import {
executeWithPotentialGateway,
SQL_CONNECTION_ALTER_LOGIN_STATEMENT
} from "@app/services/app-connection/shared/sql";

import { generatePassword } from "../utils";
import {
@@ -30,7 +35,7 @@ const redactPasswords = (e: unknown, credentials: TSqlCredentialsRotationGenerat
export const sqlCredentialsRotationFactory: TRotationFactory<
TSqlCredentialsRotationWithConnection,
TSqlCredentialsRotationGeneratedCredentials
> = (secretRotation) => {
> = (secretRotation, _appConnectionDAL, _kmsService, gatewayService) => {
const {
connection,
parameters: { username1, username2 },
@@ -38,29 +43,38 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
secretsMapping
} = secretRotation;

const $validateCredentials = async (credentials: TSqlCredentialsRotationGeneratedCredentials[number]) => {
const client = await getSqlConnectionClient({
...connection,
credentials: {
...connection.credentials,
...credentials
}
});
const executeOperation = <T>(
operation: (client: Knex) => Promise<T>,
credentialsOverride?: TSqlCredentialsRotationGeneratedCredentials[number]
) => {
const finalCredentials = {
...connection.credentials,
...credentialsOverride
};

return executeWithPotentialGateway(
{
...connection,
credentials: finalCredentials
},
gatewayService,
(client) => operation(client)
);
};

const $validateCredentials = async (credentials: TSqlCredentialsRotationGeneratedCredentials[number]) => {
try {
await client.raw("SELECT 1");
await executeOperation(async (client) => {
await client.raw("SELECT 1");
}, credentials);
} catch (error) {
throw new Error(redactPasswords(error, [credentials]));
} finally {
await client.destroy();
}
};

const issueCredentials: TRotationFactoryIssueCredentials<TSqlCredentialsRotationGeneratedCredentials> = async (
callback
) => {
const client = await getSqlConnectionClient(connection);

// For SQL, since we get existing users, we change both their passwords
// on issue to invalidate their existing passwords
const credentialsSet = [
@@ -69,15 +83,15 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
];

try {
await client.transaction(async (tx) => {
for await (const credentials of credentialsSet) {
await tx.raw(...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[connection.app](credentials));
}
await executeOperation(async (client) => {
await client.transaction(async (tx) => {
for await (const credentials of credentialsSet) {
await tx.raw(...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[connection.app](credentials));
}
});
});
} catch (error) {
throw new Error(redactPasswords(error, credentialsSet));
} finally {
await client.destroy();
}

for await (const credentials of credentialsSet) {
@@ -91,21 +105,19 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
credentialsToRevoke,
callback
) => {
const client = await getSqlConnectionClient(connection);

const revokedCredentials = credentialsToRevoke.map(({ username }) => ({ username, password: generatePassword() }));

try {
await client.transaction(async (tx) => {
for await (const credentials of revokedCredentials) {
// invalidate previous passwords
await tx.raw(...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[connection.app](credentials));
}
await executeOperation(async (client) => {
await client.transaction(async (tx) => {
for await (const credentials of revokedCredentials) {
// invalidate previous passwords
await tx.raw(...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[connection.app](credentials));
}
});
});
} catch (error) {
throw new Error(redactPasswords(error, revokedCredentials));
} finally {
await client.destroy();
}

return callback();
@@ -115,17 +127,15 @@ export const sqlCredentialsRotationFactory: TRotationFactory<
_,
callback
) => {
const client = await getSqlConnectionClient(connection);

// generate new password for the next active user
const credentials = { username: activeIndex === 0 ? username2 : username1, password: generatePassword() };

try {
await client.raw(...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[connection.app](credentials));
await executeOperation(async (client) => {
await client.raw(...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[connection.app](credentials));
});
} catch (error) {
throw new Error(redactPasswords(error, [credentials]));
} finally {
await client.destroy();
}

await $validateCredentials(credentials);

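Aside: the rotation functions above now funnel every SQL call through a single executeOperation wrapper, so the gateway decision is made in one place and the per-call client setup and teardown (the removed finally { await client.destroy(); } blocks) presumably move inside the shared helper. A minimal generic sketch of that wrap-the-operation pattern, with hypothetical names (the real helper is executeWithPotentialGateway from the shared SQL module):

// Hypothetical illustration of the pattern; not the actual Infisical helper.
type MinimalClient = { raw: (sql: string) => Promise<unknown>; destroy: () => Promise<void> };

const withSqlClient = async <T>(
  makeClient: () => Promise<MinimalClient>,
  operation: (client: MinimalClient) => Promise<T>
): Promise<T> => {
  const client = await makeClient();
  try {
    // Callers only describe the operation; connection setup and teardown stay here.
    return await operation(client);
  } finally {
    await client.destroy();
  }
};
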
@@ -567,14 +567,18 @@ export const secretScanningV2QueueServiceFactory = async ({
const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
const project = await projectDAL.findById(projectId);

const projectAdmins = projectMembers.filter((member) =>
member.roles.some((role) => role.role === ProjectMembershipRole.Admin)
);
const recipients = projectMembers.filter((member) => {
const isAdmin = member.roles.some((role) => role.role === ProjectMembershipRole.Admin);
const isCompleted = payload.status === SecretScanningScanStatus.Completed;
// We assume that the committer is one of the project members
const isCommitter = isCompleted && payload.authorEmail === member.user.email;
return isAdmin || isCommitter;
});

const timestamp = new Date().toISOString();

await smtpService.sendMail({
recipients: projectAdmins.map((member) => member.user.email!).filter(Boolean),
recipients: recipients.map((member) => member.user.email!).filter(Boolean),
template:
payload.status === SecretScanningScanStatus.Completed
? SmtpTemplates.SecretScanningV2SecretsDetected

@@ -2285,6 +2285,14 @@ export const AppConnections = {
},
CHECKLY: {
apiKey: "The API key used to authenticate with Checkly."
},
SUPABASE: {
accessKey: "The Key used to access Supabase.",
instanceUrl: "The URL used to access Supabase."
},
OKTA: {
instanceUrl: "The URL used to access your Okta organization.",
apiToken: "The API token used to authenticate with Okta."
}
}
};
@@ -2494,6 +2502,10 @@ export const SecretSyncs = {
},
CHECKLY: {
accountId: "The ID of the Checkly account to sync secrets to."
},
SUPABASE: {
projectId: "The ID of the Supabase project to sync secrets to.",
projectName: "The name of the Supabase project to sync secrets to."
}
}
};
@@ -2586,6 +2598,9 @@ export const SecretRotations = {
AWS_IAM_USER_SECRET: {
userName: "The name of the client to rotate credentials for.",
region: "The AWS region the client is present in."
},
OKTA_CLIENT_SECRET: {
clientId: "The ID of the Okta Application to rotate the client secret for."
}
},
SECRETS_MAPPING: {
@@ -2608,6 +2623,10 @@ export const SecretRotations = {
AWS_IAM_USER_SECRET: {
accessKeyId: "The name of the secret that the access key ID will be mapped to.",
secretAccessKey: "The name of the secret that the rotated secret access key will be mapped to."
},
OKTA_CLIENT_SECRET: {
clientId: "The name of the secret that the client ID will be mapped to.",
clientSecret: "The name of the secret that the rotated client secret will be mapped to."
}
}
};

@@ -93,7 +93,13 @@ const cryptographyFactory = () => {
};

const verifyFipsLicense = (licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">) => {
if (isFipsModeEnabled({ skipInitializationCheck: true }) && !licenseService.onPremFeatures?.fips) {
const appCfg = getConfig();

if (
!appCfg.isDevelopmentMode &&
isFipsModeEnabled({ skipInitializationCheck: true }) &&
!licenseService.onPremFeatures?.fips
) {
throw new CryptographyError({
message: "FIPS mode is enabled but your license does not include FIPS support. Please contact support."
});

43
backend/src/server/lib/cookie.ts
Normal file
@@ -0,0 +1,43 @@
import { FastifyReply } from "fastify";

import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";

/**
* `aod` (Auth Origin Domain) cookie is used to store the origin domain of the application when user was last authenticated.
* This is useful for determining the target domain for authentication redirects, especially in cloud deployments.
* It is set only in cloud mode to ensure that the cookie is shared across subdomains.
*/
export function addAuthOriginDomainCookie(res: FastifyReply) {
try {
const appCfg = getConfig();

// Only set the cookie if the app is running in cloud mode
if (!appCfg.isCloud) return;

const siteUrl = appCfg.SITE_URL!;
let domain: string;

const { hostname } = new URL(siteUrl);

const parts = hostname.split(".");

if (parts.length >= 2) {
// For `app.infisical.com` => `.infisical.com`
domain = `.${parts.slice(-2).join(".")}`;
} else {
// If somehow only "example", fallback to itself
domain = `.${hostname}`;
}

void res.setCookie("aod", siteUrl, {
domain,
path: "/",
sameSite: "strict",
httpOnly: false,
secure: appCfg.HTTPS_ENABLED
});
} catch (error) {
logger.error(error, "Failed to set auth origin domain cookie");
}
}

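For illustration only, the domain derivation above can be exercised in isolation; a small sketch (the helper name and example URLs are hypothetical, the two-label heuristic is the one shown in the new file):

// Hypothetical standalone mirror of the cookie-domain logic above; not part of the diff.
const deriveAuthCookieDomain = (siteUrl: string): string => {
  const { hostname } = new URL(siteUrl); // e.g. "app.infisical.com"
  const parts = hostname.split(".");
  // Keep the last two labels so the cookie is shared across subdomains;
  // fall back to the bare hostname when there is only one label.
  return parts.length >= 2 ? `.${parts.slice(-2).join(".")}` : `.${hostname}`;
};

// deriveAuthCookieDomain("https://app.infisical.com") === ".infisical.com"
// deriveAuthCookieDomain("http://localhost") === ".localhost"
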
@@ -1538,7 +1538,12 @@ export const registerRoutes = async (
folderService,
permissionService,
folderDAL,
projectEnvDAL
projectEnvDAL,
secretApprovalRequestService,
secretApprovalPolicyService,
projectDAL,
secretV2BridgeService,
folderCommitDAL
});

const identityOidcAuthService = identityOidcAuthServiceFactory({
@@ -1706,7 +1711,9 @@ export const registerRoutes = async (
appConnectionDAL,
permissionService,
kmsService,
licenseService
licenseService,
gatewayService,
gatewayDAL
});

const secretSyncService = secretSyncServiceFactory({
@@ -1804,7 +1811,8 @@ export const registerRoutes = async (
snapshotService,
secretQueueService,
queueService,
appConnectionDAL
appConnectionDAL,
gatewayService
});

const certificateAuthorityService = certificateAuthorityServiceFactory({

@@ -12,6 +12,7 @@ import { getConfig, overridableKeys } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -593,6 +594,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
secure: appCfg.HTTPS_ENABLED
});

addAuthOriginDomainCookie(res);

return {
message: "Successfully set up admin account",
user: user.user,

@@ -25,12 +25,14 @@ export const registerAppConnectionEndpoints = <T extends TAppConnection, I exten
credentials: I["credentials"];
description?: string | null;
isPlatformManagedCredentials?: boolean;
gatewayId?: string | null;
}>;
updateSchema: z.ZodType<{
name?: string;
credentials?: I["credentials"];
description?: string | null;
isPlatformManagedCredentials?: boolean;
gatewayId?: string | null;
}>;
sanitizedResponseSchema: z.ZodTypeAny;
}) => {
@@ -224,10 +226,10 @@ export const registerAppConnectionEndpoints = <T extends TAppConnection, I exten
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { name, method, credentials, description, isPlatformManagedCredentials } = req.body;
const { name, method, credentials, description, isPlatformManagedCredentials, gatewayId } = req.body;

const appConnection = (await server.services.appConnection.createAppConnection(
{ name, method, app, credentials, description, isPlatformManagedCredentials },
{ name, method, app, credentials, description, isPlatformManagedCredentials, gatewayId },
req.permission
)) as T;

@@ -270,11 +272,11 @@ export const registerAppConnectionEndpoints = <T extends TAppConnection, I exten
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { name, credentials, description, isPlatformManagedCredentials } = req.body;
const { name, credentials, description, isPlatformManagedCredentials, gatewayId } = req.body;
const { connectionId } = req.params;

const appConnection = (await server.services.appConnection.updateAppConnection(
{ name, credentials, connectionId, description, isPlatformManagedCredentials },
{ name, credentials, connectionId, description, isPlatformManagedCredentials, gatewayId },
req.permission
)) as T;

@@ -71,6 +71,7 @@ import {
import { LdapConnectionListItemSchema, SanitizedLdapConnectionSchema } from "@app/services/app-connection/ldap";
import { MsSqlConnectionListItemSchema, SanitizedMsSqlConnectionSchema } from "@app/services/app-connection/mssql";
import { MySqlConnectionListItemSchema, SanitizedMySqlConnectionSchema } from "@app/services/app-connection/mysql";
import { OktaConnectionListItemSchema, SanitizedOktaConnectionSchema } from "@app/services/app-connection/okta";
import {
PostgresConnectionListItemSchema,
SanitizedPostgresConnectionSchema
@@ -83,6 +84,10 @@ import {
RenderConnectionListItemSchema,
SanitizedRenderConnectionSchema
} from "@app/services/app-connection/render/render-connection-schema";
import {
SanitizedSupabaseConnectionSchema,
SupabaseConnectionListItemSchema
} from "@app/services/app-connection/supabase";
import {
SanitizedTeamCityConnectionSchema,
TeamCityConnectionListItemSchema
@@ -133,7 +138,9 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedBitbucketConnectionSchema.options,
...SanitizedZabbixConnectionSchema.options,
...SanitizedRailwayConnectionSchema.options,
...SanitizedChecklyConnectionSchema.options
...SanitizedChecklyConnectionSchema.options,
...SanitizedSupabaseConnectionSchema.options,
...SanitizedOktaConnectionSchema.options
]);

const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -169,7 +176,9 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
BitbucketConnectionListItemSchema,
ZabbixConnectionListItemSchema,
RailwayConnectionListItemSchema,
ChecklyConnectionListItemSchema
ChecklyConnectionListItemSchema,
SupabaseConnectionListItemSchema,
OktaConnectionListItemSchema
]);

export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

@@ -25,9 +25,11 @@ import { registerHumanitecConnectionRouter } from "./humanitec-connection-router
import { registerLdapConnectionRouter } from "./ldap-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
import { registerOktaConnectionRouter } from "./okta-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerRailwayConnectionRouter } from "./railway-connection-router";
import { registerRenderConnectionRouter } from "./render-connection-router";
import { registerSupabaseConnectionRouter } from "./supabase-connection-router";
import { registerTeamCityConnectionRouter } from "./teamcity-connection-router";
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
import { registerVercelConnectionRouter } from "./vercel-connection-router";
@@ -70,5 +72,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.Bitbucket]: registerBitbucketConnectionRouter,
[AppConnection.Zabbix]: registerZabbixConnectionRouter,
[AppConnection.Railway]: registerRailwayConnectionRouter,
[AppConnection.Checkly]: registerChecklyConnectionRouter
[AppConnection.Checkly]: registerChecklyConnectionRouter,
[AppConnection.Supabase]: registerSupabaseConnectionRouter,
[AppConnection.Okta]: registerOktaConnectionRouter
};

@@ -0,0 +1,52 @@
import { z } from "zod";

import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateOktaConnectionSchema,
SanitizedOktaConnectionSchema,
UpdateOktaConnectionSchema
} from "@app/services/app-connection/okta";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerOktaConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Okta,
server,
sanitizedResponseSchema: SanitizedOktaConnectionSchema,
createSchema: CreateOktaConnectionSchema,
updateSchema: UpdateOktaConnectionSchema
});

// The below endpoints are not exposed and for Infisical App use

server.route({
method: "GET",
url: `/:connectionId/apps`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
apps: z.object({ id: z.string(), label: z.string() }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
params: { connectionId }
} = req;

const apps = await server.services.appConnection.okta.listApps(connectionId, req.permission);
return { apps };
}
});
};

@@ -0,0 +1,55 @@
import { z } from "zod";

import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateSupabaseConnectionSchema,
SanitizedSupabaseConnectionSchema,
UpdateSupabaseConnectionSchema
} from "@app/services/app-connection/supabase";
import { AuthMode } from "@app/services/auth/auth-type";

import { registerAppConnectionEndpoints } from "./app-connection-endpoints";

export const registerSupabaseConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Supabase,
server,
sanitizedResponseSchema: SanitizedSupabaseConnectionSchema,
createSchema: CreateSupabaseConnectionSchema,
updateSchema: UpdateSupabaseConnectionSchema
});

// The below endpoints are not exposed and for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/projects`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
projects: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;

const projects = await server.services.appConnection.supabase.listProjects(connectionId, req.permission);

return { projects };
}
});
};

@@ -42,6 +42,14 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => {
maxAge: 0
});

void res.cookie("aod", "", {
httpOnly: false,
path: "/",
sameSite: "lax",
secure: appCfg.HTTPS_ENABLED,
maxAge: 0
});

return { message: "Successfully logged out" };
}
});

@@ -28,7 +28,17 @@ export const registerIdentityOciAuthRouter = async (server: FastifyZodProvider)
.object({
authorization: z.string(),
host: z.string(),
"x-date": z.string()
"x-date": z.string().optional(),
date: z.string().optional()
})
.superRefine((val, ctx) => {
if (!val.date && !val["x-date"]) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "Either date or x-date must be provided",
path: ["headers", "date"]
});
}
})
.describe(OCI_AUTH.LOGIN.headers)
}),

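As a side note, the superRefine block above implements an "at least one of two optional headers" rule; a self-contained sketch of the same pattern (the schema name is hypothetical, the rule mirrors the diff):

import { z } from "zod";

// Accept either a "date" or an "x-date" header, but reject requests carrying neither.
const LoginHeadersSchema = z
  .object({
    "x-date": z.string().optional(),
    date: z.string().optional()
  })
  .superRefine((val, ctx) => {
    if (!val.date && !val["x-date"]) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "Either date or x-date must be provided"
      });
    }
  });

LoginHeadersSchema.safeParse({}).success; // false
LoginHeadersSchema.safeParse({ date: "2024-01-01T00:00:00Z" }).success; // true
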
@@ -21,6 +21,7 @@ import { registerHerokuSyncRouter } from "./heroku-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerRailwaySyncRouter } from "./railway-sync-router";
import { registerRenderSyncRouter } from "./render-sync-router";
import { registerSupabaseSyncRouter } from "./supabase-sync-router";
import { registerTeamCitySyncRouter } from "./teamcity-sync-router";
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
import { registerVercelSyncRouter } from "./vercel-sync-router";
@@ -53,7 +54,7 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.GitLab]: registerGitLabSyncRouter,
[SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter,
[SecretSync.CloudflareWorkers]: registerCloudflareWorkersSyncRouter,

[SecretSync.Supabase]: registerSupabaseSyncRouter,
[SecretSync.Zabbix]: registerZabbixSyncRouter,
[SecretSync.Railway]: registerRailwaySyncRouter,
[SecretSync.Checkly]: registerChecklySyncRouter

@@ -41,6 +41,7 @@ import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
import { SupabaseSyncListItemSchema, SupabaseSyncSchema } from "@app/services/secret-sync/supabase";
import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/secret-sync/teamcity";
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
import { VercelSyncListItemSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel";
@@ -71,7 +72,7 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
GitLabSyncSchema,
CloudflarePagesSyncSchema,
CloudflareWorkersSyncSchema,

SupabaseSyncSchema,
ZabbixSyncSchema,
RailwaySyncSchema,
ChecklySyncSchema
@@ -104,7 +105,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [

ZabbixSyncListItemSchema,
RailwaySyncListItemSchema,
ChecklySyncListItemSchema
ChecklySyncListItemSchema,
SupabaseSyncListItemSchema
]);

export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {

@@ -0,0 +1,17 @@
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
CreateSupabaseSyncSchema,
SupabaseSyncSchema,
UpdateSupabaseSyncSchema
} from "@app/services/secret-sync/supabase";

import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";

export const registerSupabaseSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Supabase,
server,
responseSchema: SupabaseSyncSchema,
createSchema: CreateSupabaseSyncSchema,
updateSchema: UpdateSupabaseSyncSchema
});

@@ -22,6 +22,7 @@ import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { fetchGithubEmails, fetchGithubUser } from "@app/lib/requests/github";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
import { AuthMethod } from "@app/services/auth/auth-type";
import { OrgAuthMethod } from "@app/services/org/org-types";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
@@ -475,6 +476,8 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
secure: appCfg.HTTPS_ENABLED
});

addAuthOriginDomainCookie(res);

return {
encryptionVersion: data.user.encryptionVersion,
token: data.token.access,

@@ -4,6 +4,7 @@ import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { mfaRateLimit } from "@app/server/config/rateLimiter";
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
import { AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "@app/services/auth/auth-type";

export const registerMfaRouter = async (server: FastifyZodProvider) => {
@@ -131,6 +132,8 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
secure: appCfg.HTTPS_ENABLED
});

addAuthOriginDomainCookie(res);

return {
...user,
token: token.access,

@@ -10,6 +10,7 @@ import {
import { ApiDocsTags, ORGANIZATIONS } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
@@ -396,6 +397,8 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
secure: cfg.HTTPS_ENABLED
});

addAuthOriginDomainCookie(res);

return { organization, accessToken: tokens.accessToken };
}
});

@@ -3,6 +3,7 @@ import { z } from "zod";
import { INFISICAL_PROVIDER_GITHUB_ACCESS_TOKEN } from "@app/lib/config/const";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";

export const registerLoginRouter = async (server: FastifyZodProvider) => {
server.route({
@@ -93,6 +94,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
secure: cfg.HTTPS_ENABLED
});

addAuthOriginDomainCookie(res);

void res.cookie("infisical-project-assume-privileges", "", {
httpOnly: true,
path: "/",
@@ -155,6 +158,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
secure: appCfg.HTTPS_ENABLED
});

addAuthOriginDomainCookie(res);

void res.cookie("infisical-project-assume-privileges", "", {
httpOnly: true,
path: "/",

@@ -4,6 +4,7 @@ import { UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { ForbiddenRequestError } from "@app/lib/errors";
import { authRateLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
@@ -170,6 +171,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
secure: appCfg.HTTPS_ENABLED
});

addAuthOriginDomainCookie(res);

return { message: "Successfully set up account", user, token: accessToken, organizationId };
}
});
@@ -239,6 +242,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
});
// TODO(akhilmhdh-pg): add telemetry service

addAuthOriginDomainCookie(res);

return { message: "Successfully set up account", user, token: accessToken };
}
});

@@ -31,12 +31,16 @@ export const validateOnePassConnectionCredentials = async (config: TOnePassConne
const { apiToken } = config.credentials;

try {
await request.get(`${instanceUrl}/v1/vaults`, {
const res = await request.get(`${instanceUrl}/v1/vaults`, {
headers: {
Authorization: `Bearer ${apiToken}`,
Accept: "application/json"
}
});

if (!Array.isArray(res.data)) {
throw new AxiosError("Invalid response from 1Password API");
}
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({

@@ -31,7 +31,9 @@ export enum AppConnection {
Zabbix = "zabbix",
Railway = "railway",
Bitbucket = "bitbucket",
Checkly = "checkly"
Checkly = "checkly",
Supabase = "supabase",
Okta = "okta"
}

export enum AWSRegion {

@@ -5,6 +5,7 @@ import {
validateOCIConnectionCredentials
} from "@app/ee/services/app-connections/oci";
import { getOracleDBConnectionListItem, OracleDBConnectionMethod } from "@app/ee/services/app-connections/oracledb";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
@@ -91,10 +92,16 @@ import { getLdapConnectionListItem, LdapConnectionMethod, validateLdapConnection
import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
import { getOktaConnectionListItem, OktaConnectionMethod, validateOktaConnectionCredentials } from "./okta";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
import { RenderConnectionMethod } from "./render/render-connection-enums";
import { getRenderConnectionListItem, validateRenderConnectionCredentials } from "./render/render-connection-fns";
import {
getSupabaseConnectionListItem,
SupabaseConnectionMethod,
validateSupabaseConnectionCredentials
} from "./supabase";
import {
getTeamCityConnectionListItem,
TeamCityConnectionMethod,
@@ -148,7 +155,9 @@ export const listAppConnectionOptions = () => {
getZabbixConnectionListItem(),
getRailwayConnectionListItem(),
getBitbucketConnectionListItem(),
getChecklyConnectionListItem()
getChecklyConnectionListItem(),
getSupabaseConnectionListItem(),
getOktaConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};

@@ -195,7 +204,8 @@ export const decryptAppConnectionCredentials = async ({
};

export const validateAppConnectionCredentials = async (
appConnection: TAppConnectionConfig
appConnection: TAppConnectionConfig,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
): Promise<TAppConnection["credentials"]> => {
const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TAppConnectionCredentialsValidator> = {
[AppConnection.AWS]: validateAwsConnectionCredentials as TAppConnectionCredentialsValidator,
@@ -232,10 +242,12 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Railway]: validateRailwayConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Bitbucket]: validateBitbucketConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Checkly]: validateChecklyConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.Checkly]: validateChecklyConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Supabase]: validateSupabaseConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator
};

return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection);
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection, gatewayService);
};

export const getAppConnectionMethodName = (method: TAppConnection["method"]) => {
@@ -271,6 +283,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case CloudflareConnectionMethod.APIToken:
case BitbucketConnectionMethod.ApiToken:
case ZabbixConnectionMethod.ApiToken:
case OktaConnectionMethod.ApiToken:
return "API Token";
case PostgresConnectionMethod.UsernameAndPassword:
case MsSqlConnectionMethod.UsernameAndPassword:
@@ -292,6 +305,8 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case RenderConnectionMethod.ApiKey:
case ChecklyConnectionMethod.ApiKey:
return "API Key";
case SupabaseConnectionMethod.AccessToken:
return "Access Token";
default:
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
throw new Error(`Unhandled App Connection Method: ${method}`);
@@ -355,7 +370,9 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported,
[AppConnection.Railway]: platformManagedCredentialsNotSupported,
[AppConnection.Bitbucket]: platformManagedCredentialsNotSupported,
[AppConnection.Checkly]: platformManagedCredentialsNotSupported
[AppConnection.Checkly]: platformManagedCredentialsNotSupported,
[AppConnection.Supabase]: platformManagedCredentialsNotSupported,
[AppConnection.Okta]: platformManagedCredentialsNotSupported
};

export const enterpriseAppCheck = async (

@@ -33,7 +33,9 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Zabbix]: "Zabbix",
[AppConnection.Railway]: "Railway",
[AppConnection.Bitbucket]: "Bitbucket",
[AppConnection.Checkly]: "Checkly"
[AppConnection.Checkly]: "Checkly",
[AppConnection.Supabase]: "Supabase",
[AppConnection.Okta]: "Okta"
};

export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
@@ -69,5 +71,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.Zabbix]: AppConnectionPlanType.Regular,
[AppConnection.Railway]: AppConnectionPlanType.Regular,
[AppConnection.Bitbucket]: AppConnectionPlanType.Regular,
[AppConnection.Checkly]: AppConnectionPlanType.Regular
[AppConnection.Checkly]: AppConnectionPlanType.Regular,
[AppConnection.Supabase]: AppConnectionPlanType.Regular,
[AppConnection.Okta]: AppConnectionPlanType.Regular
};

@@ -18,7 +18,7 @@ export const BaseAppConnectionSchema = AppConnectionsSchema.omit({

export const GenericCreateAppConnectionFieldsSchema = (
app: AppConnection,
{ supportsPlatformManagedCredentials = false }: TAppConnectionBaseConfig = {}
{ supportsPlatformManagedCredentials = false, supportsGateways = false }: TAppConnectionBaseConfig = {}
) =>
z.object({
name: slugSchema({ field: "name" }).describe(AppConnections.CREATE(app).name),
@@ -30,12 +30,23 @@ export const GenericCreateAppConnectionFieldsSchema = (
.describe(AppConnections.CREATE(app).description),
isPlatformManagedCredentials: supportsPlatformManagedCredentials
? z.boolean().optional().default(false).describe(AppConnections.CREATE(app).isPlatformManagedCredentials)
: z.literal(false).optional().describe(`Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections.`)
: z
.literal(false, {
errorMap: () => ({ message: `Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections` })
})
.optional()
.describe(`Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections.`),
gatewayId: supportsGateways
? z.string().uuid().nullish().describe("The Gateway ID to use for this connection.")
: z
.undefined({ message: `Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections` })
.or(z.null({ message: `Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections` }))
.describe(`Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections.`)
});

export const GenericUpdateAppConnectionFieldsSchema = (
app: AppConnection,
{ supportsPlatformManagedCredentials = false }: TAppConnectionBaseConfig = {}
{ supportsPlatformManagedCredentials = false, supportsGateways = false }: TAppConnectionBaseConfig = {}
) =>
z.object({
name: slugSchema({ field: "name" }).describe(AppConnections.UPDATE(app).name).optional(),
@@ -47,5 +58,16 @@ export const GenericUpdateAppConnectionFieldsSchema = (
.describe(AppConnections.UPDATE(app).description),
isPlatformManagedCredentials: supportsPlatformManagedCredentials
? z.boolean().optional().describe(AppConnections.UPDATE(app).isPlatformManagedCredentials)
: z.literal(false).optional().describe(`Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections.`)
: z
.literal(false, {
errorMap: () => ({ message: `Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections` })
})
.optional()
.describe(`Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections.`),
gatewayId: supportsGateways
? z.string().uuid().nullish().describe("The Gateway ID to use for this connection.")
: z
.undefined({ message: `Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections` })
.or(z.null({ message: `Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections` }))
.describe(`Not supported for ${APP_CONNECTION_NAME_MAP[app]} Connections.`)
});

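For illustration, a hedged sketch of what the supportsGateways switch above means for callers: when a connection type opts in, gatewayId may be a UUID, null, or omitted; otherwise any concrete value is rejected (the field definitions mirror the diff, the surrounding names are hypothetical):

import { z } from "zod";

// gatewayId acceptance toggled the same way the generic schemas above do it.
const gatewayIdField = (supportsGateways: boolean) =>
  supportsGateways ? z.string().uuid().nullish() : z.undefined().or(z.null());

const WithGateway = z.object({ gatewayId: gatewayIdField(true) });
const WithoutGateway = z.object({ gatewayId: gatewayIdField(false) });

WithGateway.safeParse({ gatewayId: "7a5f2b1c-1111-4222-8333-944455566677" }).success; // true
WithGateway.safeParse({}).success; // true (field may be omitted)
WithoutGateway.safeParse({ gatewayId: "7a5f2b1c-1111-4222-8333-944455566677" }).success; // false
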
|
||||
|
@@ -3,8 +3,14 @@ import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { ValidateOCIConnectionCredentialsSchema } from "@app/ee/services/app-connections/oci";
|
||||
import { ociConnectionService } from "@app/ee/services/app-connections/oci/oci-connection-service";
|
||||
import { ValidateOracleDBConnectionCredentialsSchema } from "@app/ee/services/app-connections/oracledb";
|
||||
import { TGatewayDALFactory } from "@app/ee/services/gateway/gateway-dal";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { OrgPermissionAppConnectionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
|
||||
import {
|
||||
OrgPermissionAppConnectionActions,
|
||||
OrgPermissionGatewayActions,
|
||||
OrgPermissionSubjects
|
||||
} from "@app/ee/services/permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
|
||||
import { crypto } from "@app/lib/crypto/cryptography";
|
||||
import { DatabaseErrorCode } from "@app/lib/error-codes";
|
||||
@@ -73,11 +79,15 @@ import { humanitecConnectionService } from "./humanitec/humanitec-connection-ser
|
||||
import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
|
||||
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
|
||||
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
|
||||
import { ValidateOktaConnectionCredentialsSchema } from "./okta";
|
||||
import { oktaConnectionService } from "./okta/okta-connection-service";
|
||||
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
|
||||
import { ValidateRailwayConnectionCredentialsSchema } from "./railway";
|
||||
import { railwayConnectionService } from "./railway/railway-connection-service";
|
||||
import { ValidateRenderConnectionCredentialsSchema } from "./render/render-connection-schema";
|
||||
import { renderConnectionService } from "./render/render-connection-service";
|
||||
import { ValidateSupabaseConnectionCredentialsSchema } from "./supabase";
|
||||
import { supabaseConnectionService } from "./supabase/supabase-connection-service";
|
||||
import { ValidateTeamCityConnectionCredentialsSchema } from "./teamcity";
|
||||
import { teamcityConnectionService } from "./teamcity/teamcity-connection-service";
|
||||
import { ValidateTerraformCloudConnectionCredentialsSchema } from "./terraform-cloud";
|
||||
@@ -94,6 +104,8 @@ export type TAppConnectionServiceFactoryDep = {
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
|
||||
gatewayDAL: Pick<TGatewayDALFactory, "find">;
|
||||
};
|
||||
|
||||
export type TAppConnectionServiceFactory = ReturnType<typeof appConnectionServiceFactory>;
|
||||
@@ -131,14 +143,18 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
|
||||
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema,
|
||||
[AppConnection.Railway]: ValidateRailwayConnectionCredentialsSchema,
|
||||
[AppConnection.Bitbucket]: ValidateBitbucketConnectionCredentialsSchema,
|
||||
[AppConnection.Checkly]: ValidateChecklyConnectionCredentialsSchema
|
||||
[AppConnection.Checkly]: ValidateChecklyConnectionCredentialsSchema,
|
||||
[AppConnection.Supabase]: ValidateSupabaseConnectionCredentialsSchema,
|
||||
[AppConnection.Okta]: ValidateOktaConnectionCredentialsSchema
|
||||
};
|
||||
|
||||
export const appConnectionServiceFactory = ({
|
||||
appConnectionDAL,
|
||||
permissionService,
|
||||
kmsService,
|
||||
licenseService
|
||||
licenseService,
|
||||
gatewayService,
|
||||
gatewayDAL
|
||||
}: TAppConnectionServiceFactoryDep) => {
|
||||
const listAppConnectionsByOrg = async (actor: OrgServiceActor, app?: AppConnection) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
@@ -219,7 +235,7 @@ export const appConnectionServiceFactory = ({
|
||||
};
|
||||
|
||||
const createAppConnection = async (
|
||||
{ method, app, credentials, ...params }: TCreateAppConnectionDTO,
|
||||
{ method, app, credentials, gatewayId, ...params }: TCreateAppConnectionDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
@@ -235,6 +251,20 @@ export const appConnectionServiceFactory = ({
|
||||
OrgPermissionSubjects.AppConnections
|
||||
);
|
||||
|
||||
if (gatewayId) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.AttachGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
|
||||
const [gateway] = await gatewayDAL.find({ id: gatewayId, orgId: actor.orgId });
|
||||
if (!gateway) {
|
||||
throw new NotFoundError({
|
||||
message: `Gateway with ID ${gatewayId} not found for org`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
await enterpriseAppCheck(
|
||||
licenseService,
|
||||
app,
|
||||
@@ -242,12 +272,16 @@ export const appConnectionServiceFactory = ({
|
||||
"Failed to create app connection due to plan restriction. Upgrade plan to access enterprise app connections."
|
||||
);
|
||||
|
||||
const validatedCredentials = await validateAppConnectionCredentials({
|
||||
app,
|
||||
credentials,
|
||||
method,
|
||||
orgId: actor.orgId
|
||||
} as TAppConnectionConfig);
|
||||
const validatedCredentials = await validateAppConnectionCredentials(
|
||||
{
|
||||
app,
|
||||
credentials,
|
||||
method,
|
||||
orgId: actor.orgId,
|
||||
gatewayId
|
||||
} as TAppConnectionConfig,
|
||||
gatewayService
|
||||
);
|
||||
|
||||
try {
|
||||
const createConnection = async (connectionCredentials: TAppConnection["credentials"]) => {
|
||||
@@ -262,6 +296,7 @@ export const appConnectionServiceFactory = ({
|
||||
encryptedCredentials,
|
||||
method,
|
||||
app,
|
||||
gatewayId,
|
||||
...params
|
||||
});
|
||||
};
|
||||
@@ -274,9 +309,11 @@ export const appConnectionServiceFactory = ({
|
||||
app,
|
||||
orgId: actor.orgId,
|
||||
credentials: validatedCredentials,
|
||||
method
|
||||
method,
|
||||
gatewayId
|
||||
} as TAppConnectionConfig,
|
||||
(platformCredentials) => createConnection(platformCredentials)
|
||||
(platformCredentials) => createConnection(platformCredentials),
|
||||
gatewayService
|
||||
);
|
||||
} else {
|
||||
connection = await createConnection(validatedCredentials);
|
||||
@@ -297,7 +334,7 @@ export const appConnectionServiceFactory = ({
|
||||
};
|
||||
|
||||
const updateAppConnection = async (
|
||||
{ connectionId, credentials, ...params }: TUpdateAppConnectionDTO,
|
||||
{ connectionId, credentials, gatewayId, ...params }: TUpdateAppConnectionDTO,
|
||||
actor: OrgServiceActor
|
||||
) => {
|
||||
const appConnection = await appConnectionDAL.findById(connectionId);
|
||||
@@ -324,6 +361,22 @@ export const appConnectionServiceFactory = ({
|
||||
OrgPermissionSubjects.AppConnections
|
||||
);
|
||||
|
||||
if (gatewayId !== appConnection.gatewayId) {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.AttachGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
|
||||
if (gatewayId) {
|
||||
const [gateway] = await gatewayDAL.find({ id: gatewayId, orgId: actor.orgId });
|
||||
if (!gateway) {
|
||||
throw new NotFoundError({
|
||||
message: `Gateway with ID ${gatewayId} not found for org`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// prevent updating credentials or management status if platform managed
|
||||
if (appConnection.isPlatformManagedCredentials && (params.isPlatformManagedCredentials === false || credentials)) {
|
||||
throw new BadRequestError({
|
||||
@@ -348,12 +401,16 @@ export const appConnectionServiceFactory = ({
|
||||
} Connection with method ${getAppConnectionMethodName(method)}`
|
||||
});
|
||||
|
||||
updatedCredentials = await validateAppConnectionCredentials({
|
||||
app,
|
||||
orgId: actor.orgId,
|
||||
credentials,
|
||||
method
|
||||
} as TAppConnectionConfig);
|
||||
updatedCredentials = await validateAppConnectionCredentials(
|
||||
{
|
||||
app,
|
||||
orgId: actor.orgId,
|
||||
credentials,
|
||||
method,
|
||||
gatewayId
|
||||
} as TAppConnectionConfig,
|
||||
gatewayService
|
||||
);
|
||||
|
||||
if (!updatedCredentials)
|
||||
throw new BadRequestError({ message: "Unable to validate connection - check credentials" });
|
||||
@@ -372,6 +429,7 @@ export const appConnectionServiceFactory = ({
|
||||
return appConnectionDAL.updateById(connectionId, {
|
||||
orgId: actor.orgId,
|
||||
encryptedCredentials,
|
||||
gatewayId,
|
||||
...params
|
||||
});
|
||||
};
|
||||
@@ -388,9 +446,11 @@ export const appConnectionServiceFactory = ({
|
||||
app,
|
||||
orgId: actor.orgId,
|
||||
credentials: updatedCredentials,
|
||||
method
|
||||
method,
|
||||
gatewayId
|
||||
} as TAppConnectionConfig,
|
||||
(platformCredentials) => updateConnection(platformCredentials)
|
||||
(platformCredentials) => updateConnection(platformCredentials),
|
||||
gatewayService
|
||||
);
|
||||
} else {
|
||||
updatedConnection = await updateConnection(updatedCredentials);
|
||||
@@ -545,6 +605,8 @@ export const appConnectionServiceFactory = ({
|
||||
zabbix: zabbixConnectionService(connectAppConnectionById),
|
||||
railway: railwayConnectionService(connectAppConnectionById),
|
||||
bitbucket: bitbucketConnectionService(connectAppConnectionById),
|
||||
checkly: checklyConnectionService(connectAppConnectionById)
|
||||
checkly: checklyConnectionService(connectAppConnectionById),
|
||||
supabase: supabaseConnectionService(connectAppConnectionById),
|
||||
okta: oktaConnectionService(connectAppConnectionById)
|
||||
};
|
||||
};
|
||||
|
@@ -9,6 +9,7 @@ import {
|
||||
TOracleDBConnectionInput,
|
||||
TValidateOracleDBConnectionCredentialsSchema
|
||||
} from "@app/ee/services/app-connections/oracledb";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
|
||||
import { TSqlConnectionConfig } from "@app/services/app-connection/shared/sql/sql-connection-types";
|
||||
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
|
||||
@@ -142,6 +143,12 @@ import {
|
||||
} from "./ldap";
|
||||
import { TMsSqlConnection, TMsSqlConnectionInput, TValidateMsSqlConnectionCredentialsSchema } from "./mssql";
|
||||
import { TMySqlConnection, TMySqlConnectionInput, TValidateMySqlConnectionCredentialsSchema } from "./mysql";
|
||||
import {
|
||||
TOktaConnection,
|
||||
TOktaConnectionConfig,
|
||||
TOktaConnectionInput,
|
||||
TValidateOktaConnectionCredentialsSchema
|
||||
} from "./okta";
|
||||
import {
|
||||
TPostgresConnection,
|
||||
TPostgresConnectionInput,
|
||||
@@ -159,6 +166,12 @@ import {
|
||||
TRenderConnectionInput,
|
||||
TValidateRenderConnectionCredentialsSchema
|
||||
} from "./render/render-connection-types";
|
||||
import {
|
||||
TSupabaseConnection,
|
||||
TSupabaseConnectionConfig,
|
||||
TSupabaseConnectionInput,
|
||||
TValidateSupabaseConnectionCredentialsSchema
|
||||
} from "./supabase";
|
||||
import {
|
||||
TTeamCityConnection,
|
||||
TTeamCityConnectionConfig,
|
||||
@@ -224,6 +237,8 @@ export type TAppConnection = { id: string } & (
|
||||
| TZabbixConnection
|
||||
| TRailwayConnection
|
||||
| TChecklyConnection
|
||||
| TSupabaseConnection
|
||||
| TOktaConnection
|
||||
);
|
||||
|
||||
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
|
||||
@@ -264,6 +279,8 @@ export type TAppConnectionInput = { id: string } & (
|
||||
| TZabbixConnectionInput
|
||||
| TRailwayConnectionInput
|
||||
| TChecklyConnectionInput
|
||||
| TSupabaseConnectionInput
|
||||
| TOktaConnectionInput
|
||||
);
|
||||
|
||||
export type TSqlConnectionInput =
|
||||
@@ -274,7 +291,7 @@ export type TSqlConnectionInput =
|
||||
|
||||
export type TCreateAppConnectionDTO = Pick<
|
||||
TAppConnectionInput,
|
||||
"credentials" | "method" | "name" | "app" | "description" | "isPlatformManagedCredentials"
|
||||
"credentials" | "method" | "name" | "app" | "description" | "isPlatformManagedCredentials" | "gatewayId"
|
||||
>;
|
||||
|
||||
export type TUpdateAppConnectionDTO = Partial<Omit<TCreateAppConnectionDTO, "method" | "app">> & {
|
||||
@@ -311,7 +328,9 @@ export type TAppConnectionConfig =
|
||||
| TBitbucketConnectionConfig
|
||||
| TZabbixConnectionConfig
|
||||
| TRailwayConnectionConfig
|
||||
| TChecklyConnectionConfig;
|
||||
| TChecklyConnectionConfig
|
||||
| TSupabaseConnectionConfig
|
||||
| TOktaConnectionConfig;
|
||||
|
||||
export type TValidateAppConnectionCredentialsSchema =
|
||||
| TValidateAwsConnectionCredentialsSchema
|
||||
@@ -346,7 +365,9 @@ export type TValidateAppConnectionCredentialsSchema =
|
||||
| TValidateBitbucketConnectionCredentialsSchema
|
||||
| TValidateZabbixConnectionCredentialsSchema
|
||||
| TValidateRailwayConnectionCredentialsSchema
|
||||
| TValidateChecklyConnectionCredentialsSchema;
|
||||
| TValidateChecklyConnectionCredentialsSchema
|
||||
| TValidateSupabaseConnectionCredentialsSchema
|
||||
| TValidateOktaConnectionCredentialsSchema;
|
||||
|
||||
export type TListAwsConnectionKmsKeys = {
|
||||
connectionId: string;
|
||||
@@ -359,14 +380,17 @@ export type TListAwsConnectionIamUsers = {
|
||||
};
|
||||
|
||||
export type TAppConnectionCredentialsValidator = (
|
||||
appConnection: TAppConnectionConfig
|
||||
appConnection: TAppConnectionConfig,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => Promise<TAppConnection["credentials"]>;
|
||||
|
||||
export type TAppConnectionTransitionCredentialsToPlatform = (
|
||||
appConnection: TAppConnectionConfig,
|
||||
callback: (credentials: TAppConnection["credentials"]) => Promise<TAppConnectionRaw>
|
||||
callback: (credentials: TAppConnection["credentials"]) => Promise<TAppConnectionRaw>,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => Promise<TAppConnectionRaw>;
|
||||
|
||||
export type TAppConnectionBaseConfig = {
|
||||
supportsPlatformManagedCredentials?: boolean;
|
||||
supportsGateways?: boolean;
|
||||
};
|
||||
|
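For orientation, a minimal sketch of a validator matching the widened `TAppConnectionCredentialsValidator` shape above, which now receives the gateway service next to the connection config. The config and gateway types here are simplified stand-ins, not the real Infisical types:

```ts
// Sketch only: simplified stand-ins for the real Infisical types.
type GatewayTlsDetails = {
  relayAddress: string;
  certChain: string;
  certificate: string;
  privateKey: string;
};

type GatewayService = {
  fnGetGatewayClientTlsByGatewayId: (gatewayId: string) => Promise<GatewayTlsDetails>;
};

type ConnectionConfig = {
  gatewayId?: string | null;
  credentials: { host: string; port: number; username: string; password: string };
};

// The validator now receives the gateway service alongside the config, so gateway-backed
// connections can resolve relay/TLS details before probing the target host.
const validateCredentials = async (config: ConnectionConfig, gatewayService: GatewayService) => {
  if (config.gatewayId) {
    const relay = await gatewayService.fnGetGatewayClientTlsByGatewayId(config.gatewayId);
    // A real validator would open its probe through relay.relayAddress instead of the host directly.
    console.log(`probing ${config.credentials.host} via relay ${relay.relayAddress}`);
  } else {
    console.log(`probing ${config.credentials.host} directly`);
  }
  return config.credentials;
};

// Hypothetical usage with placeholder values:
void validateCredentials(
  { gatewayId: null, credentials: { host: "db.example.com", port: 5432, username: "app", password: "secret" } },
  {
    fnGetGatewayClientTlsByGatewayId: async () => ({
      relayAddress: "relay.example.com:8443",
      certChain: "",
      certificate: "",
      privateKey: ""
    })
  }
);
```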
@@ -9,6 +9,7 @@ import { getAppConnectionMethodName } from "@app/services/app-connection/app-con
|
||||
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import { GithubTokenRespData, isGithubErrorResponse } from "../github/github-connection-fns";
|
||||
import { GitHubRadarConnectionMethod } from "./github-radar-connection-enums";
|
||||
import {
|
||||
TGitHubRadarConnection,
|
||||
@@ -71,13 +72,6 @@ export const listGitHubRadarRepositories = async (appConnection: TGitHubRadarCon
|
||||
return repositories;
|
||||
};
|
||||
|
||||
type TokenRespData = {
|
||||
access_token: string;
|
||||
scope: string;
|
||||
token_type: string;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRadarConnectionConfig) => {
|
||||
const { credentials, method } = config;
|
||||
|
||||
@@ -93,10 +87,10 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
|
||||
});
|
||||
}
|
||||
|
||||
let tokenResp: AxiosResponse<TokenRespData>;
|
||||
let tokenResp: AxiosResponse<GithubTokenRespData>;
|
||||
|
||||
try {
|
||||
tokenResp = await request.get<TokenRespData>("https://github.com/login/oauth/access_token", {
|
||||
tokenResp = await request.get<GithubTokenRespData>("https://github.com/login/oauth/access_token", {
|
||||
params: {
|
||||
client_id: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID,
|
||||
client_secret: INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET,
|
||||
@@ -108,19 +102,27 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
});
|
||||
|
||||
if (isGithubErrorResponse(tokenResp?.data)) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate credentials: GitHub responded with an error: ${tokenResp.data.error} - ${tokenResp.data.error_description}`
|
||||
});
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof BadRequestError) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
});
|
||||
}
|
||||
|
||||
if (tokenResp.status !== 200) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate credentials: GitHub responded with a status code of ${tokenResp.status} (${tokenResp.statusText}). Verify credentials and try again.`
|
||||
});
|
||||
}
|
||||
|
||||
if (method === GitHubRadarConnectionMethod.App) {
|
||||
if (!tokenResp.data.access_token) {
|
||||
throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
|
||||
}
|
||||
|
||||
const installationsResp = await request.get<{
|
||||
installations: {
|
||||
id: number;
|
||||
@@ -149,10 +151,6 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
|
||||
}
|
||||
}
|
||||
|
||||
if (!tokenResp.data.access_token) {
|
||||
throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
|
||||
}
|
||||
|
||||
switch (method) {
|
||||
case GitHubRadarConnectionMethod.App:
|
||||
return {
|
||||
|
@@ -144,13 +144,21 @@ export const getGitHubEnvironments = async (appConnection: TGitHubConnection, ow
|
||||
}
|
||||
};
|
||||
|
||||
type TokenRespData = {
|
||||
access_token: string;
|
||||
export type GithubTokenRespData = {
|
||||
access_token?: string;
|
||||
scope: string;
|
||||
token_type: string;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
export function isGithubErrorResponse(data: GithubTokenRespData): data is GithubTokenRespData & {
|
||||
error: string;
|
||||
error_description: string;
|
||||
error_uri: string;
|
||||
} {
|
||||
return "error" in data;
|
||||
}
|
||||
|
||||
export const validateGitHubConnectionCredentials = async (config: TGitHubConnectionConfig) => {
|
||||
const { credentials, method } = config;
|
||||
|
||||
@@ -183,10 +191,10 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
});
|
||||
}
|
||||
|
||||
let tokenResp: AxiosResponse<TokenRespData>;
|
||||
let tokenResp: AxiosResponse<GithubTokenRespData>;
|
||||
|
||||
try {
|
||||
tokenResp = await request.get<TokenRespData>("https://github.com/login/oauth/access_token", {
|
||||
tokenResp = await request.get<GithubTokenRespData>("https://github.com/login/oauth/access_token", {
|
||||
params: {
|
||||
client_id: clientId,
|
||||
client_secret: clientSecret,
|
||||
@@ -198,7 +206,17 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
"Accept-Encoding": "application/json"
|
||||
}
|
||||
});
|
||||
|
||||
if (isGithubErrorResponse(tokenResp?.data)) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate credentials: GitHub responded with an error: ${tokenResp.data.error} - ${tokenResp.data.error_description}`
|
||||
});
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof BadRequestError) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: verify credentials`
|
||||
});
|
||||
@@ -211,6 +229,10 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
}
|
||||
|
||||
if (method === GitHubConnectionMethod.App) {
|
||||
if (!tokenResp.data.access_token) {
|
||||
throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
|
||||
}
|
||||
|
||||
const installationsResp = await request.get<{
|
||||
installations: {
|
||||
id: number;
|
||||
@@ -239,10 +261,6 @@ export const validateGitHubConnectionCredentials = async (config: TGitHubConnect
|
||||
}
|
||||
}
|
||||
|
||||
if (!tokenResp.data.access_token) {
|
||||
throw new InternalServerError({ message: `Missing access token: ${tokenResp.data.error}` });
|
||||
}
|
||||
|
||||
switch (method) {
|
||||
case GitHubConnectionMethod.App:
|
||||
return {
|
||||
|
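A small self-contained sketch of how the shared `GithubTokenRespData` shape and `isGithubErrorResponse` guard introduced in this hunk are meant to be used. The type and guard are re-declared locally so the example stands alone, and the response value is hard-coded for illustration:

```ts
type GithubTokenRespData = {
  access_token?: string;
  scope: string;
  token_type: string;
  error?: string;
};

function isGithubErrorResponse(data: GithubTokenRespData): data is GithubTokenRespData & {
  error: string;
  error_description: string;
} {
  return "error" in data;
}

// GitHub can answer an OAuth token exchange with HTTP 200 but an error body,
// so the guard, not the status code, decides whether to surface an error.
const resp: GithubTokenRespData = { scope: "", token_type: "", error: "bad_verification_code" };

if (isGithubErrorResponse(resp)) {
  console.log(`GitHub error: ${resp.error}`);
} else {
  console.log(`token: ${resp.access_token ?? "<missing>"}`);
}
```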
@@ -49,7 +49,10 @@ export const ValidateMsSqlConnectionCredentialsSchema = z.discriminatedUnion("me
]);

export const CreateMsSqlConnectionSchema = ValidateMsSqlConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.MsSql, { supportsPlatformManagedCredentials: true })
GenericCreateAppConnectionFieldsSchema(AppConnection.MsSql, {
supportsPlatformManagedCredentials: true,
supportsGateways: true
})
);

export const UpdateMsSqlConnectionSchema = z
@@ -58,7 +61,12 @@ export const UpdateMsSqlConnectionSchema = z
AppConnections.UPDATE(AppConnection.MsSql).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.MsSql, { supportsPlatformManagedCredentials: true }));
.and(
GenericUpdateAppConnectionFieldsSchema(AppConnection.MsSql, {
supportsPlatformManagedCredentials: true,
supportsGateways: true
})
);

export const MsSqlConnectionListItemSchema = z.object({
name: z.literal("Microsoft SQL Server"),
@@ -47,7 +47,10 @@ export const ValidateMySqlConnectionCredentialsSchema = z.discriminatedUnion("me
]);

export const CreateMySqlConnectionSchema = ValidateMySqlConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.MySql, { supportsPlatformManagedCredentials: true })
GenericCreateAppConnectionFieldsSchema(AppConnection.MySql, {
supportsPlatformManagedCredentials: true,
supportsGateways: true
})
);

export const UpdateMySqlConnectionSchema = z
@@ -56,7 +59,12 @@ export const UpdateMySqlConnectionSchema = z
AppConnections.UPDATE(AppConnection.MySql).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.MySql, { supportsPlatformManagedCredentials: true }));
.and(
GenericUpdateAppConnectionFieldsSchema(AppConnection.MySql, {
supportsPlatformManagedCredentials: true,
supportsGateways: true
})
);

export const MySqlConnectionListItemSchema = z.object({
name: z.literal("MySQL"),
backend/src/services/app-connection/okta/index.ts (new file, +4)
@@ -0,0 +1,4 @@
export * from "./okta-connection-enums";
export * from "./okta-connection-fns";
export * from "./okta-connection-schemas";
export * from "./okta-connection-types";
@@ -0,0 +1,3 @@
export enum OktaConnectionMethod {
  ApiToken = "api-token"
}
@@ -0,0 +1,57 @@
import { request } from "@app/lib/config/request";
import { UnauthorizedError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

import { OktaConnectionMethod } from "./okta-connection-enums";
import { TOktaApp, TOktaConnection, TOktaConnectionConfig } from "./okta-connection-types";

export const getOktaConnectionListItem = () => {
  return {
    name: "Okta" as const,
    app: AppConnection.Okta as const,
    methods: Object.values(OktaConnectionMethod) as [OktaConnectionMethod.ApiToken]
  };
};

export const getOktaInstanceUrl = async (config: TOktaConnectionConfig) => {
  const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
  await blockLocalAndPrivateIpAddresses(instanceUrl);
  return instanceUrl;
};

export const validateOktaConnectionCredentials = async (config: TOktaConnectionConfig) => {
  const { apiToken } = config.credentials;
  const instanceUrl = await getOktaInstanceUrl(config);

  try {
    await request.get(`${instanceUrl}/api/v1/users/me`, {
      headers: {
        Accept: "application/json",
        Authorization: `SSWS ${apiToken}`
      },
      validateStatus: (status) => status === 200
    });
  } catch (error: unknown) {
    throw new UnauthorizedError({
      message: "Unable to validate connection: invalid credentials"
    });
  }

  return config.credentials;
};

export const listOktaApps = async (appConnection: TOktaConnection) => {
  const { apiToken } = appConnection.credentials;
  const instanceUrl = await getOktaInstanceUrl(appConnection);

  const { data } = await request.get<TOktaApp[]>(`${instanceUrl}/api/v1/apps`, {
    headers: {
      Accept: "application/json",
      Authorization: `SSWS ${apiToken}`
    }
  });

  return data.filter((app) => app.status === "ACTIVE" && app.name === "oidc_client");
};
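As a standalone illustration of the probe `validateOktaConnectionCredentials` performs above, here is a hedged sketch using plain axios (the in-repo `request` helper is assumed to be an axios-compatible client); the instance URL and token below are placeholders:

```ts
import axios from "axios";

// Okta API tokens are sent with the "SSWS" scheme.
// A 200 from /api/v1/users/me is treated as proof the token is valid.
const probeOktaToken = async (instanceUrl: string, apiToken: string): Promise<boolean> => {
  try {
    await axios.get(`${instanceUrl.replace(/\/+$/, "")}/api/v1/users/me`, {
      headers: {
        Accept: "application/json",
        Authorization: `SSWS ${apiToken}`
      },
      // Anything other than 200 rejects and is treated as an invalid credential.
      validateStatus: (status) => status === 200
    });
    return true;
  } catch {
    return false;
  }
};

// Usage with placeholder values:
void probeOktaToken("https://example.okta.com", "00placeholderplaceholderplaceholderplace12");
```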
@@ -0,0 +1,69 @@
|
||||
import RE2 from "re2";
|
||||
import z from "zod";
|
||||
|
||||
import { AppConnections } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseAppConnectionSchema,
|
||||
GenericCreateAppConnectionFieldsSchema,
|
||||
GenericUpdateAppConnectionFieldsSchema
|
||||
} from "@app/services/app-connection/app-connection-schemas";
|
||||
|
||||
import { OktaConnectionMethod } from "./okta-connection-enums";
|
||||
|
||||
export const OktaConnectionApiTokenCredentialsSchema = z.object({
|
||||
instanceUrl: z
|
||||
.string()
|
||||
.trim()
|
||||
.url("Invalid Instance URL")
|
||||
.min(1, "Instance URL required")
|
||||
.max(255)
|
||||
.describe(AppConnections.CREDENTIALS.OKTA.instanceUrl),
|
||||
apiToken: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "API Token required")
|
||||
.refine((value) => new RE2("^00[a-zA-Z0-9_-]{40}$").test(value), "Invalid Okta API Token format")
|
||||
.describe(AppConnections.CREDENTIALS.OKTA.apiToken)
|
||||
});
|
||||
|
||||
const BaseOktaConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Okta) });
|
||||
|
||||
export const OktaConnectionSchema = BaseOktaConnectionSchema.extend({
|
||||
method: z.literal(OktaConnectionMethod.ApiToken),
|
||||
credentials: OktaConnectionApiTokenCredentialsSchema
|
||||
});
|
||||
|
||||
export const SanitizedOktaConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseOktaConnectionSchema.extend({
|
||||
method: z.literal(OktaConnectionMethod.ApiToken),
|
||||
credentials: OktaConnectionApiTokenCredentialsSchema.pick({
|
||||
instanceUrl: true
|
||||
})
|
||||
})
|
||||
]);
|
||||
|
||||
export const ValidateOktaConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: z.literal(OktaConnectionMethod.ApiToken).describe(AppConnections.CREATE(AppConnection.Okta).method),
|
||||
credentials: OktaConnectionApiTokenCredentialsSchema.describe(AppConnections.CREATE(AppConnection.Okta).credentials)
|
||||
})
|
||||
]);
|
||||
|
||||
export const CreateOktaConnectionSchema = ValidateOktaConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.Okta)
|
||||
);
|
||||
|
||||
export const UpdateOktaConnectionSchema = z
|
||||
.object({
|
||||
credentials: OktaConnectionApiTokenCredentialsSchema.optional().describe(
|
||||
AppConnections.UPDATE(AppConnection.Okta).credentials
|
||||
)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Okta));
|
||||
|
||||
export const OktaConnectionListItemSchema = z.object({
|
||||
name: z.literal("Okta"),
|
||||
app: z.literal(AppConnection.Okta),
|
||||
methods: z.nativeEnum(OktaConnectionMethod).array()
|
||||
});
|
@@ -0,0 +1,23 @@
import { OrgServiceActor } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import { listOktaApps } from "./okta-connection-fns";
import { TOktaConnection } from "./okta-connection-types";

type TGetAppConnectionFunc = (
  app: AppConnection,
  connectionId: string,
  actor: OrgServiceActor
) => Promise<TOktaConnection>;

export const oktaConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
  const listApps = async (connectionId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.Okta, connectionId, actor);
    const apps = await listOktaApps(appConnection);
    return apps;
  };

  return {
    listApps
  };
};
@@ -0,0 +1,29 @@
import z from "zod";

import { DiscriminativePick } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import {
  CreateOktaConnectionSchema,
  OktaConnectionSchema,
  ValidateOktaConnectionCredentialsSchema
} from "./okta-connection-schemas";

export type TOktaConnection = z.infer<typeof OktaConnectionSchema>;

export type TOktaConnectionInput = z.infer<typeof CreateOktaConnectionSchema> & {
  app: AppConnection.Okta;
};

export type TValidateOktaConnectionCredentialsSchema = typeof ValidateOktaConnectionCredentialsSchema;

export type TOktaConnectionConfig = DiscriminativePick<TOktaConnectionInput, "method" | "app" | "credentials"> & {
  orgId: string;
};

export type TOktaApp = {
  id: string;
  label: string;
  status: "ACTIVE" | "INACTIVE";
  name: "oidc_client"; // "oidc_client" or other types
};
@@ -47,7 +47,10 @@ export const ValidatePostgresConnectionCredentialsSchema = z.discriminatedUnion(
]);

export const CreatePostgresConnectionSchema = ValidatePostgresConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Postgres, { supportsPlatformManagedCredentials: true })
GenericCreateAppConnectionFieldsSchema(AppConnection.Postgres, {
supportsPlatformManagedCredentials: true,
supportsGateways: true
})
);

export const UpdatePostgresConnectionSchema = z
@@ -56,7 +59,12 @@ export const UpdatePostgresConnectionSchema = z
AppConnections.UPDATE(AppConnection.Postgres).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Postgres, { supportsPlatformManagedCredentials: true }));
.and(
GenericUpdateAppConnectionFieldsSchema(AppConnection.Postgres, {
supportsPlatformManagedCredentials: true,
supportsGateways: true
})
);

export const PostgresConnectionListItemSchema = z.object({
name: z.literal("PostgreSQL"),
@@ -1,11 +1,13 @@
|
||||
import knex, { Knex } from "knex";
|
||||
|
||||
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import {
|
||||
TSqlCredentialsRotationGeneratedCredentials,
|
||||
TSqlCredentialsRotationWithConnection
|
||||
} from "@app/ee/services/secret-rotation-v2/shared/sql-credentials/sql-credentials-rotation-types";
|
||||
import { BadRequestError, DatabaseError } from "@app/lib/errors";
|
||||
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import { TAppConnectionRaw, TSqlConnection } from "@app/services/app-connection/app-connection-types";
|
||||
@@ -98,25 +100,80 @@ export const getSqlConnectionClient = async (appConnection: Pick<TSqlConnection,
|
||||
return client;
|
||||
};
|
||||
|
||||
export const validateSqlConnectionCredentials = async (config: TSqlConnectionConfig) => {
|
||||
const { credentials, app } = config;
|
||||
export const executeWithPotentialGateway = async <T>(
|
||||
config: TSqlConnectionConfig,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
|
||||
operation: (client: Knex) => Promise<T>
|
||||
): Promise<T> => {
|
||||
const { credentials, app, gatewayId } = config;
|
||||
|
||||
let client: Knex | undefined;
|
||||
if (gatewayId && gatewayService) {
|
||||
const [targetHost] = await verifyHostInputValidity(credentials.host, true);
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
|
||||
return withGatewayProxy(
|
||||
async (proxyPort) => {
|
||||
const client = knex({
|
||||
client: SQL_CONNECTION_CLIENT_MAP[app],
|
||||
connection: {
|
||||
database: credentials.database,
|
||||
port: proxyPort,
|
||||
host: "localhost",
|
||||
user: credentials.username,
|
||||
password: credentials.password,
|
||||
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
|
||||
...getConnectionConfig({ app, credentials })
|
||||
}
|
||||
});
|
||||
try {
|
||||
return await operation(client);
|
||||
} finally {
|
||||
await client.destroy();
|
||||
}
|
||||
},
|
||||
{
|
||||
protocol: GatewayProxyProtocol.Tcp,
|
||||
targetHost,
|
||||
targetPort: credentials.port,
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey.toString()
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Non-gateway path
|
||||
const client = await getSqlConnectionClient({ app, credentials });
|
||||
try {
|
||||
client = await getSqlConnectionClient({ app, credentials });
|
||||
return await operation(client);
|
||||
} finally {
|
||||
await client.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
await client.raw(`Select 1`);
|
||||
|
||||
return credentials;
|
||||
export const validateSqlConnectionCredentials = async (
|
||||
config: TSqlConnectionConfig,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
try {
|
||||
await executeWithPotentialGateway(config, gatewayService, async (client) => {
|
||||
await client.raw(`Select 1`);
|
||||
});
|
||||
return config.credentials;
|
||||
} catch (error) {
|
||||
throw new BadRequestError({
|
||||
message: `Unable to validate connection: ${
|
||||
(error as Error)?.message?.replaceAll(credentials.password, "********************") ?? "verify credentials"
|
||||
(error as Error)?.message?.replaceAll(config.credentials.password, "********************") ??
|
||||
"verify credentials"
|
||||
}`
|
||||
});
|
||||
} finally {
|
||||
await client?.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -132,22 +189,23 @@ export const SQL_CONNECTION_ALTER_LOGIN_STATEMENT: Record<
|
||||
|
||||
export const transferSqlConnectionCredentialsToPlatform = async (
|
||||
config: TSqlConnectionConfig,
|
||||
callback: (credentials: TSqlConnectionConfig["credentials"]) => Promise<TAppConnectionRaw>
|
||||
callback: (credentials: TSqlConnectionConfig["credentials"]) => Promise<TAppConnectionRaw>,
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
|
||||
) => {
|
||||
const { credentials, app } = config;
|
||||
|
||||
const client = await getSqlConnectionClient({ app, credentials });
|
||||
|
||||
const newPassword = alphaNumericNanoId(32);
|
||||
|
||||
try {
|
||||
return await client.transaction(async (tx) => {
|
||||
await tx.raw(
|
||||
...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[app]({ username: credentials.username, password: newPassword })
|
||||
);
|
||||
return callback({
|
||||
...credentials,
|
||||
password: newPassword
|
||||
return await executeWithPotentialGateway(config, gatewayService, (client) => {
|
||||
return client.transaction(async (tx) => {
|
||||
await tx.raw(
|
||||
...SQL_CONNECTION_ALTER_LOGIN_STATEMENT[app]({ username: credentials.username, password: newPassword })
|
||||
);
|
||||
return callback({
|
||||
...credentials,
|
||||
password: newPassword
|
||||
});
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
@@ -161,7 +219,5 @@ export const transferSqlConnectionCredentialsToPlatform = async (
|
||||
(error as Error)?.message?.replaceAll(newPassword, "********************") ??
|
||||
"Encountered an error transferring credentials to platform"
|
||||
});
|
||||
} finally {
|
||||
await client.destroy();
|
||||
}
|
||||
};
|
||||
|
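A hedged sketch of the calling pattern the new `executeWithPotentialGateway` helper enables: the caller hands over only the Knex operation and the helper decides whether to run it directly or through the gateway relay. The types below are local stand-ins so the example is self-contained; the real ones live in the SQL connection modules shown above:

```ts
import type { Knex } from "knex";

// Minimal stand-in types for the sketch.
type SqlConnectionConfig = {
  app: "postgres" | "mysql" | "mssql";
  gatewayId?: string | null;
  credentials: { host: string; port: number; database: string; username: string; password: string };
};

type GatewayService = {
  fnGetGatewayClientTlsByGatewayId: (gatewayId: string) => Promise<unknown>;
};

type ExecuteWithPotentialGateway = <T>(
  config: SqlConnectionConfig,
  gatewayService: GatewayService,
  operation: (client: Knex) => Promise<T>
) => Promise<T>;

// Callers no longer branch on gatewayId themselves: they pass the operation and let the
// helper decide whether to open a TCP proxy through the gateway relay first.
const probeConnection = async (
  executeWithPotentialGateway: ExecuteWithPotentialGateway,
  config: SqlConnectionConfig,
  gatewayService: GatewayService
) => {
  await executeWithPotentialGateway(config, gatewayService, async (client) => {
    await client.raw("Select 1"); // same check the credential validator above runs
  });
};
```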
@@ -1,6 +1,9 @@
import { DiscriminativePick } from "@app/lib/types";
import { TSqlConnectionInput } from "@app/services/app-connection/app-connection-types";

export type TSqlConnectionConfig = DiscriminativePick<TSqlConnectionInput, "method" | "app" | "credentials"> & {
export type TSqlConnectionConfig = DiscriminativePick<
TSqlConnectionInput,
"method" | "app" | "credentials" | "gatewayId"
> & {
orgId: string;
};
backend/src/services/app-connection/supabase/index.ts (new file, +4)
@@ -0,0 +1,4 @@
export * from "./supabase-connection-constants";
export * from "./supabase-connection-fns";
export * from "./supabase-connection-schemas";
export * from "./supabase-connection-types";

@@ -0,0 +1,3 @@
export enum SupabaseConnectionMethod {
  AccessToken = "access-token"
}
@@ -0,0 +1,58 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";

import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";

import { SupabaseConnectionMethod } from "./supabase-connection-constants";
import { SupabasePublicAPI } from "./supabase-connection-public-client";
import { TSupabaseConnection, TSupabaseConnectionConfig } from "./supabase-connection-types";

export const getSupabaseConnectionListItem = () => {
  return {
    name: "Supabase" as const,
    app: AppConnection.Supabase as const,
    methods: Object.values(SupabaseConnectionMethod)
  };
};

export const validateSupabaseConnectionCredentials = async (config: TSupabaseConnectionConfig) => {
  const { credentials } = config;

  try {
    await SupabasePublicAPI.healthcheck(config);
  } catch (error: unknown) {
    if (error instanceof AxiosError) {
      throw new BadRequestError({
        message: `Failed to validate credentials: ${error.message || "Unknown error"}`
      });
    }

    throw new BadRequestError({
      message: "Unable to validate connection - verify credentials"
    });
  }

  return credentials;
};

export const listProjects = async (appConnection: TSupabaseConnection) => {
  try {
    return await SupabasePublicAPI.getProjects(appConnection);
  } catch (error: unknown) {
    if (error instanceof AxiosError) {
      throw new BadRequestError({
        message: `Failed to list projects: ${error.message || "Unknown error"}`
      });
    }

    if (error instanceof BadRequestError) {
      throw error;
    }

    throw new BadRequestError({
      message: "Unable to list projects",
      error
    });
  }
};
@@ -0,0 +1,133 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
/* eslint-disable class-methods-use-this */
|
||||
import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode } from "axios";
|
||||
|
||||
import { createRequestClient } from "@app/lib/config/request";
|
||||
import { delay } from "@app/lib/delay";
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
|
||||
|
||||
import { SupabaseConnectionMethod } from "./supabase-connection-constants";
|
||||
import { TSupabaseConnectionConfig, TSupabaseProject, TSupabaseSecret } from "./supabase-connection-types";
|
||||
|
||||
export const getSupabaseInstanceUrl = async (config: TSupabaseConnectionConfig) => {
|
||||
const instanceUrl = config.credentials.instanceUrl
|
||||
? removeTrailingSlash(config.credentials.instanceUrl)
|
||||
: "https://api.supabase.com";
|
||||
|
||||
await blockLocalAndPrivateIpAddresses(instanceUrl);
|
||||
|
||||
return instanceUrl;
|
||||
};
|
||||
|
||||
export function getSupabaseAuthHeaders(connection: TSupabaseConnectionConfig): Record<string, string> {
|
||||
switch (connection.method) {
|
||||
case SupabaseConnectionMethod.AccessToken:
|
||||
return {
|
||||
Authorization: `Bearer ${connection.credentials.accessKey}`
|
||||
};
|
||||
default:
|
||||
throw new Error(`Unsupported Supabase connection method`);
|
||||
}
|
||||
}
|
||||
|
||||
export function getSupabaseRatelimiter(response: AxiosResponse): {
|
||||
maxAttempts: number;
|
||||
isRatelimited: boolean;
|
||||
wait: () => Promise<void>;
|
||||
} {
|
||||
const wait = () => {
|
||||
return delay(60 * 1000);
|
||||
};
|
||||
|
||||
return {
|
||||
isRatelimited: response.status === HttpStatusCode.TooManyRequests,
|
||||
wait,
|
||||
maxAttempts: 3
|
||||
};
|
||||
}
|
||||
|
||||
class SupabasePublicClient {
|
||||
private client: AxiosInstance;
|
||||
|
||||
constructor() {
|
||||
this.client = createRequestClient({
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async send<T>(
|
||||
connection: TSupabaseConnectionConfig,
|
||||
config: AxiosRequestConfig,
|
||||
retryAttempt = 0
|
||||
): Promise<T | undefined> {
|
||||
const response = await this.client.request<T>({
|
||||
...config,
|
||||
baseURL: await getSupabaseInstanceUrl(connection),
|
||||
validateStatus: (status) => (status >= 200 && status < 300) || status === HttpStatusCode.TooManyRequests,
|
||||
headers: getSupabaseAuthHeaders(connection)
|
||||
});
|
||||
|
||||
const limiter = getSupabaseRatelimiter(response);
|
||||
|
||||
if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
|
||||
await limiter.wait();
|
||||
return this.send(connection, config, retryAttempt + 1);
|
||||
}
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
async healthcheck(connection: TSupabaseConnectionConfig) {
|
||||
switch (connection.method) {
|
||||
case SupabaseConnectionMethod.AccessToken:
|
||||
return void (await this.getProjects(connection));
|
||||
default:
|
||||
throw new Error(`Unsupported Supabase connection method`);
|
||||
}
|
||||
}
|
||||
|
||||
async getVariables(connection: TSupabaseConnectionConfig, projectRef: string) {
|
||||
const res = await this.send<TSupabaseSecret[]>(connection, {
|
||||
method: "GET",
|
||||
url: `/v1/projects/${projectRef}/secrets`
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// Supabase does not support updating variables directly
|
||||
// Instead, just call create again with the same key and it will overwrite the existing variable
|
||||
async createVariables(connection: TSupabaseConnectionConfig, projectRef: string, ...variables: TSupabaseSecret[]) {
|
||||
const res = await this.send<TSupabaseSecret>(connection, {
|
||||
method: "POST",
|
||||
url: `/v1/projects/${projectRef}/secrets`,
|
||||
data: variables
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async deleteVariables(connection: TSupabaseConnectionConfig, projectRef: string, ...variables: string[]) {
|
||||
const res = await this.send(connection, {
|
||||
method: "DELETE",
|
||||
url: `/v1/projects/${projectRef}/secrets`,
|
||||
data: variables
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async getProjects(connection: TSupabaseConnectionConfig) {
|
||||
const res = await this.send<TSupabaseProject[]>(connection, {
|
||||
method: "GET",
|
||||
url: `/v1/projects`
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
export const SupabasePublicAPI = new SupabasePublicClient();
|
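The client above retries a request up to three times when Supabase answers 429, waiting a minute between attempts. A generic, self-contained sketch of that retry shape follows; the one-minute delay and three-attempt cap mirror the values above, everything else is illustrative:

```ts
import axios, { AxiosRequestConfig, AxiosResponse, HttpStatusCode } from "axios";

const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Retry a request while the server keeps answering 429, up to maxAttempts extra tries.
const sendWithRatelimitRetry = async <T>(
  config: AxiosRequestConfig,
  retryAttempt = 0,
  maxAttempts = 3
): Promise<T | undefined> => {
  const response: AxiosResponse<T> = await axios.request<T>({
    ...config,
    // Treat 429 as a non-error status so it reaches the retry logic below.
    validateStatus: (status) => (status >= 200 && status < 300) || status === HttpStatusCode.TooManyRequests
  });

  if (response.status === HttpStatusCode.TooManyRequests && retryAttempt <= maxAttempts) {
    await delay(60 * 1000); // back off for a minute, as the client above does
    return sendWithRatelimitRetry<T>(config, retryAttempt + 1, maxAttempts);
  }

  return response.data;
};
```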
@@ -0,0 +1,70 @@
|
||||
import z from "zod";
|
||||
|
||||
import { AppConnections } from "@app/lib/api-docs";
|
||||
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
|
||||
import {
|
||||
BaseAppConnectionSchema,
|
||||
GenericCreateAppConnectionFieldsSchema,
|
||||
GenericUpdateAppConnectionFieldsSchema
|
||||
} from "@app/services/app-connection/app-connection-schemas";
|
||||
|
||||
import { SupabaseConnectionMethod } from "./supabase-connection-constants";
|
||||
|
||||
export const SupabaseConnectionMethodSchema = z
|
||||
.nativeEnum(SupabaseConnectionMethod)
|
||||
.describe(AppConnections.CREATE(AppConnection.Supabase).method);
|
||||
|
||||
export const SupabaseConnectionAccessTokenCredentialsSchema = z.object({
|
||||
accessKey: z
|
||||
.string()
|
||||
.trim()
|
||||
.min(1, "Access Key required")
|
||||
.max(255)
|
||||
.describe(AppConnections.CREDENTIALS.SUPABASE.accessKey),
|
||||
instanceUrl: z.string().trim().url().max(255).describe(AppConnections.CREDENTIALS.SUPABASE.instanceUrl).optional()
|
||||
});
|
||||
|
||||
const BaseSupabaseConnectionSchema = BaseAppConnectionSchema.extend({
|
||||
app: z.literal(AppConnection.Supabase)
|
||||
});
|
||||
|
||||
export const SupabaseConnectionSchema = BaseSupabaseConnectionSchema.extend({
|
||||
method: SupabaseConnectionMethodSchema,
|
||||
credentials: SupabaseConnectionAccessTokenCredentialsSchema
|
||||
});
|
||||
|
||||
export const SanitizedSupabaseConnectionSchema = z.discriminatedUnion("method", [
|
||||
BaseSupabaseConnectionSchema.extend({
|
||||
method: SupabaseConnectionMethodSchema,
|
||||
credentials: SupabaseConnectionAccessTokenCredentialsSchema.pick({
|
||||
instanceUrl: true
|
||||
})
|
||||
})
|
||||
]);
|
||||
|
||||
export const ValidateSupabaseConnectionCredentialsSchema = z.discriminatedUnion("method", [
|
||||
z.object({
|
||||
method: SupabaseConnectionMethodSchema,
|
||||
credentials: SupabaseConnectionAccessTokenCredentialsSchema.describe(
|
||||
AppConnections.CREATE(AppConnection.Supabase).credentials
|
||||
)
|
||||
})
|
||||
]);
|
||||
|
||||
export const CreateSupabaseConnectionSchema = ValidateSupabaseConnectionCredentialsSchema.and(
|
||||
GenericCreateAppConnectionFieldsSchema(AppConnection.Supabase)
|
||||
);
|
||||
|
||||
export const UpdateSupabaseConnectionSchema = z
|
||||
.object({
|
||||
credentials: SupabaseConnectionAccessTokenCredentialsSchema.optional().describe(
|
||||
AppConnections.UPDATE(AppConnection.Supabase).credentials
|
||||
)
|
||||
})
|
||||
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Supabase));
|
||||
|
||||
export const SupabaseConnectionListItemSchema = z.object({
|
||||
name: z.literal("Supabase"),
|
||||
app: z.literal(AppConnection.Supabase),
|
||||
methods: z.nativeEnum(SupabaseConnectionMethod).array()
|
||||
});
|
@@ -0,0 +1,30 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";

import { AppConnection } from "../app-connection-enums";
import { listProjects as getSupabaseProjects } from "./supabase-connection-fns";
import { TSupabaseConnection } from "./supabase-connection-types";

type TGetAppConnectionFunc = (
  app: AppConnection,
  connectionId: string,
  actor: OrgServiceActor
) => Promise<TSupabaseConnection>;

export const supabaseConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
  const listProjects = async (connectionId: string, actor: OrgServiceActor) => {
    const appConnection = await getAppConnection(AppConnection.Supabase, connectionId, actor);
    try {
      const projects = await getSupabaseProjects(appConnection);

      return projects ?? [];
    } catch (error) {
      logger.error(error, "Failed to establish connection with Supabase");
      return [];
    }
  };

  return {
    listProjects
  };
};
@@ -0,0 +1,44 @@
|
||||
import z from "zod";
|
||||
|
||||
import { DiscriminativePick } from "@app/lib/types";
|
||||
|
||||
import { AppConnection } from "../app-connection-enums";
|
||||
import {
|
||||
CreateSupabaseConnectionSchema,
|
||||
SupabaseConnectionSchema,
|
||||
ValidateSupabaseConnectionCredentialsSchema
|
||||
} from "./supabase-connection-schemas";
|
||||
|
||||
export type TSupabaseConnection = z.infer<typeof SupabaseConnectionSchema>;
|
||||
|
||||
export type TSupabaseConnectionInput = z.infer<typeof CreateSupabaseConnectionSchema> & {
|
||||
app: AppConnection.Supabase;
|
||||
};
|
||||
|
||||
export type TValidateSupabaseConnectionCredentialsSchema = typeof ValidateSupabaseConnectionCredentialsSchema;
|
||||
|
||||
export type TSupabaseConnectionConfig = DiscriminativePick<TSupabaseConnection, "method" | "app" | "credentials"> & {
|
||||
orgId: string;
|
||||
};
|
||||
|
||||
export type TSupabaseProject = {
|
||||
id: string;
|
||||
organization_id: string;
|
||||
name: string;
|
||||
region: string;
|
||||
created_at: Date;
|
||||
status: string;
|
||||
database: TSupabaseDatabase;
|
||||
};
|
||||
|
||||
type TSupabaseDatabase = {
|
||||
host: string;
|
||||
version: string;
|
||||
postgres_engine: string;
|
||||
release_channel: string;
|
||||
};
|
||||
|
||||
export type TSupabaseSecret = {
|
||||
name: string;
|
||||
value: string;
|
||||
};
|
@@ -218,7 +218,7 @@ export const certificateAuthorityDALFactory = (db: TDbClient) => {
};

const findWithAssociatedCa = async (
filter: Parameters<(typeof caOrm)["find"]>[0] & { dn?: string; type?: string },
filter: Parameters<(typeof caOrm)["find"]>[0] & { dn?: string; type?: string; serialNumber?: string },
{ offset, limit, sort = [["createdAt", "desc"]] }: TFindOpt<TCertificateAuthorities> = {},
tx?: Knex
) => {
@@ -1068,11 +1068,11 @@ export const internalCertificateAuthorityServiceFactory = ({
throw new BadRequestError({ message: "Invalid certificate chain" });

const parentCertObj = chainItems[1];
const parentCertSubject = parentCertObj.subject;
const parentSerialNumber = parentCertObj.serialNumber;

const [parentCa] = await certificateAuthorityDAL.findWithAssociatedCa({
[`${TableName.CertificateAuthority}.projectId` as "projectId"]: ca.projectId,
[`${TableName.InternalCertificateAuthority}.dn` as "dn"]: parentCertSubject
[`${TableName.InternalCertificateAuthority}.serialNumber` as "serialNumber"]: parentSerialNumber
});

const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
@@ -47,6 +47,14 @@ export enum ResourceType {
FOLDER = "folder"
}

export type TCommitResourceChangeDTO = {
type: string;
secretVersionId?: string;
folderVersionId?: string;
isUpdate?: boolean;
folderId?: string;
};

type TCreateCommitDTO = {
actor: {
type: string;
@@ -57,13 +65,7 @@ type TCreateCommitDTO = {
};
message?: string;
folderId: string;
changes: {
type: string;
secretVersionId?: string;
folderVersionId?: string;
isUpdate?: boolean;
folderId?: string;
}[];
changes: TCommitResourceChangeDTO[];
omitIgnoreFilter?: boolean;
};
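The new `TCommitResourceChangeDTO` above is what the folder service hunks further down push into when a caller wants one batched commit instead of a commit per operation. A simplified sketch of that accumulate-or-commit-now pattern, using stand-in types rather than the real service factories:

```ts
// Stand-ins for the real DTOs/services, just to show the control flow.
type CommitResourceChange = {
  type: string;
  folderVersionId?: string;
  secretVersionId?: string;
  isUpdate?: boolean;
  folderId?: string;
};

type FolderCommitService = {
  createCommit: (commit: { message: string; folderId: string; changes: CommitResourceChange[] }) => Promise<void>;
};

// Each operation either records its change for the caller or commits on its own.
const recordFolderVersion = async (
  folderId: string,
  folderVersionId: string,
  folderCommitService: FolderCommitService,
  commitChanges?: CommitResourceChange[]
) => {
  const change: CommitResourceChange = { type: "add", isUpdate: true, folderVersionId };

  if (commitChanges) {
    // Batch mode: the caller owns the transaction and commits everything at once later.
    commitChanges.push(change);
  } else {
    // Standalone mode: commit immediately, as the service does when no array is passed.
    await folderCommitService.createCommit({ message: "Folder updated", folderId, changes: [change] });
  }
};
```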
@@ -37,7 +37,7 @@ export const validateAccountIds = z
export const validatePrincipalArns = z
.string()
.trim()
.max(2048)
.max(4096)
.default("")
// Custom validation for ARN format
.refine(
@@ -6,7 +6,8 @@ export type TLoginOciAuthDTO = {
headers: {
authorization: string;
host: string;
"x-date": string;
"x-date"?: string;
date?: string;
};
};
@@ -1,4 +1,6 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
import { Knex } from "knex";
|
||||
import path from "path";
|
||||
import { v4 as uuidv4, validate as uuidValidate } from "uuid";
|
||||
|
||||
@@ -12,14 +14,21 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
|
||||
import { buildFolderPath } from "@app/services/secret-folder/secret-folder-fns";
|
||||
|
||||
import { ChangeType, CommitType, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
|
||||
import {
|
||||
ChangeType,
|
||||
CommitType,
|
||||
TCommitResourceChangeDTO,
|
||||
TFolderCommitServiceFactory
|
||||
} from "../folder-commit/folder-commit-service";
|
||||
import { TProjectDALFactory } from "../project/project-dal";
|
||||
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
|
||||
import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal";
|
||||
import { TSecretFolderDALFactory } from "./secret-folder-dal";
|
||||
import {
|
||||
TCreateFolderDTO,
|
||||
TCreateManyFoldersDTO,
|
||||
TDeleteFolderDTO,
|
||||
TDeleteManyFoldersDTO,
|
||||
TGetFolderByIdDTO,
|
||||
TGetFolderDTO,
|
||||
TGetFoldersDeepByEnvsDTO,
|
||||
@@ -236,19 +245,29 @@ export const secretFolderServiceFactory = ({
|
||||
actor,
|
||||
actorId,
|
||||
projectSlug,
|
||||
projectId: providedProjectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
folders
|
||||
}: TUpdateManyFoldersDTO) => {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) {
|
||||
throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
folders,
|
||||
tx: providedTx,
|
||||
commitChanges
|
||||
}: TUpdateManyFoldersDTO & { tx?: Knex; commitChanges?: TCommitResourceChangeDTO[]; projectId?: string }) => {
|
||||
let projectId = providedProjectId;
|
||||
if (!projectId && projectSlug) {
|
||||
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
|
||||
if (!project) {
|
||||
throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
|
||||
}
|
||||
projectId = project.id;
|
||||
}
|
||||
if (!projectId) {
|
||||
throw new BadRequestError({ message: "Must provide either project slug or projectId" });
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: project.id,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
@@ -260,12 +279,12 @@ export const secretFolderServiceFactory = ({
|
||||
);
|
||||
});
|
||||
|
||||
const result = await folderDAL.transaction(async (tx) =>
|
||||
Promise.all(
|
||||
const executeBulkUpdate = async (tx: Knex) => {
|
||||
return Promise.all(
|
||||
folders.map(async (newFolder) => {
|
||||
const { environment, path: secretPath, id, name, description } = newFolder;
|
||||
|
||||
const parentFolder = await folderDAL.findBySecretPath(project.id, environment, secretPath);
|
||||
const parentFolder = await folderDAL.findBySecretPath(projectId as string, environment, secretPath, tx);
|
||||
if (!parentFolder) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`,
|
||||
@@ -273,10 +292,10 @@ export const secretFolderServiceFactory = ({
|
||||
});
|
||||
}
|
||||
|
||||
const env = await projectEnvDAL.findOne({ projectId: project.id, slug: environment });
|
||||
const env = await projectEnvDAL.findOne({ projectId, slug: environment }, tx);
|
||||
if (!env) {
|
||||
throw new NotFoundError({
|
||||
message: `Environment with slug '${environment}' in project with ID '${project.id}' not found`,
|
||||
message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`,
|
||||
name: "UpdateManyFolders"
|
||||
});
|
||||
}
|
||||
@@ -323,26 +342,34 @@ export const secretFolderServiceFactory = ({
|
||||
},
|
||||
tx
|
||||
);
|
||||
await folderCommitService.createCommit(
|
||||
{
|
||||
actor: {
|
||||
type: actor,
|
||||
metadata: {
|
||||
id: actorId
|
||||
}
|
||||
if (commitChanges) {
|
||||
commitChanges.push({
|
||||
type: CommitType.ADD,
|
||||
isUpdate: true,
|
||||
folderVersionId: folderVersion.id
|
||||
});
|
||||
} else {
|
||||
await folderCommitService.createCommit(
|
||||
{
|
||||
actor: {
|
||||
type: actor,
|
||||
metadata: {
|
||||
id: actorId
|
||||
}
|
||||
},
|
||||
message: "Folder updated",
|
||||
folderId: parentFolder.id,
|
||||
changes: [
|
||||
{
|
||||
type: CommitType.ADD,
|
||||
isUpdate: true,
|
||||
folderVersionId: folderVersion.id
|
||||
}
|
||||
]
|
||||
},
|
||||
message: "Folder updated",
|
||||
folderId: parentFolder.id,
|
||||
changes: [
|
||||
{
|
||||
type: CommitType.ADD,
|
||||
isUpdate: true,
|
||||
folderVersionId: folderVersion.id
|
||||
}
|
||||
]
|
||||
},
|
||||
tx
|
||||
);
|
||||
tx
|
||||
);
|
||||
}
|
||||
if (!doc) {
|
||||
throw new NotFoundError({
|
||||
message: `Failed to update folder with id '${id}', not found`,
|
||||
@@ -352,13 +379,16 @@ export const secretFolderServiceFactory = ({
|
||||
|
||||
return { oldFolder: folder, newFolder: doc };
|
||||
})
|
||||
)
|
||||
);
|
||||
);
|
||||
};
|
||||
|
||||
// Execute with provided transaction or create new one
|
||||
const result = providedTx ? await executeBulkUpdate(providedTx) : await folderDAL.transaction(executeBulkUpdate);
|
||||
|
||||
await Promise.all(result.map(async (res) => snapshotService.performSnapshot(res.newFolder.parentId as string)));
|
||||
|
||||
return {
|
||||
projectId: project.id,
|
||||
projectId,
|
||||
newFolders: result.map((res) => res.newFolder),
|
||||
oldFolders: result.map((res) => res.oldFolder)
|
||||
};
|
||||
@@ -974,6 +1004,361 @@ export const secretFolderServiceFactory = ({
|
||||
}));
|
||||
};
|
||||
|
||||
const createManyFolders = async ({
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
folders,
|
||||
tx: providedTx,
|
||||
commitChanges
|
||||
}: TCreateManyFoldersDTO & { tx?: Knex; commitChanges?: TCommitResourceChangeDTO[] }) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
|
||||
folders.forEach(({ environment, path: secretPath }) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Create,
|
||||
subject(ProjectPermissionSub.SecretFolders, { environment, secretPath })
|
||||
);
|
||||
});
|
||||
|
||||
const foldersByEnv = folders.reduce(
|
||||
(acc, folder) => {
|
||||
if (!acc[folder.environment]) {
|
||||
acc[folder.environment] = [];
|
||||
}
|
||||
acc[folder.environment].push(folder);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, typeof folders>
|
||||
);
|
||||
|
||||
const executeBulkCreate = async (tx: Knex) => {
|
||||
const createdFolders = [];
|
||||
|
||||
for (const [environment, envFolders] of Object.entries(foldersByEnv)) {
|
||||
const env = await projectEnvDAL.findOne({ projectId, slug: environment });
|
||||
if (!env) {
|
||||
throw new NotFoundError({
|
||||
message: `Environment with slug '${environment}' in project with ID '${projectId}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.CreateFolder(env.id, env.projectId)]);
|
||||
|
||||
for (const folderSpec of envFolders) {
|
||||
const { name, path: secretPath, description } = folderSpec;
|
||||
|
||||
const pathWithFolder = path.join(secretPath, name);
|
||||
const parentFolder = await folderDAL.findClosestFolder(projectId, environment, pathWithFolder, tx);
|
||||
|
||||
if (!parentFolder) {
|
||||
throw new NotFoundError({
|
||||
message: `Parent folder for path '${pathWithFolder}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
// Check if the exact folder already exists
|
||||
const existingFolder = await folderDAL.findOne(
|
||||
{
|
||||
envId: env.id,
|
||||
parentId: parentFolder.id,
|
||||
name,
|
||||
isReserved: false
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (existingFolder) {
|
||||
createdFolders.push(existingFolder);
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
// Handle exact folder case
|
||||
if (parentFolder.path === pathWithFolder) {
|
||||
createdFolders.push(parentFolder);
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
let currentParentId = parentFolder.id;
|
||||
|
||||
// Build the full path we need by processing each segment
|
||||
if (parentFolder.path !== secretPath) {
|
||||
const missingSegments = secretPath.substring(parentFolder.path.length).split("/").filter(Boolean);
|
||||
const newFolders: TSecretFoldersInsert[] = [];
|
||||
|
||||
for (const segment of missingSegments) {
|
||||
const existingSegment = await folderDAL.findOne(
|
||||
{
|
||||
name: segment,
|
||||
parentId: currentParentId,
|
||||
envId: env.id,
|
||||
isReserved: false
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (existingSegment) {
|
||||
currentParentId = existingSegment.id;
|
||||
} else {
|
||||
const newFolder = {
|
||||
name: segment,
|
||||
parentId: currentParentId,
|
||||
id: uuidv4(),
|
||||
envId: env.id,
|
||||
version: 1
|
||||
};
|
||||
|
||||
currentParentId = newFolder.id;
|
||||
newFolders.push(newFolder);
|
||||
}
|
||||
}
|
||||
|
||||
if (newFolders.length) {
|
||||
const docs = await folderDAL.insertMany(newFolders, tx);
|
||||
const folderVersions = await folderVersionDAL.insertMany(
|
||||
docs.map((doc) => ({
|
||||
name: doc.name,
|
||||
envId: doc.envId,
|
||||
version: doc.version,
|
||||
folderId: doc.id,
|
||||
description: doc.description
|
||||
})),
|
||||
tx
|
||||
);
|
||||
await folderCommitService.createCommit(
|
||||
{
|
||||
actor: {
|
||||
type: actor,
|
||||
metadata: {
|
||||
id: actorId
|
||||
}
|
||||
},
|
||||
message: "Folders created (batch)",
|
||||
folderId: currentParentId,
|
||||
changes: folderVersions.map((fv) => ({
|
||||
type: CommitType.ADD,
|
||||
folderVersionId: fv.id
|
||||
}))
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Create the target folder
|
||||
const doc = await folderDAL.create(
|
||||
{ name, envId: env.id, version: 1, parentId: currentParentId, description },
|
||||
tx
|
||||
);
|
||||
|
||||
const folderVersion = await folderVersionDAL.create(
|
||||
{
|
||||
name: doc.name,
|
||||
envId: doc.envId,
|
||||
version: doc.version,
|
||||
folderId: doc.id,
|
||||
description: doc.description
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
if (commitChanges) {
|
||||
commitChanges.push({
|
||||
type: CommitType.ADD,
|
||||
folderVersionId: folderVersion.id
|
||||
});
|
||||
} else {
|
||||
await folderCommitService.createCommit(
|
||||
{
|
||||
actor: {
|
||||
type: actor,
|
||||
metadata: {
|
||||
id: actorId
|
||||
}
|
||||
},
|
||||
message: "Folder created (batch)",
|
||||
folderId: doc.id,
|
||||
changes: [
|
||||
{
|
||||
type: CommitType.ADD,
|
||||
folderVersionId: folderVersion.id
|
||||
}
|
||||
]
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
createdFolders.push(doc);
|
||||
}
|
||||
}
|
||||
|
||||
return createdFolders;
|
||||
};
|
||||
const result = providedTx ? await executeBulkCreate(providedTx) : await folderDAL.transaction(executeBulkCreate);
|
||||
const uniqueParentIds = [...new Set(result.map((folder) => folder.parentId).filter(Boolean))];
|
||||
await Promise.all(uniqueParentIds.map((parentId) => snapshotService.performSnapshot(parentId as string)));
|
||||
|
||||
return {
|
||||
folders: result,
|
||||
count: result.length
|
||||
};
|
||||
};
|
||||
|
||||
const deleteManyFolders = async ({
|
||||
projectId,
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
folders,
|
||||
tx: providedTx,
|
||||
commitChanges
|
||||
}: TDeleteManyFoldersDTO & { tx?: Knex; commitChanges?: TCommitResourceChangeDTO[] }) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
});
|
||||
|
||||
folders.forEach(({ environment, path: secretPath }) => {
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Delete,
|
||||
subject(ProjectPermissionSub.SecretFolders, { environment, secretPath })
|
||||
);
|
||||
});
|
||||
|
||||
const foldersByEnv = folders.reduce(
|
||||
(acc, folder) => {
|
||||
if (!acc[folder.environment]) {
|
||||
acc[folder.environment] = [];
|
||||
}
|
||||
acc[folder.environment].push(folder);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, typeof folders>
|
||||
);
|
||||
|
||||
const executeBulkDelete = async (tx: Knex) => {
|
||||
const deletedFolders = [];
|
||||
|
||||
for (const [environment, envFolders] of Object.entries(foldersByEnv)) {
|
||||
const env = await projectEnvDAL.findOne({ projectId, slug: environment });
|
||||
if (!env) {
|
||||
throw new NotFoundError({
|
||||
message: `Environment with slug '${environment}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
for (const folderSpec of envFolders) {
|
||||
const { path: secretPath, idOrName } = folderSpec;
|
||||
|
||||
const parentFolder = await folderDAL.findBySecretPath(projectId, environment, secretPath, tx);
|
||||
if (!parentFolder) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with path '${secretPath}' in environment with slug '${environment}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
await $checkFolderPolicy({ projectId, env, parentId: parentFolder.id, idOrName });
|
||||
|
||||
let folderToDelete = await folderDAL
|
||||
.findOne({
|
||||
envId: env.id,
|
||||
name: idOrName,
|
||||
parentId: parentFolder.id,
|
||||
isReserved: false
|
||||
})
|
||||
.catch(() => null);
|
||||
|
||||
if (!folderToDelete && uuidValidate(idOrName)) {
|
||||
folderToDelete = await folderDAL
|
||||
.findOne({
|
||||
envId: env.id,
|
||||
id: idOrName,
|
||||
parentId: parentFolder.id,
|
||||
isReserved: false
|
||||
})
|
||||
.catch(() => null);
|
||||
}
|
||||
|
||||
if (!folderToDelete) {
|
||||
throw new NotFoundError({
|
||||
message: `Folder with ID/name '${idOrName}' not found`
|
||||
});
|
||||
}
|
||||
|
||||
const [doc] = await folderDAL.delete(
|
||||
{
|
||||
envId: env.id,
|
||||
id: folderToDelete.id,
|
||||
parentId: parentFolder.id,
|
||||
isReserved: false
|
||||
},
|
||||
tx
|
||||
);
|
||||
|
||||
const folderVersions = await folderVersionDAL.findLatestFolderVersions([doc.id], tx);
|
||||
|
||||
if (commitChanges) {
|
||||
commitChanges.push({
|
||||
type: CommitType.DELETE,
|
||||
folderVersionId: folderVersions[doc.id].id,
|
||||
folderId: doc.id
|
||||
});
|
||||
} else {
|
||||
await folderCommitService.createCommit(
|
||||
{
|
||||
actor: {
|
||||
type: actor,
|
||||
metadata: {
|
||||
id: actorId
|
||||
}
|
||||
},
|
||||
message: "Folder deleted (batch)",
|
||||
folderId: parentFolder.id,
|
||||
changes: [
|
||||
{
|
||||
type: CommitType.DELETE,
|
||||
folderVersionId: folderVersions[doc.id].id,
|
||||
folderId: doc.id
|
||||
}
|
||||
]
|
||||
},
|
||||
tx
|
||||
);
|
||||
}
|
||||
|
||||
deletedFolders.push(doc);
|
||||
}
|
||||
}
|
||||
|
||||
return deletedFolders;
|
||||
};
|
||||
|
||||
const result = providedTx ? await executeBulkDelete(providedTx) : await folderDAL.transaction(executeBulkDelete);
|
||||
|
||||
const uniqueParentIds = [...new Set(result.map((folder) => folder.parentId).filter(Boolean))];
|
||||
await Promise.all(uniqueParentIds.map((parentId) => snapshotService.performSnapshot(parentId as string)));
|
||||
|
||||
return {
|
||||
folders: result,
|
||||
count: result.length
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
createFolder,
|
||||
updateFolder,
|
||||
@@ -986,6 +1371,8 @@ export const secretFolderServiceFactory = ({
|
||||
getFoldersDeepByEnvs,
|
||||
getProjectEnvironmentsFolders,
|
||||
getFolderVersionsByIds,
|
||||
getFolderVersions
|
||||
getFolderVersions,
|
||||
createManyFolders,
|
||||
deleteManyFolders
|
||||
};
|
||||
};
|
||||
|
@@ -1,6 +1,8 @@
import { OrderByDirection, TProjectPermission } from "@app/lib/types";
import { SecretsOrderBy } from "@app/services/secret/secret-types";

import { ActorAuthMethod, ActorType } from "../auth/auth-type";

export enum ReservedFolders {
  SecretReplication = "__reserve_replication_"
}

@@ -21,7 +23,7 @@ export type TUpdateFolderDTO = {
} & TProjectPermission;

export type TUpdateManyFoldersDTO = {
  projectSlug: string;
  projectSlug?: string;
  folders: {
    environment: string;
    path: string;

@@ -62,3 +64,30 @@ export type TGetFoldersDeepByEnvsDTO = {
export type TFindFoldersDeepByParentIdsDTO = {
  parentIds: string[];
};

export type TCreateManyFoldersDTO = {
  projectId: string;
  actor: ActorType;
  actorId: string;
  actorAuthMethod: ActorAuthMethod;
  actorOrgId?: string;
  folders: Array<{
    name: string;
    environment: string;
    path: string;
    description?: string | null;
  }>;
};

export type TDeleteManyFoldersDTO = {
  projectId: string;
  actor: ActorType;
  actorId: string;
  actorAuthMethod: ActorAuthMethod;
  actorOrgId?: string;
  folders: Array<{
    environment: string;
    path: string;
    idOrName: string;
  }>;
};
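For reference, hedged example literals matching the two new DTO shapes above; every ID, slug, and enum value below is a placeholder (the real `actor` and `actorAuthMethod` fields use the `ActorType` and `ActorAuthMethod` enums).

```ts
// Placeholder payloads illustrating TCreateManyFoldersDTO and TDeleteManyFoldersDTO.
const createManyFoldersDto = {
  projectId: "proj_123",
  actor: "user", // stands in for an ActorType enum value
  actorId: "user_456",
  actorAuthMethod: "jwt", // stands in for an ActorAuthMethod enum value
  folders: [
    { name: "backend", environment: "dev", path: "/", description: "Backend service secrets" },
    { name: "frontend", environment: "dev", path: "/", description: null }
  ]
};

const deleteManyFoldersDto = {
  projectId: "proj_123",
  actor: "user",
  actorId: "user_456",
  actorAuthMethod: "jwt",
  folders: [{ environment: "dev", path: "/", idOrName: "frontend" }]
};
```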
@@ -174,6 +174,7 @@ export const fnSecretsV2FromImports = async ({
    skipMultilineEncoding?: boolean | null;
    secretPath: string;
    environment: string;
    secretKey: string;
  }) => Promise<string | undefined>;
  hasSecretAccess: (environment: string, secretPath: string, secretName: string, secretTagSlugs: string[]) => boolean;
}) => {

@@ -293,7 +294,8 @@ export const fnSecretsV2FromImports = async ({
  value: decryptedSecret.secretValue,
  secretPath: processedImport.secretPath,
  environment: processedImport.environment,
  skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
  skipMultilineEncoding: decryptedSecret.skipMultilineEncoding,
  secretKey: decryptedSecret.secretKey
});
// eslint-disable-next-line no-param-reassign
processedImport.secrets[index].secretValue = expandedSecretValue || "";
@@ -1,4 +1,6 @@
/* eslint-disable no-await-in-loop */
import { isAxiosError } from "axios";

import { request } from "@app/lib/config/request";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";

@@ -71,7 +73,7 @@ const putEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secr
  );
};

const deleteEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secret: TRenderSecret) => {
const deleteEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, secret: Pick<TRenderSecret, "key">) => {
  const {
    destinationConfig,
    connection: {

@@ -79,15 +81,24 @@ const deleteEnvironmentSecret = async (secretSync: TRenderSyncWithCredentials, s
    }
  } = secretSync;

  await request.delete(
    `${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars/${secret.key}`,
    {
      headers: {
        Authorization: `Bearer ${apiKey}`,
        Accept: "application/json"
  try {
    await request.delete(
      `${IntegrationUrls.RENDER_API_URL}/v1/services/${destinationConfig.serviceId}/env-vars/${secret.key}`,
      {
        headers: {
          Authorization: `Bearer ${apiKey}`,
          Accept: "application/json"
        }
      }
    );
  } catch (error) {
    if (isAxiosError(error) && error.response?.status === 404) {
      // If the secret does not exist, we can ignore this error
      return;
    }
  );

    throw error;
  }
};
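The Render change above makes secret deletion idempotent by swallowing 404 responses from the delete endpoint. A minimal, generic sketch of the same pattern follows; the URL and token are placeholders and the sketch uses plain axios rather than Infisical's internal request wrapper.

```ts
import axios, { isAxiosError } from "axios";

// Generic 404-tolerant delete helper illustrating the pattern used above.
// The endpoint and token are placeholders for illustration.
async function deleteIfExists(url: string, token: string): Promise<void> {
  try {
    await axios.delete(url, {
      headers: { Authorization: `Bearer ${token}`, Accept: "application/json" }
    });
  } catch (error) {
    // A 404 means the resource is already gone, which is the desired end state.
    if (isAxiosError(error) && error.response?.status === 404) return;
    throw error;
  }
}
```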
const sleep = async () =>

@@ -99,6 +110,11 @@ export const RenderSyncFns = {
  syncSecrets: async (secretSync: TRenderSyncWithCredentials, secretMap: TSecretMap) => {
    const renderSecrets = await getRenderEnvironmentSecrets(secretSync);
    for await (const key of Object.keys(secretMap)) {
      // If value is empty skip it as render does not allow empty variables
      if (secretMap[key].value === "") {
        // eslint-disable-next-line no-continue
        continue;
      }
      await putEnvironmentSecret(secretSync, secretMap, key);
      await sleep();
    }
@@ -22,7 +22,7 @@ export enum SecretSync {
  GitLab = "gitlab",
  CloudflarePages = "cloudflare-pages",
  CloudflareWorkers = "cloudflare-workers",
  Supabase = "supabase",
  Zabbix = "zabbix",
  Railway = "railway",
  Checkly = "checkly"
@@ -46,6 +46,7 @@ import { RAILWAY_SYNC_LIST_OPTION } from "./railway/railway-sync-constants";
import { RailwaySyncFns } from "./railway/railway-sync-fns";
import { RENDER_SYNC_LIST_OPTION, RenderSyncFns } from "./render";
import { SECRET_SYNC_PLAN_MAP } from "./secret-sync-maps";
import { SUPABASE_SYNC_LIST_OPTION, SupabaseSyncFns } from "./supabase";
import { TEAMCITY_SYNC_LIST_OPTION, TeamCitySyncFns } from "./teamcity";
import { TERRAFORM_CLOUD_SYNC_LIST_OPTION, TerraformCloudSyncFns } from "./terraform-cloud";
import { VERCEL_SYNC_LIST_OPTION, VercelSyncFns } from "./vercel";

@@ -76,7 +77,7 @@ const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
  [SecretSync.GitLab]: GITLAB_SYNC_LIST_OPTION,
  [SecretSync.CloudflarePages]: CLOUDFLARE_PAGES_SYNC_LIST_OPTION,
  [SecretSync.CloudflareWorkers]: CLOUDFLARE_WORKERS_SYNC_LIST_OPTION,
  [SecretSync.Supabase]: SUPABASE_SYNC_LIST_OPTION,
  [SecretSync.Zabbix]: ZABBIX_SYNC_LIST_OPTION,
  [SecretSync.Railway]: RAILWAY_SYNC_LIST_OPTION,
  [SecretSync.Checkly]: CHECKLY_SYNC_LIST_OPTION

@@ -255,6 +256,8 @@ export const SecretSyncFns = {
      return RailwaySyncFns.syncSecrets(secretSync, schemaSecretMap);
    case SecretSync.Checkly:
      return ChecklySyncFns.syncSecrets(secretSync, schemaSecretMap);
    case SecretSync.Supabase:
      return SupabaseSyncFns.syncSecrets(secretSync, schemaSecretMap);
    default:
      throw new Error(
        `Unhandled sync destination for sync secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`

@@ -359,6 +362,9 @@ export const SecretSyncFns = {
    case SecretSync.Checkly:
      secretMap = await ChecklySyncFns.getSecrets(secretSync);
      break;
    case SecretSync.Supabase:
      secretMap = await SupabaseSyncFns.getSecrets(secretSync);
      break;
    default:
      throw new Error(
        `Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`

@@ -444,6 +450,8 @@ export const SecretSyncFns = {
      return RailwaySyncFns.removeSecrets(secretSync, schemaSecretMap);
    case SecretSync.Checkly:
      return ChecklySyncFns.removeSecrets(secretSync, schemaSecretMap);
    case SecretSync.Supabase:
      return SupabaseSyncFns.removeSecrets(secretSync, schemaSecretMap);
    default:
      throw new Error(
        `Unhandled sync destination for remove secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
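Each new destination has to be added by hand to the `syncSecrets`, `getSecrets`, and `removeSecrets` switches above, and a missed case only fails at runtime via the `default` branch. As a hedged aside (this is not how the file currently does it), a `never`-based exhaustiveness check would surface a missed destination at compile time; the `Destination` union and handler names below are illustrative stand-ins for the real `SecretSync` enum and sync function maps.

```ts
// Sketch: exhaustiveness checking with `never`, as an alternative to a runtime-only default branch.
type Destination = "render" | "supabase" | "checkly";

function syncSecretsFor(destination: Destination): string {
  switch (destination) {
    case "render":
      return "RenderSyncFns.syncSecrets";
    case "supabase":
      return "SupabaseSyncFns.syncSecrets";
    case "checkly":
      return "ChecklySyncFns.syncSecrets";
    default: {
      // If a new Destination member is added and not handled, this assignment fails to compile.
      const unhandled: never = destination;
      throw new Error(`Unhandled sync destination: ${unhandled as string}`);
    }
  }
}
```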
@@ -25,7 +25,7 @@ export const SECRET_SYNC_NAME_MAP: Record<SecretSync, string> = {
  [SecretSync.GitLab]: "GitLab",
  [SecretSync.CloudflarePages]: "Cloudflare Pages",
  [SecretSync.CloudflareWorkers]: "Cloudflare Workers",
  [SecretSync.Supabase]: "Supabase",
  [SecretSync.Zabbix]: "Zabbix",
  [SecretSync.Railway]: "Railway",
  [SecretSync.Checkly]: "Checkly"

@@ -55,7 +55,7 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
  [SecretSync.GitLab]: AppConnection.GitLab,
  [SecretSync.CloudflarePages]: AppConnection.Cloudflare,
  [SecretSync.CloudflareWorkers]: AppConnection.Cloudflare,
  [SecretSync.Supabase]: AppConnection.Supabase,
  [SecretSync.Zabbix]: AppConnection.Zabbix,
  [SecretSync.Railway]: AppConnection.Railway,
  [SecretSync.Checkly]: AppConnection.Checkly

@@ -85,7 +85,7 @@ export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
  [SecretSync.GitLab]: SecretSyncPlanType.Regular,
  [SecretSync.CloudflarePages]: SecretSyncPlanType.Regular,
  [SecretSync.CloudflareWorkers]: SecretSyncPlanType.Regular,
  [SecretSync.Supabase]: SecretSyncPlanType.Regular,
  [SecretSync.Zabbix]: SecretSyncPlanType.Regular,
  [SecretSync.Railway]: SecretSyncPlanType.Regular,
  [SecretSync.Checkly]: SecretSyncPlanType.Regular
@@ -231,7 +231,8 @@ export const secretSyncQueueFactory = ({
  environment: environment.slug,
  secretPath: folder.path,
  skipMultilineEncoding: secret.skipMultilineEncoding,
  value: secretValue
  value: secretValue,
  secretKey
});
secretMap[secretKey] = { value: expandedSecretValue || "" };
@@ -118,6 +118,12 @@ import {
  TRenderSyncListItem,
  TRenderSyncWithCredentials
} from "./render/render-sync-types";
import {
  TSupabaseSync,
  TSupabaseSyncInput,
  TSupabaseSyncListItem,
  TSupabaseSyncWithCredentials
} from "./supabase/supabase-sync-types";
import {
  TTeamCitySync,
  TTeamCitySyncInput,

@@ -159,7 +165,8 @@ export type TSecretSync =
  | TCloudflareWorkersSync
  | TZabbixSync
  | TRailwaySync
  | TChecklySync;
  | TChecklySync
  | TSupabaseSync;

export type TSecretSyncWithCredentials =
  | TAwsParameterStoreSyncWithCredentials

@@ -187,7 +194,8 @@ export type TSecretSyncWithCredentials =
  | TCloudflareWorkersSyncWithCredentials
  | TZabbixSyncWithCredentials
  | TRailwaySyncWithCredentials
  | TChecklySyncWithCredentials;
  | TChecklySyncWithCredentials
  | TSupabaseSyncWithCredentials;

export type TSecretSyncInput =
  | TAwsParameterStoreSyncInput

@@ -215,7 +223,8 @@ export type TSecretSyncInput =
  | TCloudflareWorkersSyncInput
  | TZabbixSyncInput
  | TRailwaySyncInput
  | TChecklySyncInput;
  | TChecklySyncInput
  | TSupabaseSyncInput;

export type TSecretSyncListItem =
  | TAwsParameterStoreSyncListItem

@@ -243,7 +252,8 @@ export type TSecretSyncListItem =
  | TCloudflareWorkersSyncListItem
  | TZabbixSyncListItem
  | TRailwaySyncListItem
  | TChecklySyncListItem;
  | TChecklySyncListItem
  | TSupabaseSyncListItem;

export type TSyncOptionsConfig = {
  canImportSecrets: boolean;
backend/src/services/secret-sync/supabase/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
export * from "./supabase-sync-constants";
export * from "./supabase-sync-fns";
export * from "./supabase-sync-schemas";
export * from "./supabase-sync-types";
@@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";

export const SUPABASE_SYNC_LIST_OPTION: TSecretSyncListItem = {
  name: "Supabase",
  destination: SecretSync.Supabase,
  connection: AppConnection.Supabase,
  canImportSecrets: false
};
backend/src/services/secret-sync/supabase/supabase-sync-fns.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
/* eslint-disable no-continue */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */

import { chunkArray } from "@app/lib/fn";
import { TSupabaseSecret } from "@app/services/app-connection/supabase";
import { SupabasePublicAPI } from "@app/services/app-connection/supabase/supabase-connection-public-client";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";

import { SecretSyncError } from "../secret-sync-errors";
import { SECRET_SYNC_NAME_MAP } from "../secret-sync-maps";
import { TSecretMap } from "../secret-sync-types";
import { TSupabaseSyncWithCredentials } from "./supabase-sync-types";

const SUPABASE_INTERNAL_SECRETS = ["SUPABASE_URL", "SUPABASE_ANON_KEY", "SUPABASE_SERVICE_ROLE_KEY", "SUPABASE_DB_URL"];

export const SupabaseSyncFns = {
  async getSecrets(secretSync: TSupabaseSyncWithCredentials) {
    throw new Error(`${SECRET_SYNC_NAME_MAP[secretSync.destination]} does not support importing secrets.`);
  },

  async syncSecrets(secretSync: TSupabaseSyncWithCredentials, secretMap: TSecretMap) {
    const {
      environment,
      syncOptions: { disableSecretDeletion, keySchema }
    } = secretSync;
    const config = secretSync.destinationConfig;

    const variables = await SupabasePublicAPI.getVariables(secretSync.connection, config.projectId);

    const supabaseSecrets = new Map(variables!.map((variable) => [variable.name, variable]));

    const toCreate: TSupabaseSecret[] = [];

    for (const key of Object.keys(secretMap)) {
      const variable: TSupabaseSecret = { name: key, value: secretMap[key].value ?? "" };
      toCreate.push(variable);
    }

    for await (const batch of chunkArray(toCreate, 100)) {
      try {
        await SupabasePublicAPI.createVariables(secretSync.connection, config.projectId, ...batch);
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: batch[0].name // Use the first key in the batch for error reporting
        });
      }
    }

    if (disableSecretDeletion) return;

    const toDelete: string[] = [];

    for (const key of supabaseSecrets.keys()) {
      // eslint-disable-next-line no-continue
      if (!matchesSchema(key, environment?.slug || "", keySchema) || SUPABASE_INTERNAL_SECRETS.includes(key)) continue;

      if (!secretMap[key]) {
        toDelete.push(key);
      }
    }

    for await (const batch of chunkArray(toDelete, 100)) {
      try {
        await SupabasePublicAPI.deleteVariables(secretSync.connection, config.projectId, ...batch);
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: batch[0] // Use the first key in the batch for error reporting
        });
      }
    }
  },

  async removeSecrets(secretSync: TSupabaseSyncWithCredentials, secretMap: TSecretMap) {
    const config = secretSync.destinationConfig;

    const variables = await SupabasePublicAPI.getVariables(secretSync.connection, config.projectId);

    const supabaseSecrets = new Map(variables!.map((variable) => [variable.name, variable]));

    const toDelete: string[] = [];

    for (const key of supabaseSecrets.keys()) {
      if (SUPABASE_INTERNAL_SECRETS.includes(key) || !(key in secretMap)) continue;

      toDelete.push(key);
    }

    for await (const batch of chunkArray(toDelete, 100)) {
      try {
        await SupabasePublicAPI.deleteVariables(secretSync.connection, config.projectId, ...batch);
      } catch (error) {
        throw new SecretSyncError({
          error,
          secretKey: batch[0] // Use the first key in the batch for error reporting
        });
      }
    }
  }
};
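`SupabaseSyncFns` pushes and deletes variables in batches of 100 via `chunkArray` from `@app/lib/fn`. That helper is not shown in this diff; assuming typical semantics, a minimal sketch of such a chunking utility looks like this.

```ts
// Minimal chunking helper, assuming semantics similar to the chunkArray used above.
function chunkArray<T>(items: T[], size: number): T[][] {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
}

// chunkArray(["A", "B", "C"], 2) -> [["A", "B"], ["C"]]
```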
@@ -0,0 +1,43 @@
import { z } from "zod";

import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
  BaseSecretSyncSchema,
  GenericCreateSecretSyncFieldsSchema,
  GenericUpdateSecretSyncFieldsSchema
} from "@app/services/secret-sync/secret-sync-schemas";
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";

const SupabaseSyncDestinationConfigSchema = z.object({
  projectId: z.string().max(255).min(1, "Project ID is required"),
  projectName: z.string().max(255).min(1, "Project Name is required")
});

const SupabaseSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: false };

export const SupabaseSyncSchema = BaseSecretSyncSchema(SecretSync.Supabase, SupabaseSyncOptionsConfig).extend({
  destination: z.literal(SecretSync.Supabase),
  destinationConfig: SupabaseSyncDestinationConfigSchema
});

export const CreateSupabaseSyncSchema = GenericCreateSecretSyncFieldsSchema(
  SecretSync.Supabase,
  SupabaseSyncOptionsConfig
).extend({
  destinationConfig: SupabaseSyncDestinationConfigSchema
});

export const UpdateSupabaseSyncSchema = GenericUpdateSecretSyncFieldsSchema(
  SecretSync.Supabase,
  SupabaseSyncOptionsConfig
).extend({
  destinationConfig: SupabaseSyncDestinationConfigSchema.optional()
});

export const SupabaseSyncListItemSchema = z.object({
  name: z.literal("Supabase"),
  connection: z.literal(AppConnection.Supabase),
  destination: z.literal(SecretSync.Supabase),
  canImportSecrets: z.literal(false)
});
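The Zod schemas above validate the Supabase destination config before a sync is created or updated. As a quick, standalone illustration of how such a schema rejects a bad payload (this mirrors the config shape above but does not use the real generic sync schemas):

```ts
import { z } from "zod";

// Standalone copy of the destination config shape above, for illustration only.
const DestinationConfig = z.object({
  projectId: z.string().max(255).min(1, "Project ID is required"),
  projectName: z.string().max(255).min(1, "Project Name is required")
});

const result = DestinationConfig.safeParse({ projectId: "", projectName: "my-project" });
if (!result.success) {
  // Prints "Project ID is required"
  console.log(result.error.issues[0].message);
}
```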
@@ -0,0 +1,21 @@
import z from "zod";

import { TSupabaseConnection } from "@app/services/app-connection/supabase";

import { CreateSupabaseSyncSchema, SupabaseSyncListItemSchema, SupabaseSyncSchema } from "./supabase-sync-schemas";

export type TSupabaseSyncListItem = z.infer<typeof SupabaseSyncListItemSchema>;

export type TSupabaseSync = z.infer<typeof SupabaseSyncSchema>;

export type TSupabaseSyncInput = z.infer<typeof CreateSupabaseSyncSchema>;

export type TSupabaseSyncWithCredentials = TSupabaseSync & {
  connection: TSupabaseConnection;
};

export type TSupabaseVariablesGraphResponse = {
  data: {
    variables: Record<string, string>;
  };
};
@@ -67,6 +67,7 @@ export const getAllSecretReferences = (maybeSecretReference: string) => {
export const fnSecretBulkInsert = async ({
  // TODO: Pick types here
  folderId,
  commitChanges,
  orgId,
  inputSecrets,
  secretDAL,

@@ -134,28 +135,32 @@ export const fnSecretBulkInsert = async ({
    tx
  );

  const commitChanges = secretVersions
  const changes = secretVersions
    .filter(({ type }) => type === SecretType.Shared)
    .map((sv) => ({
      type: CommitType.ADD,
      secretVersionId: sv.id
    }));

  if (commitChanges.length > 0) {
    await folderCommitService.createCommit(
      {
        actor: {
          type: actorType || ActorType.PLATFORM,
          metadata: {
            id: actor?.actorId
          }
  if (changes.length > 0) {
    if (commitChanges) {
      commitChanges.push(...changes);
    } else {
      await folderCommitService.createCommit(
        {
          actor: {
            type: actorType || ActorType.PLATFORM,
            metadata: {
              id: actor?.actorId
            }
          },
          message: "Secret Created",
          folderId,
          changes
        },
        message: "Secret Created",
        folderId,
        changes: commitChanges
      },
      tx
    );
        tx
      );
    }
  }

  await secretDAL.upsertSecretReferences(

@@ -209,6 +214,7 @@ export const fnSecretBulkUpdate = async ({
  tx,
  inputSecrets,
  folderId,
  commitChanges,
  orgId,
  secretDAL,
  secretVersionDAL,

@@ -233,6 +239,7 @@ export const fnSecretBulkUpdate = async ({
  userId,
  encryptedComment,
  metadata,
  secretMetadata,
  reminderNote,
  reminderRepeatDays
}

@@ -244,7 +251,7 @@ export const fnSecretBulkUpdate = async ({
  key,
  userId,
  encryptedComment,
  metadata,
  metadata: JSON.stringify(metadata || secretMetadata || []),
  reminderNote,
  encryptedValue,
  reminderRepeatDays

@@ -359,28 +366,32 @@ export const fnSecretBulkUpdate = async ({
    { tx }
  );

  const commitChanges = secretVersions
  const changes = secretVersions
    .filter(({ type }) => type === SecretType.Shared)
    .map((sv) => ({
      type: CommitType.ADD,
      isUpdate: true,
      secretVersionId: sv.id
    }));
  if (commitChanges.length > 0) {
    await folderCommitService.createCommit(
      {
        actor: {
          type: actorType || ActorType.PLATFORM,
          metadata: {
            id: actor?.actorId
          }
  if (changes.length > 0) {
    if (commitChanges) {
      commitChanges.push(...changes);
    } else {
      await folderCommitService.createCommit(
        {
          actor: {
            type: actorType || ActorType.PLATFORM,
            metadata: {
              id: actor?.actorId
            }
          },
          message: "Secret Updated",
          folderId,
          changes
        },
        message: "Secret Updated",
        folderId,
        changes: commitChanges
      },
      tx
    );
        tx
      );
    }
  }

  return secretsWithTags.map((secret) => ({ ...secret, _id: secret.id }));

@@ -395,7 +406,8 @@ export const fnSecretBulkDelete = async ({
  secretDAL,
  secretQueueService,
  folderCommitService,
  secretVersionDAL
  secretVersionDAL,
  commitChanges
}: TFnSecretBulkDelete) => {
  const deletedSecrets = await secretDAL.deleteMany(
    inputSecrets.map(({ type, secretKey }) => ({

@@ -421,27 +433,31 @@ export const fnSecretBulkDelete = async ({
    tx
  );

  const commitChanges = deletedSecrets
  const changes = deletedSecrets
    .filter(({ type }) => type === SecretType.Shared)
    .map(({ id }) => ({
      type: CommitType.DELETE,
      secretVersionId: secretVersions[id].id
    }));
  if (commitChanges.length > 0) {
    await folderCommitService.createCommit(
      {
        actor: {
          type: actorType || ActorType.PLATFORM,
          metadata: {
            id: actorId
          }
  if (changes.length > 0) {
    if (commitChanges) {
      commitChanges.push(...changes);
    } else {
      await folderCommitService.createCommit(
        {
          actor: {
            type: actorType || ActorType.PLATFORM,
            metadata: {
              id: actorId
            }
          },
          message: "Secret Deleted",
          folderId,
          changes
        },
        message: "Secret Deleted",
        folderId,
        changes: commitChanges
      },
      tx
    );
        tx
      );
    }
  }

  return deletedSecrets;

@@ -614,6 +630,7 @@ export const expandSecretReferencesFactory = ({
  secretPath: string;
  environment: string;
  shouldStackTrace?: boolean;
  secretKey: string;
}) => {
  const stackTrace = { ...dto, key: "root", children: [] } as TSecretReferenceTraceNode;

@@ -656,7 +673,7 @@ export const expandSecretReferencesFactory = ({
  const referredValue = await fetchSecret(environment, secretPath, secretKey);
  if (!canExpandValue(environment, secretPath, secretKey, referredValue.tags))
    throw new ForbiddenRequestError({
      message: `You are attempting to reference secret named ${secretKey} from environment ${environment} in path ${secretPath} which you do not have access to read value on.`
      message: `You do not have permission to read secret '${secretKey}' in environment '${environment}' at path '${secretPath}', which is referenced by secret '${dto.secretKey}' in environment '${dto.environment}' at path '${dto.secretPath}'.`
    });

  const cacheKey = getCacheUniqueKey(environment, secretPath);

@@ -675,7 +692,7 @@ export const expandSecretReferencesFactory = ({
  const referedValue = await fetchSecret(secretReferenceEnvironment, secretReferencePath, secretReferenceKey);
  if (!canExpandValue(secretReferenceEnvironment, secretReferencePath, secretReferenceKey, referedValue.tags))
    throw new ForbiddenRequestError({
      message: `You are attempting to reference secret named ${secretReferenceKey} from environment ${secretReferenceEnvironment} in path ${secretReferencePath} which you do not have access to read value on.`
      message: `You do not have permission to read secret '${secretReferenceKey}' in environment '${secretReferenceEnvironment}' at path '${secretReferencePath}', which is referenced by secret '${dto.secretKey}' in environment '${dto.environment}' at path '${dto.secretPath}'.`
    });

  const cacheKey = getCacheUniqueKey(secretReferenceEnvironment, secretReferencePath);

@@ -692,6 +709,7 @@ export const expandSecretReferencesFactory = ({
  secretPath: referencedSecretPath,
  environment: referencedSecretEnvironmentSlug,
  depth: depth + 1,
  secretKey: referencedSecretKey,
  trace
};

@@ -726,6 +744,7 @@ export const expandSecretReferencesFactory = ({
  skipMultilineEncoding?: boolean | null;
  secretPath: string;
  environment: string;
  secretKey: string;
}) => {
  if (!inputSecret.value) return inputSecret.value;

@@ -741,6 +760,7 @@ export const expandSecretReferencesFactory = ({
  value?: string;
  secretPath: string;
  environment: string;
  secretKey: string;
}) => {
  const { stackTrace, expandedValue } = await recursivelyExpandSecret({ ...inputSecret, shouldStackTrace: true });
  return { stackTrace, expandedValue };
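The bulk insert, update, and delete helpers above now accept an optional `commitChanges` buffer: when a caller supplies one, the change records are appended for the caller to commit later as one batch; otherwise each helper creates its own folder commit immediately. A reduced sketch of that "buffer or commit now" pattern follows; the `Change` shape and the commit callback are simplified stand-ins, not the real folder-commit service API.

```ts
// Simplified sketch of the pattern used by the bulk helpers above.
type Change = { type: "add" | "delete"; versionId: string };

async function recordChanges(
  changes: Change[],
  createCommit: (message: string, changes: Change[]) => Promise<void>,
  commitChanges?: Change[]
): Promise<void> {
  if (changes.length === 0) return;

  if (commitChanges) {
    // Caller owns the commit: just accumulate the changes into the shared buffer.
    commitChanges.push(...changes);
  } else {
    // No shared buffer: commit immediately with a standalone message.
    await createCommit("Secret Created", changes);
  }
}
```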
@@ -28,7 +28,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";

import { ActorType } from "../auth/auth-type";
import { TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TCommitResourceChangeDTO, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";

@@ -1105,7 +1105,7 @@ export const secretV2BridgeServiceFactory = ({

if (shouldExpandSecretReferences) {
  const secretsGroupByPath = groupBy(decryptedSecrets, (i) => i.secretPath);
  await Promise.allSettled(
  const settledPromises = await Promise.allSettled(
    Object.keys(secretsGroupByPath).map((groupedPath) =>
      Promise.allSettled(
        secretsGroupByPath[groupedPath].map(async (decryptedSecret, index) => {

@@ -1113,7 +1113,8 @@ export const secretV2BridgeServiceFactory = ({
  value: decryptedSecret.secretValue,
  secretPath: groupedPath,
  environment,
  skipMultilineEncoding: decryptedSecret.skipMultilineEncoding
  skipMultilineEncoding: decryptedSecret.skipMultilineEncoding,
  secretKey: decryptedSecret.secretKey
});
// eslint-disable-next-line no-param-reassign
secretsGroupByPath[groupedPath][index].secretValue = expandedSecretValue || "";

@@ -1121,6 +1122,35 @@ export const secretV2BridgeServiceFactory = ({
      )
    )
  );
  const errors: { path: string; error: string }[] = [];

  settledPromises.forEach((outerResult: PromiseSettledResult<PromiseSettledResult<void>[]>, outerIndex) => {
    const groupedPath = Object.keys(secretsGroupByPath)[outerIndex];

    if (outerResult.status === "rejected") {
      errors.push({
        path: groupedPath,
        error: `Failed to process secret group: ${outerResult.reason}`
      });
    } else {
      // Check inner promise results
      outerResult.value.forEach((innerResult: PromiseSettledResult<void>) => {
        if (innerResult.status === "rejected") {
          const reason = innerResult.reason as ForbiddenRequestError;
          errors.push({
            path: groupedPath,
            error: reason.message
          });
        }
      });
    }
  });
  if (errors.length > 0) {
    throw new ForbiddenRequestError({
      message: "Failed to expand one or more secret references",
      details: errors.map((err) => err.error)
    });
  }
}

if (!includeImports) {

@@ -1424,7 +1454,8 @@ export const secretV2BridgeServiceFactory = ({
  environment,
  secretPath: path,
  value: secretValue,
  skipMultilineEncoding: secret.skipMultilineEncoding
  skipMultilineEncoding: secret.skipMultilineEncoding,
  secretKey: secret.key
});

secretValue = expandedSecretValue || "";

@@ -1474,8 +1505,10 @@ export const secretV2BridgeServiceFactory = ({
  actorOrgId,
  environment,
  projectId,
  secrets: inputSecrets
}: TCreateManySecretDTO) => {
  secrets: inputSecrets,
  tx: providedTx,
  commitChanges
}: TCreateManySecretDTO & { tx?: Knex; commitChanges?: TCommitResourceChangeDTO[] }) => {
  const { permission } = await permissionService.getProjectPermission({
    actor,
    actorId,

@@ -1558,8 +1591,8 @@ export const secretV2BridgeServiceFactory = ({
  const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
    await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId });

  const newSecrets = await secretDAL.transaction(async (tx) =>
    fnSecretBulkInsert({
  const executeBulkInsert = async (tx: Knex) => {
    return fnSecretBulkInsert({
      inputSecrets: inputSecrets.map((el) => {
        const references = secretReferencesGroupByInputSecretKey[el.secretKey]?.nestedReferences;

@@ -1581,6 +1614,7 @@ export const secretV2BridgeServiceFactory = ({
        };
      }),
      folderId,
      commitChanges,
      orgId: actorOrgId,
      secretDAL,
      resourceMetadataDAL,

@@ -1593,8 +1627,13 @@ export const secretV2BridgeServiceFactory = ({
        actorId
      },
      tx
    })
  );
    });
  };

  const newSecrets = providedTx
    ? await executeBulkInsert(providedTx)
    : await secretDAL.transaction(executeBulkInsert);

  await secretDAL.invalidateSecretCacheByProjectId(projectId);
  await snapshotService.performSnapshot(folderId);
  await secretQueueService.syncSecrets({

@@ -1641,8 +1680,10 @@ export const secretV2BridgeServiceFactory = ({
  projectId,
  secretPath: defaultSecretPath = "/",
  secrets: inputSecrets,
  mode: updateMode
}: TUpdateManySecretDTO) => {
  mode: updateMode,
  tx: providedTx,
  commitChanges
}: TUpdateManySecretDTO & { tx?: Knex; commitChanges?: TCommitResourceChangeDTO[] }) => {
  const { permission } = await permissionService.getProjectPermission({
    actor,
    actorId,

@@ -1671,18 +1712,20 @@ export const secretV2BridgeServiceFactory = ({
  const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
    await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId });

  const updatedSecrets: Array<
    TSecretsV2 & {
      secretPath: string;
      tags: {
        id: string;
        slug: string;
        color?: string | null;
        name: string;
      }[];
    }
  > = [];
  await secretDAL.transaction(async (tx) => {
  // Function to execute the bulk update operation
  const executeBulkUpdate = async (tx: Knex) => {
    const updatedSecrets: Array<
      TSecretsV2 & {
        secretPath: string;
        tags: {
          id: string;
          slug: string;
          color?: string | null;
          name: string;
        }[];
      }
    > = [];

    for await (const folder of folders) {
      if (!folder) throw new NotFoundError({ message: "Folder not found" });

@@ -1801,7 +1844,7 @@ export const secretV2BridgeServiceFactory = ({
  {
    operator: "eq",
    field: `${TableName.SecretV2}.key` as "key",
    value: el.secretKey
    value: el.newSecretName as string
  },
  {
    operator: "eq",

@@ -1855,6 +1898,7 @@ export const secretV2BridgeServiceFactory = ({
  orgId: actorOrgId,
  folderCommitService,
  tx,
  commitChanges,
  inputSecrets: secretsToUpdate.map((el) => {
    const originalSecret = secretsToUpdateInDBGroupedByKey[el.secretKey][0];
    const encryptedValue =

@@ -1934,7 +1978,13 @@ export const secretV2BridgeServiceFactory = ({
        updatedSecrets.push(...bulkInsertedSecrets.map((el) => ({ ...el, secretPath: folder.path })));
      }
    }
  });

    return updatedSecrets;
  };

  const updatedSecrets = providedTx
    ? await executeBulkUpdate(providedTx)
    : await secretDAL.transaction(executeBulkUpdate);

  await secretDAL.invalidateSecretCacheByProjectId(projectId);
  await Promise.allSettled(folders.map((el) => (el?.id ? snapshotService.performSnapshot(el.id) : undefined)));

@@ -1991,8 +2041,10 @@ export const secretV2BridgeServiceFactory = ({
  actor,
  actorId,
  actorAuthMethod,
  actorOrgId
}: TDeleteManySecretDTO) => {
  actorOrgId,
  tx: providedTx,
  commitChanges
}: TDeleteManySecretDTO & { tx?: Knex; commitChanges?: TCommitResourceChangeDTO[] }) => {
  const { permission } = await permissionService.getProjectPermission({
    actor,
    actorId,

@@ -2051,24 +2103,29 @@ export const secretV2BridgeServiceFactory = ({
    );
  });

  const executeBulkDelete = async (tx: Knex) => {
    return fnSecretBulkDelete({
      secretDAL,
      secretQueueService,
      folderCommitService,
      secretVersionDAL,
      inputSecrets: inputSecrets.map(({ type, secretKey }) => ({
        secretKey,
        type: type || SecretType.Shared
      })),
      projectId,
      folderId,
      actorId,
      actorType: actor,
      commitChanges,
      tx
    });
  };

  try {
    const secretsDeleted = await secretDAL.transaction(async (tx) =>
      fnSecretBulkDelete({
        secretDAL,
        secretQueueService,
        folderCommitService,
        secretVersionDAL,
        inputSecrets: inputSecrets.map(({ type, secretKey }) => ({
          secretKey,
          type: type || SecretType.Shared
        })),
        projectId,
        folderId,
        actorId,
        actorType: actor,
        tx
      })
    );
    const secretsDeleted = providedTx
      ? await executeBulkDelete(providedTx)
      : await secretDAL.transaction(executeBulkDelete);

    await secretDAL.invalidateSecretCacheByProjectId(projectId);
    await snapshotService.performSnapshot(folderId);

@@ -2722,7 +2779,8 @@ export const secretV2BridgeServiceFactory = ({
  const { expandedValue, stackTrace } = await getExpandedSecretStackTrace({
    environment,
    secretPath,
    value: decryptedSecretValue
    value: decryptedSecretValue,
    secretKey: secretName
  });

  return { tree: stackTrace, value: expandedValue };
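The service change above now keeps the results of the nested `Promise.allSettled` calls and surfaces every failed secret-reference expansion instead of silently dropping rejections. A self-contained sketch of aggregating errors from nested settled results, using a generic error type rather than the project's `ForbiddenRequestError`:

```ts
// Sketch of aggregating rejections from nested Promise.allSettled results, as the service now does.
async function expandAll(groups: Record<string, Array<() => Promise<void>>>): Promise<void> {
  const settled = await Promise.allSettled(
    Object.values(groups).map((tasks) => Promise.allSettled(tasks.map((task) => task())))
  );

  const errors: { path: string; error: string }[] = [];
  settled.forEach((outer, index) => {
    const path = Object.keys(groups)[index];
    if (outer.status === "rejected") {
      errors.push({ path, error: String(outer.reason) });
    } else {
      outer.value.forEach((inner) => {
        if (inner.status === "rejected") errors.push({ path, error: String(inner.reason) });
      });
    }
  });

  if (errors.length > 0) {
    throw new Error(`Failed to expand one or more secret references: ${errors.map((e) => e.error).join("; ")}`);
  }
}
```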
@@ -8,7 +8,7 @@ import { SecretsOrderBy } from "@app/services/secret/secret-types";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";

import { TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TCommitResourceChangeDTO, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
import { ResourceMetadataDTO } from "../resource-metadata/resource-metadata-schema";
import { TSecretV2BridgeDALFactory } from "./secret-v2-bridge-dal";

@@ -167,6 +167,7 @@ export type TFnSecretBulkInsert = {
  folderId: string;
  orgId: string;
  tx?: Knex;
  commitChanges?: TCommitResourceChangeDTO[];
  inputSecrets: Array<
    Omit<TSecretsV2Insert, "folderId"> & {
      tagIds?: string[];

@@ -214,6 +215,7 @@ export type TFnSecretBulkUpdate = {
    actorId?: string;
  };
  tx?: Knex;
  commitChanges?: TCommitResourceChangeDTO[];
};

export type TFnSecretBulkDelete = {

@@ -223,6 +225,7 @@ export type TFnSecretBulkDelete = {
  actorId: string;
  actorType?: string;
  tx?: Knex;
  commitChanges?: TCommitResourceChangeDTO[];
  secretDAL: Pick<TSecretV2BridgeDALFactory, "deleteMany">;
  secretQueueService: {
    removeSecretReminder: (data: TRemoveSecretReminderDTO, tx?: Knex) => Promise<void>;
@@ -426,7 +426,8 @@ export const secretQueueFactory = ({
  environment: dto.environment,
  secretPath: dto.secretPath,
  skipMultilineEncoding: secret.skipMultilineEncoding,
  value: secretValue
  value: secretValue,
  secretKey
});
content[secretKey] = { value: expandedSecretValue || "" };
@@ -544,3 +544,33 @@ export enum SecretProtectionType {
}

export type TStartSecretsV2MigrationDTO = TProjectPermission;

export type TProcessNewCommitRawDTO = {
  secrets: {
    create?: {
      secretKey: string;
      secretValue: string;
      secretComment?: string;
      skipMultilineEncoding?: boolean;
      tagIds?: string[];
      secretMetadata?: ResourceMetadataDTO;
      metadata?: { source?: string };
    }[];
    update?: {
      secretKey: string;
      newSecretName?: string;
      secretValue?: string;
      secretComment?: string;
      skipMultilineEncoding?: boolean;
      tagIds?: string[];
      secretMetadata?: ResourceMetadataDTO;
      metadata?: { source?: string };
    }[];
    delete?: { secretKey: string }[];
  };
  folders: {
    create?: { folderName: string; description?: string }[];
    update?: { folderName: string; description?: string | null; id: string }[];
    delete?: { folderName: string; id: string }[];
  };
};
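`TProcessNewCommitRawDTO` groups a commit's raw secret and folder operations into create/update/delete buckets. As a hedged illustration, an example payload matching that shape is shown below; every key, value, and ID is a placeholder.

```ts
// Example payload matching TProcessNewCommitRawDTO above; all values are placeholders.
const commitPayload = {
  secrets: {
    create: [{ secretKey: "API_KEY", secretValue: "placeholder-value", secretComment: "third-party API key" }],
    update: [{ secretKey: "DB_URL", newSecretName: "DATABASE_URL", secretValue: "postgres://placeholder" }],
    delete: [{ secretKey: "LEGACY_TOKEN" }]
  },
  folders: {
    create: [{ folderName: "payments", description: "Payment service secrets" }],
    update: [{ folderName: "billing", description: null, id: "folder-id-1" }],
    delete: [{ folderName: "deprecated", id: "folder-id-2" }]
  }
};
```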
@@ -33,6 +33,7 @@ Every feature/problem is unique, but your design docs should generally include t
   - A high-level summary of the problem and proposed solution. Keep it brief (max 3 paragraphs).
3. **Context**
   - Explain the problem's background, why it's important to solve now, and any constraints (e.g., technical, sales, or timeline-related). What do we get out of solving this problem? (needed to close a deal, scale, performance, etc.).
   - Consider whether this feature has notable sales implications (e.g., affects pricing, customer commitments, go-to-market strategy, or competitive positioning) that would require Sales team input and approval.
4. **Solution**
   - Provide a big-picture explanation of the solution, followed by detailed technical architecture.

@@ -76,3 +77,11 @@ Before sharing your design docs with others, review your design doc as if you we
   - Ask the relevant engineer(s) to review your document. Their role is to identify blind spots, challenge assumptions, and ensure everything is clear. Once you and the reviewer are on the same page about the approach, update the document with any missing details they brought up.
4. **Team Review and Feedback**
   - Invite the relevant engineers to a design doc review meeting and give them 10-15 minutes to read through the document. After everyone has had a chance to review it, open the floor for discussion. Address any feedback or concerns raised during this meeting. If significant points were overlooked during your initial planning, you may need to revisit the drawing board. Your goal is to think about the feature holistically and minimize the need for drastic changes to your design doc later on.
5. **Sales Approval (When Applicable)**
   - If your design document has notable sales implications, get explicit approval from the Sales team before proceeding to implementation. This includes features that:
     - Affect pricing models or billing structures
     - Impact customer commitments or contractual obligations
     - Change core product functionality that's actively being sold
     - Introduce new capabilities that could affect competitive positioning
     - Modify user experience in ways that could impact customer acquisition or retention
   - Share the design document with the Sales team to ensure alignment between the proposed technical approach and sales strategy, pricing models, and market positioning.
@@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/okta/available"
---
@@ -0,0 +1,8 @@
---
title: "Create"
openapi: "POST /api/v1/app-connections/okta"
---

<Note>
  Check out the configuration docs for [Okta Connections](/integrations/app-connections/okta) to learn how to obtain the required credentials.
</Note>
Some files were not shown because too many files have changed in this diff.