mirror of
https://github.com/Infisical/infisical.git
synced 2025-07-11 12:11:38 +00:00
Compare commits
2 Commits
add-webhoo
...
daniel/gat
Author | SHA1 | Date
---|---|---
 | 4c8bf9bd92 |
 | a6554deb80 |
@@ -44,7 +44,6 @@ import {
   TSecretSyncRaw,
   TUpdateSecretSyncDTO
 } from "@app/services/secret-sync/secret-sync-types";
-import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
 import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";

 import { KmipPermission } from "../kmip/kmip-enum";
@@ -207,7 +206,6 @@ export enum EventType {
   CREATE_WEBHOOK = "create-webhook",
   UPDATE_WEBHOOK_STATUS = "update-webhook-status",
   DELETE_WEBHOOK = "delete-webhook",
-  WEBHOOK_TRIGGERED = "webhook-triggered",
   GET_SECRET_IMPORTS = "get-secret-imports",
   GET_SECRET_IMPORT = "get-secret-import",
   CREATE_SECRET_IMPORT = "create-secret-import",
@@ -1442,14 +1440,6 @@ interface DeleteWebhookEvent {
   };
 }

-export interface WebhookTriggeredEvent {
-  type: EventType.WEBHOOK_TRIGGERED;
-  metadata: {
-    webhookId: string;
-    status: string;
-  } & TWebhookPayloads;
-}
-
 interface GetSecretImportsEvent {
   type: EventType.GET_SECRET_IMPORTS;
   metadata: {
@@ -3231,7 +3221,6 @@ export type Event =
   | CreateWebhookEvent
   | UpdateWebhookStatusEvent
   | DeleteWebhookEvent
-  | WebhookTriggeredEvent
   | GetSecretImportsEvent
   | GetSecretImportEvent
   | CreateSecretImportEvent

@@ -709,10 +709,6 @@ export const licenseServiceFactory = ({
     return licenses;
   };

-  const invalidateGetPlan = async (orgId: string) => {
-    await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
-  };
-
   return {
     generateOrgCustomerId,
     removeOrgCustomer,
@@ -727,7 +723,6 @@ export const licenseServiceFactory = ({
       return onPremFeatures;
     },
     getPlan,
-    invalidateGetPlan,
     updateSubscriptionOrgMemberCount,
     refreshPlan,
     getOrgPlan,

@@ -376,8 +376,7 @@ const DynamicSecretConditionV2Schema = z
       .object({
         [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
         [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
-        [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
-        [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
+        [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
       })
       .partial()
   ]),
@@ -405,23 +404,6 @@ const DynamicSecretConditionV2Schema = z
   })
   .partial();

-const SecretImportConditionSchema = z
-  .object({
-    environment: z.union([
-      z.string(),
-      z
-        .object({
-          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
-          [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
-          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
-          [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
-        })
-        .partial()
-    ]),
-    secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
-  })
-  .partial();
-
 const SecretConditionV2Schema = z
   .object({
     environment: z.union([
@@ -759,7 +741,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
     action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
       "Describe what action an entity can take."
     ),
-    conditions: SecretImportConditionSchema.describe(
+    conditions: SecretConditionV1Schema.describe(
       "When specified, only matching conditions will be allowed to access given resource."
     ).optional()
   }),

@@ -835,22 +835,16 @@ export const orgServiceFactory = ({

       // if the user doesn't exist we create the user with the email
       if (!inviteeUser) {
-        // TODO(carlos): will be removed once the function receives usernames instead of emails
-        const usersByEmail = await userDAL.findUserByEmail(inviteeEmail, tx);
-        if (usersByEmail?.length === 1) {
-          [inviteeUser] = usersByEmail;
-        } else {
-          inviteeUser = await userDAL.create(
-            {
-              isAccepted: false,
-              email: inviteeEmail,
-              username: inviteeEmail,
-              authMethods: [AuthMethod.EMAIL],
-              isGhost: false
-            },
-            tx
-          );
-        }
+        inviteeUser = await userDAL.create(
+          {
+            isAccepted: false,
+            email: inviteeEmail,
+            username: inviteeEmail,
+            authMethods: [AuthMethod.EMAIL],
+            isGhost: false
+          },
+          tx
+        );
       }

       const inviteeUserId = inviteeUser?.id;

@@ -165,7 +165,7 @@ type TProjectServiceFactoryDep = {
   sshHostGroupDAL: Pick<TSshHostGroupDALFactory, "find" | "findSshHostGroupsWithLoginMappings">;
   permissionService: TPermissionServiceFactory;
   orgService: Pick<TOrgServiceFactory, "addGhostUser">;
-  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "invalidateGetPlan">;
+  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
   queueService: Pick<TQueueServiceFactory, "stopRepeatableJob">;
   smtpService: Pick<TSmtpService, "sendMail">;
   orgDAL: Pick<TOrgDALFactory, "findOne">;
@@ -494,10 +494,6 @@ export const projectServiceFactory = ({
       );
     }

-    // no need to invalidate if there was no limit
-    if (plan.workspaceLimit) {
-      await licenseService.invalidateGetPlan(organization.id);
-    }
     return {
       ...project,
       environments: envs,

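Together with the license-service hunks earlier, these project-service hunks show the plan-cache pattern on the base branch: the organization's plan is cached under `FEATURE_CACHE_KEY(orgId)`, and `invalidateGetPlan` evicts it after project creation when `plan.workspaceLimit` is set, so the next limit check re-reads fresh data. A minimal sketch of that pattern in isolation; the `KeyStore` interface, cache-key shape, and `fetchPlan` callback below are assumptions for illustration, not the repository's implementation:

```ts
// Illustrative sketch of the cache-then-invalidate pattern used above (not the repo's wiring).
type KeyStore = {
  getItem: (key: string) => Promise<string | null>;
  setItem: (key: string, value: string) => Promise<void>;
  deleteItem: (key: string) => Promise<void>;
};

// Assumed key shape for the example only.
const FEATURE_CACHE_KEY = (orgId: string) => `plan-cache-${orgId}`;

export const makePlanService = (
  keyStore: KeyStore,
  fetchPlan: (orgId: string) => Promise<{ workspaceLimit?: number }>
) => {
  const getPlan = async (orgId: string) => {
    const cached = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
    if (cached) return JSON.parse(cached) as { workspaceLimit?: number };

    const plan = await fetchPlan(orgId);
    await keyStore.setItem(FEATURE_CACHE_KEY(orgId), JSON.stringify(plan));
    return plan;
  };

  // Evict the cached plan so the next getPlan() call re-reads fresh limits.
  const invalidateGetPlan = async (orgId: string) => {
    await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
  };

  return { getPlan, invalidateGetPlan };
};
```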
@@ -1581,7 +1581,6 @@ export const secretQueueFactory = ({
         projectDAL,
         webhookDAL,
         event: job.data,
-        auditLogService,
         secretManagerDecryptor: (value) => secretManagerDecryptor({ cipherTextBlob: value }).toString()
       });
     });

@@ -21,11 +21,6 @@ export const userDALFactory = (db: TDbClient) => {
   const findUserByUsername = async (username: string, tx?: Knex) =>
     (tx || db)(TableName.Users).whereRaw('lower("username") = :username', { username: username.toLowerCase() });

-  const findUserByEmail = async (email: string, tx?: Knex) =>
-    (tx || db)(TableName.Users).whereRaw('lower("email") = :email', { email: email.toLowerCase() }).where({
-      isEmailVerified: true
-    });
-
   const getUsersByFilter = async ({
     limit,
     offset,
@@ -239,7 +234,6 @@ export const userDALFactory = (db: TDbClient) => {
     findOneUserAction,
     createUserAction,
     getUsersByFilter,
-    findAllMyAccounts,
-    findUserByEmail
+    findAllMyAccounts
   };
 };

@@ -4,12 +4,9 @@ import { AxiosError } from "axios";
 import picomatch from "picomatch";

 import { TWebhooks } from "@app/db/schemas";
-import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
-import { EventType, WebhookTriggeredEvent } from "@app/ee/services/audit-log/audit-log-types";
 import { request } from "@app/lib/config/request";
 import { NotFoundError } from "@app/lib/errors";
 import { logger } from "@app/lib/logger";
-import { ActorType } from "@app/services/auth/auth-type";

 import { TProjectDALFactory } from "../project/project-dal";
 import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
@@ -166,7 +163,6 @@ export type TFnTriggerWebhookDTO = {
   projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
   projectDAL: Pick<TProjectDALFactory, "findById">;
   secretManagerDecryptor: (value: Buffer) => string;
-  auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
 };

 // this is reusable function
@@ -179,8 +175,7 @@ export const fnTriggerWebhook = async ({
   projectEnvDAL,
   event,
   secretManagerDecryptor,
-  projectDAL,
-  auditLogService
+  projectDAL
 }: TFnTriggerWebhookDTO) => {
   const webhooks = await webhookDAL.findAllWebhooks(projectId, environment);
   const toBeTriggeredHooks = webhooks.filter(
@@ -205,43 +200,16 @@ export const fnTriggerWebhook = async ({
       })
     );

-  const eventPayloads: WebhookTriggeredEvent["metadata"][] = [];
   // filter hooks by status
   const successWebhooks = webhooksTriggered
     .filter(({ status }) => status === "fulfilled")
-    .map((_, i) => {
-      eventPayloads.push({
-        webhookId: toBeTriggeredHooks[i].id,
-        type: event.type,
-        payload: {
-          type: toBeTriggeredHooks[i].type!,
-          ...event.payload,
-          projectName
-        },
-        status: "success"
-      } as WebhookTriggeredEvent["metadata"]);
-
-      return toBeTriggeredHooks[i].id;
-    });
+    .map((_, i) => toBeTriggeredHooks[i].id);
   const failedWebhooks = webhooksTriggered
     .filter(({ status }) => status === "rejected")
-    .map((data, i) => {
-      eventPayloads.push({
-        webhookId: toBeTriggeredHooks[i].id,
-        type: event.type,
-        payload: {
-          type: toBeTriggeredHooks[i].type!,
-          ...event.payload,
-          projectName
-        },
-        status: "failed"
-      } as WebhookTriggeredEvent["metadata"]);
-
-      return {
-        id: toBeTriggeredHooks[i].id,
-        error: data.status === "rejected" ? (data.reason as AxiosError).message : ""
-      };
-    });
+    .map((data, i) => ({
+      id: toBeTriggeredHooks[i].id,
+      error: data.status === "rejected" ? (data.reason as AxiosError).message : ""
+    }));

   await webhookDAL.transaction(async (tx) => {
     const env = await projectEnvDAL.findOne({ projectId, slug: environment }, tx);
@@ -268,21 +236,5 @@ export const fnTriggerWebhook = async ({
       );
     }
   });
-
-  for (const eventPayload of eventPayloads) {
-    // eslint-disable-next-line no-await-in-loop
-    await auditLogService.createAuditLog({
-      actor: {
-        type: ActorType.PLATFORM,
-        metadata: {}
-      },
-      projectId,
-      event: {
-        type: EventType.WEBHOOK_TRIGGERED,
-        metadata: eventPayload
-      }
-    });
-  }

   logger.info({ environment, secretPath, projectId }, "Secret webhook job ended");
 };

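In both versions above, `webhooksTriggered` holds `Promise.allSettled`-style results (`fulfilled` / `rejected`) that get split into success and failure lists. A generic sketch of that partitioning pattern, kept aligned with the input array; this is an illustration of the technique, not the repository's code:

```ts
// Generic sketch: partition allSettled results while keeping each result aligned with its input.
type Hook = { id: string };

export const partitionByOutcome = async (hooks: Hook[], fire: (hook: Hook) => Promise<void>) => {
  const settled = await Promise.allSettled(hooks.map((hook) => fire(hook)));

  const succeeded: string[] = [];
  const failed: { id: string; error: string }[] = [];

  // Promise.allSettled returns results in input order, so settled[i] corresponds to hooks[i].
  settled.forEach((result, i) => {
    if (result.status === "fulfilled") {
      succeeded.push(hooks[i].id);
    } else {
      failed.push({ id: hooks[i].id, error: String(result.reason) });
    }
  });

  return { succeeded, failed };
};
```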
@@ -26,9 +26,13 @@ func handleConnection(ctx context.Context, quicConn quic.Connection) {
     log.Info().Msgf("New connection from: %s", quicConn.RemoteAddr().String())
     // Use WaitGroup to track all streams
     var wg sync.WaitGroup
+
+    contextWithTimeout, cancel := context.WithTimeout(ctx, 30*time.Second)
+    defer cancel()
+
     for {
         // Accept the first stream, which we'll use for commands
-        stream, err := quicConn.AcceptStream(ctx)
+        stream, err := quicConn.AcceptStream(contextWithTimeout)
         if err != nil {
             log.Printf("Failed to accept QUIC stream: %v", err)
             break
@@ -52,7 +56,12 @@ func handleStream(stream quic.Stream, quicConn quic.Connection) {

     // Use buffered reader for better handling of fragmented data
     reader := bufio.NewReader(stream)
-    defer stream.Close()
+    defer func() {
+        log.Info().Msgf("Closing stream %d", streamID)
+        if stream != nil {
+            stream.Close()
+        }
+    }()

     for {
         msg, err := reader.ReadBytes('\n')
@@ -166,7 +175,6 @@ func handleHTTPProxy(stream quic.Stream, reader *bufio.Reader, targetURL string,
        }
    }

    // set certificate verification based on what the gateway client sent
    if verifyParam != "" {
        tlsConfig.InsecureSkipVerify = verifyParam == "false"
        log.Info().Msgf("TLS verification set to: %s", verifyParam)

@@ -175,82 +183,94 @@ func handleHTTPProxy(stream quic.Stream, reader *bufio.Reader, targetURL string,
     transport.TLSClientConfig = tlsConfig
 }

-// read and parse the http request from the stream
-req, err := http.ReadRequest(reader)
-if err != nil {
-    return fmt.Errorf("failed to read HTTP request: %v", err)
-}
-
-actionHeader := req.Header.Get("x-infisical-action")
-if actionHeader != "" {
-
-    if actionHeader == "inject-k8s-sa-auth-token" {
-        token, err := os.ReadFile("/var/run/secrets/kubernetes.io/serviceaccount/token")
-
-        if err != nil {
-            stream.Write([]byte(buildHttpInternalServerError("failed to read k8s sa auth token")))
-            return fmt.Errorf("failed to read k8s sa auth token: %v", err)
-        }
-
-        req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", string(token)))
-        log.Info().Msgf("Injected gateway k8s SA auth token in request to %s", targetURL)
-    }
-
-    req.Header.Del("x-infisical-action")
-}
-
-var targetFullURL string
-if strings.HasPrefix(targetURL, "http://") || strings.HasPrefix(targetURL, "https://") {
-    baseURL := strings.TrimSuffix(targetURL, "/")
-    targetFullURL = baseURL + req.URL.Path
-    if req.URL.RawQuery != "" {
-        targetFullURL += "?" + req.URL.RawQuery
-    }
-} else {
-    baseURL := strings.TrimSuffix("http://"+targetURL, "/")
-    targetFullURL = baseURL + req.URL.Path
-    if req.URL.RawQuery != "" {
-        targetFullURL += "?" + req.URL.RawQuery
-    }
-}
-
-// create the request to the target
-proxyReq, err := http.NewRequest(req.Method, targetFullURL, req.Body)
-proxyReq.Header = req.Header.Clone()
-if err != nil {
-    return fmt.Errorf("failed to create proxy request: %v", err)
-}
-
-log.Info().Msgf("Proxying %s %s to %s", req.Method, req.URL.Path, targetFullURL)
-
-client := &http.Client{
-    Transport: transport,
-    Timeout:   30 * time.Second,
-}
-
-// make the request to the target
-resp, err := client.Do(proxyReq)
-if err != nil {
-    stream.Write([]byte(buildHttpInternalServerError(fmt.Sprintf("failed to reach target due to networking error: %s", err.Error()))))
-    return fmt.Errorf("failed to reach target due to networking error: %v", err)
+// Loop to handle multiple HTTP requests on the same stream
+for {
+    req, err := http.ReadRequest(reader)
+
+    if err != nil {
+        if errors.Is(err, io.EOF) {
+            log.Info().Msg("Client closed HTTP connection")
+            return nil
+        }
+        return fmt.Errorf("failed to read HTTP request: %v", err)
+    }
+    log.Info().Msgf("Received HTTP request: %s", req.URL.Path)
+
+    actionHeader := req.Header.Get("x-infisical-action")
+    if actionHeader != "" {
+        if actionHeader == "inject-k8s-sa-auth-token" {
+            token, err := os.ReadFile("/var/run/secrets/kubernetes.io/serviceaccount/token")
+            if err != nil {
+                stream.Write([]byte(buildHttpInternalServerError("failed to read k8s sa auth token")))
+                continue // Continue to next request instead of returning
+            }
+            req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", string(token)))
+            log.Info().Msgf("Injected gateway k8s SA auth token in request to %s", targetURL)
+        }
+        req.Header.Del("x-infisical-action")
+    }
+
+    // Build full target URL
+    var targetFullURL string
+    if strings.HasPrefix(targetURL, "http://") || strings.HasPrefix(targetURL, "https://") {
+        baseURL := strings.TrimSuffix(targetURL, "/")
+        targetFullURL = baseURL + req.URL.Path
+        if req.URL.RawQuery != "" {
+            targetFullURL += "?" + req.URL.RawQuery
+        }
+    } else {
+        baseURL := strings.TrimSuffix("http://"+targetURL, "/")
+        targetFullURL = baseURL + req.URL.Path
+        if req.URL.RawQuery != "" {
+            targetFullURL += "?" + req.URL.RawQuery
+        }
+    }
+
+    // create the request to the target
+    proxyReq, err := http.NewRequest(req.Method, targetFullURL, req.Body)
+    if err != nil {
+        log.Error().Msgf("Failed to create proxy request: %v", err)
+        stream.Write([]byte(buildHttpInternalServerError("failed to create proxy request")))
+        continue // Continue to next request
+    }
+    proxyReq.Header = req.Header.Clone()
+
+    log.Info().Msgf("Proxying %s %s to %s", req.Method, req.URL.Path, targetFullURL)
+
+    resp, err := client.Do(proxyReq)
+    if err != nil {
+        log.Error().Msgf("Failed to reach target: %v", err)
+        stream.Write([]byte(buildHttpInternalServerError(fmt.Sprintf("failed to reach target due to networking error: %s", err.Error()))))
+        continue // Continue to next request
+    }
+
+    // Write the entire response (status line, headers, body) to the stream
+    // http.Response.Write handles this for "Connection: close" correctly.
+    // For other connection tokens, manual removal might be needed if they cause issues with QUIC.
+    // For a simple proxy, this is generally sufficient.
+    resp.Header.Del("Connection") // Good practice for proxies
+
+    log.Info().Msgf("Writing response to stream: %s", resp.Status)
+
+    if err := resp.Write(stream); err != nil {
+        log.Error().Err(err).Msg("Failed to write response to stream")
+        resp.Body.Close()
+        return fmt.Errorf("failed to write response to stream: %w", err)
+    }
+
+    resp.Body.Close()
+
+    // Check if client wants to close connection
+    if req.Header.Get("Connection") == "close" {
+        log.Info().Msg("Client requested connection close")
+        return nil
+    }
+}
-defer resp.Body.Close()
-
-// Write the entire response (status line, headers, body) to the stream
-// http.Response.Write handles this for "Connection: close" correctly.
-// For other connection tokens, manual removal might be needed if they cause issues with QUIC.
-// For a simple proxy, this is generally sufficient.
-resp.Header.Del("Connection") // Good practice for proxies
-
-log.Info().Msgf("Writing response to stream: %s", resp.Status)
-if err := resp.Write(stream); err != nil {
-    // If writing the response fails, the connection to the client might be broken.
-    // Logging the error is important. The original error will be returned.
-    log.Error().Err(err).Msg("Failed to write response to stream")
-    return fmt.Errorf("failed to write response to stream: %w", err)
-}
-
-return nil
 }

 func buildHttpInternalServerError(message string) string {

@@ -5,12 +5,12 @@ description: "Learn how to stream Infisical Audit Logs to external logging provi

 <Info>
   Audit log streams is a paid feature.

   If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
   then you should contact team@infisical.com to purchase an enterprise license to use it.
 </Info>

-Infisical Audit Log Streaming enables you to transmit your organization's Audit Logs to external logging providers for monitoring and analysis.
+Infisical Audit Log Streaming enables you to transmit your organization's Audit Logs to external logging providers for monitoring and analysis.

 The logs are formatted in JSON, requiring your logging provider to support JSON-based log parsing.

@@ -118,7 +118,7 @@ Each log entry sent to the external logging provider will follow the same struct

 ### Audit Logs Structure
 <ParamField path="id" type="string" required>
-  The unique identifier for the log entry.
+  The unique identifier for the log entry.
 </ParamField>

 <ParamField path="actor" type="platform | user | service | identity | scimClient | unknownUser" required>
@@ -168,7 +168,7 @@ Each log entry sent to the external logging provider will follow the same struct
   <Note>
     If the `actor` field is set to `platform`, `scimClient`, or `unknownUser`, the `actorMetadata` field will be an empty object.
   </Note>
-
+
 </ParamField>

 <ParamField path="ipAddress" type="string" required>
@@ -178,7 +178,7 @@ Each log entry sent to the external logging provider will follow the same struct
 <ParamField path="eventType" type="string" required>
   The type of event that occurred. Below you can see a list of possible event types. More event types will be added in the future as we expand our audit logs further.

-  `get-secrets`, `delete-secrets`, `get-secret`, `create-secret`, `update-secret`, `delete-secret`, `get-workspace-key`, `authorize-integration`, `update-integration-auth`, `unauthorize-integration`, `create-integration`, `delete-integration`, `add-trusted-ip`, `update-trusted-ip`, `delete-trusted-ip`, `create-service-token`, `delete-service-token`, `create-identity`, `update-identity`, `delete-identity`, `login-identity-universal-auth`, `add-identity-universal-auth`, `update-identity-universal-auth`, `get-identity-universal-auth`, `create-identity-universal-auth-client-secret`, `revoke-identity-universal-auth-client-secret`, `get-identity-universal-auth-client-secret`, `create-environment`, `update-environment`, `delete-environment`, `add-workspace-member`, `remove-workspace-member`, `create-folder`, `update-folder`, `delete-folder`, `create-webhook`, `update-webhook-status`, `delete-webhook`, `webhook-triggered`, `get-secret-imports`, `create-secret-import`, `update-secret-import`, `delete-secret-import`, `update-user-workspace-role`, `update-user-workspace-denied-permissions`, `create-certificate-authority`, `get-certificate-authority`, `update-certificate-authority`, `delete-certificate-authority`, `get-certificate-authority-csr`, `get-certificate-authority-cert`, `sign-intermediate`, `import-certificate-authority-cert`, `get-certificate-authority-crl`, `issue-cert`, `get-cert`, `delete-cert`, `revoke-cert`, `get-cert-body`, `create-pki-alert`, `get-pki-alert`, `update-pki-alert`, `delete-pki-alert`, `create-pki-collection`, `get-pki-collection`, `update-pki-collection`, `delete-pki-collection`, `get-pki-collection-items`, `add-pki-collection-item`, `delete-pki-collection-item`, `org-admin-accessed-project`, `create-certificate-template`, `update-certificate-template`, `delete-certificate-template`, `get-certificate-template`, `create-certificate-template-est-config`, `update-certificate-template-est-config`, `get-certificate-template-est-config`, `update-project-slack-config`, `get-project-slack-config`, `integration-synced`, `create-shared-secret`, `delete-shared-secret`, `read-shared-secret`.
+  `get-secrets`, `delete-secrets`, `get-secret`, `create-secret`, `update-secret`, `delete-secret`, `get-workspace-key`, `authorize-integration`, `update-integration-auth`, `unauthorize-integration`, `create-integration`, `delete-integration`, `add-trusted-ip`, `update-trusted-ip`, `delete-trusted-ip`, `create-service-token`, `delete-service-token`, `create-identity`, `update-identity`, `delete-identity`, `login-identity-universal-auth`, `add-identity-universal-auth`, `update-identity-universal-auth`, `get-identity-universal-auth`, `create-identity-universal-auth-client-secret`, `revoke-identity-universal-auth-client-secret`, `get-identity-universal-auth-client-secret`, `create-environment`, `update-environment`, `delete-environment`, `add-workspace-member`, `remove-workspace-member`, `create-folder`, `update-folder`, `delete-folder`, `create-webhook`, `update-webhook-status`, `delete-webhook`, `get-secret-imports`, `create-secret-import`, `update-secret-import`, `delete-secret-import`, `update-user-workspace-role`, `update-user-workspace-denied-permissions`, `create-certificate-authority`, `get-certificate-authority`, `update-certificate-authority`, `delete-certificate-authority`, `get-certificate-authority-csr`, `get-certificate-authority-cert`, `sign-intermediate`, `import-certificate-authority-cert`, `get-certificate-authority-crl`, `issue-cert`, `get-cert`, `delete-cert`, `revoke-cert`, `get-cert-body`, `create-pki-alert`, `get-pki-alert`, `update-pki-alert`, `delete-pki-alert`, `create-pki-collection`, `get-pki-collection`, `update-pki-collection`, `delete-pki-collection`, `get-pki-collection-items`, `add-pki-collection-item`, `delete-pki-collection-item`, `org-admin-accessed-project`, `create-certificate-template`, `update-certificate-template`, `delete-certificate-template`, `get-certificate-template`, `create-certificate-template-est-config`, `update-certificate-template-est-config`, `get-certificate-template-est-config`, `update-project-slack-config`, `get-project-slack-config`, `integration-synced`, `create-shared-secret`, `delete-shared-secret`, `read-shared-secret`.
 </ParamField>

 <ParamField path="eventMetadata" type="object" required>
@@ -219,4 +219,4 @@ Each log entry sent to the external logging provider will follow the same struct
     The name of the project where the event occurred.

     The `projectName` field will only be present if the event occurred at the project level, not the organization level.
-  </ParamField>
+</ParamField>

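The structure documented above maps naturally onto a typed log entry. A minimal TypeScript sketch of that shape, using only the fields named on this page; the optionality and the `Record` metadata types are assumptions for illustration:

```ts
// Illustrative shape of a streamed audit log entry, based on the fields documented above.
type AuditLogActorType = "platform" | "user" | "service" | "identity" | "scimClient" | "unknownUser";

interface StreamedAuditLog {
  id: string; // unique identifier for the log entry
  actor: AuditLogActorType;
  actorMetadata: Record<string, unknown>; // empty object for platform, scimClient, and unknownUser actors
  ipAddress: string;
  eventType: string; // e.g. "create-webhook", "get-secret-imports"
  eventMetadata: Record<string, unknown>;
  projectName?: string; // only present for project-level events
}

// Example: a provider-side consumer only needs JSON parsing plus this shape.
export const parseAuditLog = (raw: string): StreamedAuditLog => JSON.parse(raw) as StreamedAuditLog;
```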
@@ -1,157 +0,0 @@
---
title: Azure
description: "Learn how to authenticate Azure pipelines with Infisical using OpenID Connect (OIDC)."
---

**OIDC Auth** is a platform-agnostic JWT-based authentication method that can be used to authenticate from any platform or environment using an identity provider with OpenID Connect.

## Diagram

The following sequence diagram illustrates the OIDC Auth workflow for authenticating Azure pipelines with Infisical.

```mermaid
sequenceDiagram
  participant Client as Azure Pipeline
  participant Idp as Identity Provider
  participant Infis as Infisical

  Client->>Idp: Step 1: Request identity token
  Idp-->>Client: Return JWT with verifiable claims

  Note over Client,Infis: Step 2: Login Operation
  Client->>Infis: Send signed JWT to /api/v1/auth/oidc-auth/login

  Note over Infis,Idp: Step 3: Query verification
  Infis->>Idp: Request JWT public key using OIDC Discovery
  Idp-->>Infis: Return public key

  Note over Infis: Step 4: JWT validation
  Infis->>Client: Return short-lived access token

  Note over Client,Infis: Step 5: Access Infisical API with Token
  Client->>Infis: Make authenticated requests using the short-lived access token
```

## Concept

At a high-level, Infisical authenticates a client by verifying the JWT and checking that it meets specific requirements (e.g. it is issued by a trusted identity provider) at the `/api/v1/auth/oidc-auth/login` endpoint. If successful,
then Infisical returns a short-lived access token that can be used to make authenticated requests to the Infisical API.

To be more specific:

1. The Azure pipeline requests an identity token from Azure's identity provider.
2. The fetched identity token is sent to Infisical at the `/api/v1/auth/oidc-auth/login` endpoint.
3. Infisical fetches the public key that was used to sign the identity token from Azure's identity provider using OIDC Discovery.
4. Infisical validates the JWT using the public key provided by the identity provider and checks that the subject, audience, and claims of the token matches with the set criteria.
5. If all is well, Infisical returns a short-lived access token that the Azure pipeline can use to make authenticated requests to the Infisical API.

<Note>
  Infisical needs network-level access to Azure's identity provider endpoints.
</Note>

## Guide

In the following steps, we explore how to create and use identities to access the Infisical API using the OIDC Auth authentication method.

<Steps>
  <Step title="Creating an identity">
    To create an identity, head to your Organization Settings > Access Control > Identities and press **Create identity**.

    When creating an identity, you specify an organization level [role](/documentation/platform/role-based-access-controls) for it to assume; you can configure roles in Organization Settings > Access Control > Organization Roles.

    Now input a few details for your new identity. Here's some guidance for each field:

    - Name (required): A friendly name for the identity.
    - Role (required): A role from the **Organization Roles** tab for the identity to assume. The organization role assigned will determine what organization level resources this identity can have access to.

    Once you've created an identity, you'll be redirected to a page where you can manage the identity.

    Since the identity has been configured with Universal Auth by default, you should re-configure it to use OIDC Auth instead. To do this, press to edit the **Authentication** section,
    remove the existing Universal Auth configuration, and add a new OIDC Auth configuration onto the identity.

    <Warning>Restrict access by configuring the Subject, Audiences, and Claims fields</Warning>

    Here's some more guidance on each field:
    - <div style={{ textAlign: 'justify' }}>**OIDC Discovery URL**: The URL used to retrieve the OpenID Connect configuration from the identity provider. This is used to fetch the public keys needed to verify the JWT. For Azure, set this to `https://login.microsoftonline.com/{tenant-id}/v2.0` (replace `{tenant-id}` with your Azure AD tenant ID).</div>
    - <div style={{ textAlign: 'justify' }}>**Issuer**: The value of the `iss` claim that the token must match. For Azure, this should be `https://login.microsoftonline.com/{tenant-id}/v2.0`.</div>
    - **Subject**: This must match the `sub` claim in the JWT.
    - **Audiences**: Values that must match the `aud` claim.
    - **Claims**: Additional claims that must be present. Refer to [Azure DevOps docs](https://learn.microsoft.com/en-us/azure/devops/pipelines/library/connect-to-azure?view=azure-devops#workload-identity-federation) for available claims.
    - **Access Token TTL**: Lifetime of the issued token (in seconds), e.g., `2592000` (30 days)
    - **Access Token Max TTL**: Maximum allowed lifetime of the token
    - **Access Token Max Number of Uses**: Max times the token can be used (`0` = unlimited)
    - **Access Token Trusted IPs**: List of allowed IP ranges (defaults to `0.0.0.0/0`)

    <Tip>If you are unsure about what to configure for the subject, audience, and claims fields, you can inspect the JWT token from your Azure DevOps pipeline by adding a debug step that outputs the token claims.</Tip>
    <Info>The `subject`, `audiences`, and `claims` fields support glob pattern matching; however, we highly recommend using hardcoded values whenever possible.</Info>
  </Step>
  <Step title="Adding an identity to a project">
    To enable the identity to access project-level resources such as secrets within a specific project, you should add it to that project.

    To do this, head over to the project you want to add the identity to and go to Project Settings > Access Control > Machine Identities and press **Add identity**.

    Next, select the identity you want to add to the project and the project level role you want to allow it to assume. The project role assigned will determine what project level resources this identity can have access to.
  </Step>
  <Step title="Accessing the Infisical API with the identity">
    In Azure DevOps, to authenticate with Infisical using OIDC, you must configure a service connection that enables workload identity federation.

    Once set up, the OIDC token can be fetched automatically within the pipeline job context. Here's an example:

    ```yaml
    trigger:
      - main

    pool:
      vmImage: ubuntu-latest

    steps:
      - task: AzureCLI@2
        displayName: 'Retrieve secrets from Infisical using OIDC'
        inputs:
          azureSubscription: 'your-azure-service-connection-name'
          scriptType: 'bash'
          scriptLocation: 'inlineScript'
          addSpnToEnvironment: true
          inlineScript: |
            # Get OIDC access token
            OIDC_TOKEN=$(az account get-access-token --resource "api://AzureADTokenExchange" --query accessToken -o tsv)

            [ -z "$OIDC_TOKEN" ] && { echo "Failed to get access token"; exit 1; }

            # Exchange for Infisical access token
            ACCESS_TOKEN=$(curl -s -X POST "<YOUR-INFISICAL-INSTANCE-URL>/api/v1/auth/oidc-auth/login" \
              -H "Content-Type: application/json" \
              -d "{\"identityId\":\"{your-identity-id}\",\"jwt\":\"$OIDC_TOKEN\"}" \
              | jq -r '.accessToken')

            # Fetch secrets
            curl -s -H "Authorization: Bearer $ACCESS_TOKEN" \
              "<YOUR-INFISICAL-INSTANCE-URL>/api/v3/secrets/raw?environment={your-environment-slug}&workspaceSlug={your-workspace-slug}"
    ```

    Make sure the service connection is properly configured for workload identity federation and linked to your Azure AD app registration with appropriate claims.

    <Note>
      Each identity access token has a time-to-live (TTL) which you can infer from the response of the login operation;
      the default TTL is `7200` seconds which can be adjusted.

      If an identity access token expires, it can no longer authenticate with the Infisical API. In this case,
      a new access token should be obtained by performing another login operation.
    </Note>
  </Step>
</Steps>

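The deleted guide above documents the login exchange itself: POST the identity provider's JWT and the identity ID to `/api/v1/auth/oidc-auth/login` and receive a short-lived `accessToken`. For reference, here is the same exchange as a TypeScript sketch, assuming only the request and response shape shown in the curl example; the instance URL and identity ID are placeholders:

```ts
// Sketch of the OIDC login exchange described above (illustrative; values are placeholders).
const INFISICAL_URL = "<YOUR-INFISICAL-INSTANCE-URL>";
const IDENTITY_ID = "<your-identity-id>";

export const loginWithOidc = async (oidcJwt: string): Promise<string> => {
  // Exchange the identity provider's JWT for a short-lived Infisical access token.
  const res = await fetch(`${INFISICAL_URL}/api/v1/auth/oidc-auth/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ identityId: IDENTITY_ID, jwt: oidcJwt })
  });
  if (!res.ok) throw new Error(`OIDC login failed: ${res.status}`);

  const { accessToken } = (await res.json()) as { accessToken: string };
  return accessToken; // use as a Bearer token against the Infisical API until it expires
};
```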
@@ -27,7 +27,7 @@ If the signature in the header matches the signature that you generated, then yo

 ```json
 {
-  "event": "secrets.modified",
+  "event": "secret.modified",
   "project": {
     "workspaceId": "the workspace id",
     "environment": "project environment",

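The hunk header above comes from the webhook docs' signature-verification section: you recompute an HMAC over the delivered payload with your webhook secret and compare it to the signature header. A minimal sketch of that check; the header name, HMAC-SHA256 algorithm, and hex encoding are assumptions for illustration, not taken from this diff:

```ts
import { createHmac, timingSafeEqual } from "node:crypto";

// Sketch of webhook signature verification (header name and signing scheme are assumed).
export const isValidWebhookSignature = (
  rawBody: string, // the exact request body as received
  receivedSignature: string, // value of the webhook's signature header (assumed name/format)
  webhookSecret: string
): boolean => {
  const expected = createHmac("sha256", webhookSecret).update(rawBody).digest("hex");
  const a = Buffer.from(expected);
  const b = Buffer.from(receivedSignature);
  // timingSafeEqual throws on length mismatch, so check lengths first.
  return a.length === b.length && timingSafeEqual(a, b);
};
```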
@@ -341,7 +341,6 @@
             "group": "OIDC Auth",
             "pages": [
               "documentation/platform/identities/oidc-auth/general",
-              "documentation/platform/identities/oidc-auth/azure",
               "documentation/platform/identities/oidc-auth/github",
               "documentation/platform/identities/oidc-auth/circleci",
               "documentation/platform/identities/oidc-auth/gitlab",

@@ -52,7 +52,6 @@ export const eventToNameMap: { [K in EventType]: string } = {
   [EventType.CREATE_WEBHOOK]: "Create webhook",
   [EventType.UPDATE_WEBHOOK_STATUS]: "Update webhook status",
   [EventType.DELETE_WEBHOOK]: "Delete webhook",
-  [EventType.WEBHOOK_TRIGGERED]: "Webhook event",
   [EventType.GET_SECRET_IMPORTS]: "List secret imports",
   [EventType.CREATE_SECRET_IMPORT]: "Create secret import",
   [EventType.UPDATE_SECRET_IMPORT]: "Update secret import",

@@ -65,7 +65,6 @@ export enum EventType {
   CREATE_WEBHOOK = "create-webhook",
   UPDATE_WEBHOOK_STATUS = "update-webhook-status",
   DELETE_WEBHOOK = "delete-webhook",
-  WEBHOOK_TRIGGERED = "webhook-triggered",
   GET_SECRET_IMPORTS = "get-secret-imports",
   CREATE_SECRET_IMPORT = "create-secret-import",
   UPDATE_SECRET_IMPORT = "update-secret-import",
@@ -427,16 +427,6 @@ interface DeleteWebhookEvent {
   };
 }

-export interface WebhookTriggeredEvent {
-  type: EventType.WEBHOOK_TRIGGERED;
-  metadata: {
-    webhookId: string;
-    status: string;
-    type: string;
-    payload: { [k: string]: string | null };
-  };
-}
-
 interface GetSecretImportsEvent {
   type: EventType.GET_SECRET_IMPORTS;
   metadata: {
@@ -901,7 +891,6 @@ export type Event =
   | CreateWebhookEvent
   | UpdateWebhookStatusEvent
   | DeleteWebhookEvent
-  | WebhookTriggeredEvent
   | GetSecretImportsEvent
   | CreateSecretImportEvent
   | UpdateSecretImportEvent

@@ -194,7 +194,6 @@ export const WebhooksTab = withProjectPermission(
   <Tr key={id}>
     <Td className="max-w-xs overflow-hidden text-ellipsis hover:overflow-auto hover:break-all">
       {url}
-      <p className="text-xs text-mineshaft-400">{id}</p>
     </Td>
     <Td>{environment.slug}</Td>
     <Td>{secretPath}</Td>

@@ -1,6 +1,6 @@
 image:
   pullPolicy: IfNotPresent
-  tag: "0.41.81"
+  tag: "0.41.82"

 secret:
   # The secret that contains the environment variables to be used by the gateway, such as INFISICAL_API_URL and TOKEN