Compare commits

...

62 Commits

Author SHA1 Message Date
0ec56c9928 docs: add podman compose docs 2025-07-10 18:57:25 +04:00
35520cfe99 Merge pull request #3989 from Infisical/add-access-token-index
add index for referencing columns in identity access token
2025-07-10 09:48:39 -04:00
Sid
ba0f6e60e2 fix: yaml secret file parsing (#3837) 2025-07-10 15:33:59 +05:30
579c68b2a3 Merge pull request #3991 from Infisical/helm-update-v0.9.4
Update Helm chart to version v0.9.4
2025-07-10 14:03:10 +04:00
f4ea3e1c75 Update Helm chart to version v0.9.4 2025-07-10 10:02:02 +00:00
7d37ea318f Merge pull request #3990 from Infisical/daniel/operator-logs
fix: add request ID to error logs
2025-07-10 13:57:44 +04:00
5cb7ecc354 fix: update go sdk 2025-07-10 13:35:59 +04:00
5e85de3937 fix lint and short index name 2025-07-09 23:36:55 -04:00
8719e3e75e add index for referencing columns in identity access token
This PR will address issue with very long identity deletions due to a sequential scan over ALL identity access rows during CASCADE
2025-07-09 23:19:01 -04:00
69ece1f3e3 Merge pull request #3986 from Infisical/update-email-reinvite-job
Add jitter and increase window to 12 m
2025-07-09 22:03:02 -04:00
d5cd6f79f9 Merge branch 'main' into update-email-reinvite-job 2025-07-09 19:57:15 -04:00
19c0731166 Add jitter and increase window to 12 m 2025-07-09 19:54:35 -04:00
f636cc678b Merge pull request #3985 from Infisical/move-migration-logger-init-to-knexfile
fix(migration): move logger init for standalone migration to entry knexfile
2025-07-09 19:16:31 -04:00
ff8ad14e1b fix: move logger init for standalone migration to entry knexfile 2025-07-09 16:14:11 -07:00
d683d3adb3 Merge pull request #3984 from Infisical/ENG-3149
Dockerfile for mintlify docs
2025-07-09 17:32:02 -04:00
d9b8cd1204 Utilize cache 2025-07-09 17:28:10 -04:00
27b5e2aa68 Dockerfile for mintlify docs 2025-07-09 17:20:26 -04:00
692121445d Merge pull request #3862 from vespersio/patch-1
 PR: fix infisical-schema-migration CrashLoopBackOff when upgrading to 0.133.0 #3849
2025-07-09 16:38:01 +08:00
Sid
9a940dce64 fix: support email link template pre-fill (#3979)
* fix: support email link template pre-fill

* fix: remove support dropdown from personal settings

* fix: update support template

---------

Co-authored-by: sidwebworks <xodeveloper@gmail.com>
2025-07-08 22:15:55 +05:30
7e523546b3 Merge pull request #3981 from Infisical/fix-integrations-audit-log-type
fix(typo): add missing space on integrations audit log upgrade prompt
2025-07-08 08:56:19 -07:00
814d6e2709 fix: add missing space on integrations audit log upgrade prompt 2025-07-08 08:48:14 -07:00
c0b296ccd5 Merge pull request #3975 from Infisical/improve-approval-audit-logs
improvement(audit-logs): Create crud events for secret approvals on merge and improve approval audit logs
2025-07-08 08:37:29 -07:00
da82cfdf6b Merge pull request #3925 from Infisical/ENG-3041
feat(secret-scanning): Bitbucket data source + App Connection
2025-07-07 22:41:38 -04:00
92147b5398 improvements: nits and remove console log 2025-07-07 19:19:37 -07:00
526e184bd9 Step 4 image fix 2025-07-07 22:00:04 -04:00
9943312063 Docs fixes v3 2025-07-07 21:57:43 -04:00
c2cefb2b0c Fix image again xD 2025-07-07 21:51:49 -04:00
7571c9b426 Fix image 2025-07-07 21:48:01 -04:00
bf707667b5 Merge pull request #3977 from Infisical/fix-search-filter-for-imported-secrets-on-single-env-view
fix(secret-imports-dashboard): support filtering imported secrets in single env view
2025-07-07 18:32:20 -07:00
d2e6743f22 fix: support filtering imported secrets in singl env view 2025-07-07 18:06:09 -07:00
9e896563ed Feedback 2025-07-07 20:26:35 -04:00
64744d042d Rename GitHubRepositoryRegex 2025-07-07 19:23:26 -04:00
2648ac1c90 Improve teardown 2025-07-07 19:18:53 -04:00
22ae1aeee4 Swap away from using hash checks 2025-07-07 19:07:18 -04:00
cd13733621 improvement: create crud events for secret approvals on merge, improve secret approval audit logs and add missing merge event 2025-07-07 13:50:03 -07:00
0191eb48f3 Merge pull request #3974 from Infisical/fix-email-invite-notifications
Improve + fix invitation reminder logic
2025-07-07 14:47:50 -04:00
9d39910152 Minor fix to prevent setting lastInvitedAt for invitees who weren’t actually sent an invitation 2025-07-07 15:35:49 -03:00
c5a8786d1c Merge branch 'main' into ENG-3041 2025-07-07 13:41:59 -04:00
9137fa4ca5 Improve + fix invitation reminder logic 2025-07-07 13:31:20 -04:00
84687c0558 remove comments 2025-07-07 11:00:27 -04:00
78da7ec343 Merge pull request #3972 from Infisical/fix/telemetryOrgIdentify
feat(telemetry): improve Posthog org identity logic
2025-07-07 10:15:59 -03:00
00d4ae9fbd fix: fix resource table search 2025-07-04 17:51:18 -07:00
218338e5d2 Review fixes 2025-07-04 01:50:41 -04:00
456107fbf3 Update CLI version 2025-07-04 01:32:55 -04:00
2003f5b671 Bitbucket app connection docs 2025-07-04 01:14:52 -04:00
d2c6bcc7a7 Secret scanning docs 2025-07-03 23:45:05 -04:00
06bd593b60 Verify requests are from Bitbucket using signing 2025-07-03 23:10:32 -04:00
aea43c0a8e Final tweaks 2025-07-03 22:18:40 -04:00
5badb811e1 Rename BitBucket files to Bitbucket 2025-07-03 20:41:53 -04:00
7f8b489724 Merge branch 'ENG-3041' of github.com:Infisical/infisical into ENG-3041 2025-07-03 20:31:40 -04:00
8723a16913 Lint fixes 2025-07-03 20:30:20 -04:00
b4593a2e11 improvement: add teardown functionality to scanning factory and update generic types 2025-07-03 17:28:52 -07:00
1bbf78e295 Merge branch 'main' into ENG-3041 2025-07-03 19:55:32 -04:00
3b767a4deb Comment changes + revert license 2025-07-03 19:12:03 -04:00
18f5f5d04e Comment 2025-07-03 18:51:21 -04:00
6a6f08fc4d Make webhooks work, add workspace selection, rename BitBucket to
Bitbucket
2025-07-03 18:49:29 -04:00
8c6b903204 Tweaks 2025-07-03 02:00:14 -04:00
edecfb1f62 feat(secret-scanning): BitBucket data source 2025-07-03 00:01:37 -04:00
ae35a863bc App connection updates 2025-07-03 00:00:50 -04:00
62ad82f7b1 feat(app-connection): BitBucket app connection 2025-07-02 17:56:48 -04:00
ba57899a56 Update 20250602155451_fix-secret-versions.ts 2025-07-02 00:50:33 +04:00
aef3a7436f fix 20250602155451_fix-secret-versions.ts
fix infisical-schema-migration CrashLoopBackOff when upgrading to 0.133.0 #3849
2025-06-26 13:48:41 +03:00
135 changed files with 2951 additions and 299 deletions

View File

@ -19,7 +19,7 @@ WORKDIR /app
# Copy dependencies # Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files # Copy all files
COPY /frontend . COPY /frontend .
ENV NODE_ENV production ENV NODE_ENV production
@ -32,7 +32,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build # Build
RUN npm run build RUN npm run build
@ -134,7 +134,7 @@ RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-li
# Install Infisical CLI # Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.2 \ && apt-get update && apt-get install -y infisical=0.41.89 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
@ -155,7 +155,7 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR / WORKDIR /
COPY --from=backend-runner /app /backend COPY --from=backend-runner /app /backend
@ -166,9 +166,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080 ENV PORT 8080
ENV HOST=0.0.0.0 ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false ENV HTTPS_ENABLED false
ENV NODE_ENV production ENV NODE_ENV production
ENV STANDALONE_BUILD true ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
ENV NODE_OPTIONS="--max-old-space-size=1024" ENV NODE_OPTIONS="--max-old-space-size=1024"

View File

@ -20,7 +20,7 @@ WORKDIR /app
# Copy dependencies # Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files # Copy all files
COPY /frontend . COPY /frontend .
ENV NODE_ENV production ENV NODE_ENV production
@ -33,7 +33,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build # Build
RUN npm run build RUN npm run build
@ -128,7 +128,7 @@ RUN apt-get update && apt-get install -y \
# Install Infisical CLI # Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.2 \ && apt-get update && apt-get install -y infisical=0.41.89 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
WORKDIR / WORKDIR /
@ -164,9 +164,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080 ENV PORT 8080
ENV HOST=0.0.0.0 ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false ENV HTTPS_ENABLED false
ENV NODE_ENV production ENV NODE_ENV production
ENV STANDALONE_BUILD true ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true ENV STANDALONE_MODE true
ENV NODE_OPTIONS="--max-old-space-size=1024" ENV NODE_OPTIONS="--max-old-space-size=1024"

View File

@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y \
make \ make \
g++ \ g++ \
openssh-client \ openssh-client \
openssl openssl
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets) # Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \ RUN apt-get install -y \
@ -55,10 +55,10 @@ COPY --from=build /app .
# Install Infisical CLI # Install Infisical CLI
RUN apt-get install -y curl bash && \ RUN apt-get install -y curl bash && \
curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \ curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.41.2 git apt-get update && apt-get install -y infisical=0.41.89 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \ HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js CMD node healthcheck.js
ENV HOST=0.0.0.0 ENV HOST=0.0.0.0

View File

@ -57,7 +57,7 @@ RUN mkdir -p /etc/softhsm2/tokens && \
# Install Infisical CLI # Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \ RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \ apt-get update && \
apt-get install -y infisical=0.41.2 apt-get install -y infisical=0.41.89
WORKDIR /app WORKDIR /app

View File

@ -52,7 +52,7 @@ RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \ RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000 softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
WORKDIR /openssl-build WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \ && tar -xf openssl-3.1.2.tar.gz \
@ -66,7 +66,7 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# Install Infisical CLI # Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \ RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \ apt-get update && \
apt-get install -y infisical=0.41.2 apt-get install -y infisical=0.41.89
WORKDIR /app WORKDIR /app

View File

@ -4,6 +4,7 @@ import "ts-node/register";
import dotenv from "dotenv"; import dotenv from "dotenv";
import type { Knex } from "knex"; import type { Knex } from "knex";
import path from "path"; import path from "path";
import { initLogger } from "@app/lib/logger";
// Update with your config settings. . // Update with your config settings. .
dotenv.config({ dotenv.config({
@ -13,6 +14,8 @@ dotenv.config({
path: path.join(__dirname, "../../../.env") path: path.join(__dirname, "../../../.env")
}); });
initLogger();
export default { export default {
development: { development: {
client: "postgres", client: "postgres",

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
  // Idempotent: only alter the column when it is actually present.
  const columnExists = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  if (!columnExists) return;

  await knex.schema.alterTable(TableName.OrgMembership, (table) => {
    // New memberships now record an invite timestamp automatically via the DB default.
    table.datetime("lastInvitedAt").nullable().defaultTo(knex.fn.now()).alter();
  });
}
export async function down(knex: Knex): Promise<void> {
  // Idempotent: only alter the column when it is actually present.
  const columnExists = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
  if (!columnExists) return;

  await knex.schema.alterTable(TableName.OrgMembership, (table) => {
    // Revert to a plain nullable datetime with no database default.
    table.datetime("lastInvitedAt").nullable().alter();
  });
}

View File

@ -0,0 +1,48 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
// Generous ceiling for building indexes over very large identity-access-token tables.
const MIGRATION_TIMEOUT = 30 * 60 * 1000; // 30 minutes

/**
 * Adds indexes on the referencing columns of the identity access token table so
 * CASCADE deletes of identities no longer sequentially scan every token row.
 */
export async function up(knex: Knex): Promise<void> {
  // Capture the session's current statement_timeout so it can be restored afterwards.
  const result = await knex.raw("SHOW statement_timeout");
  const originalTimeout = result.rows[0].statement_timeout;
  try {
    // Index builds can exceed the default timeout; raise it for this session only.
    await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
    // iat means IdentityAccessToken
    // CONCURRENTLY avoids blocking writes during the build; it cannot run inside a
    // transaction, which is why this file exports `config = { transaction: false }`.
    await knex.raw(`
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_iat_identity_id
ON ${TableName.IdentityAccessToken} ("identityId")
`);
    await knex.raw(`
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_iat_ua_client_secret_id
ON ${TableName.IdentityAccessToken} ("identityUAClientSecretId")
`);
  } finally {
    // Restore the previous timeout; quoted because SHOW returns values like '0' or '30s'.
    await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
  }
}
/**
 * Drops the identity-access-token indexes created by `up`.
 */
export async function down(knex: Knex): Promise<void> {
  // Capture the session's current statement_timeout so it can be restored afterwards.
  const result = await knex.raw("SHOW statement_timeout");
  const originalTimeout = result.rows[0].statement_timeout;
  try {
    await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
    // Drop CONCURRENTLY to mirror the lock-free build in `up` — a plain DROP INDEX
    // takes an ACCESS EXCLUSIVE lock on the table, blocking reads and writes while
    // it waits. CONCURRENTLY is permitted here because this migration already runs
    // outside a transaction (see `config` below).
    await knex.raw(`
DROP INDEX CONCURRENTLY IF EXISTS idx_iat_identity_id
`);
    await knex.raw(`
DROP INDEX CONCURRENTLY IF EXISTS idx_iat_ua_client_secret_id
`);
  } finally {
    // Restore the previous timeout; quoted because SHOW returns values like '0' or '30s'.
    await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
  }
}

// CONCURRENTLY index operations cannot run inside a transaction block, so opt this
// migration out of Knex's per-migration transaction wrapper.
export const config = { transaction: false };

View File

@ -141,14 +141,39 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}, },
onRequest: verifyAuth([AuthMode.JWT]), onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => { handler: async (req) => {
const { approval } = await server.services.secretApprovalRequest.mergeSecretApprovalRequest({ const { approval, projectId, secretMutationEvents } =
actorId: req.permission.id, await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
actor: req.permission.type, actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod, actor: req.permission.type,
actorOrgId: req.permission.orgId, actorAuthMethod: req.permission.authMethod,
approvalId: req.params.id, actorOrgId: req.permission.orgId,
bypassReason: req.body.bypassReason approvalId: req.params.id,
bypassReason: req.body.bypassReason
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event: {
type: EventType.SECRET_APPROVAL_MERGED,
metadata: {
mergedBy: req.permission.id,
secretApprovalRequestSlug: approval.slug,
secretApprovalRequestId: approval.id
}
}
}); });
for await (const event of secretMutationEvents) {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event
});
}
return { approval }; return { approval };
} }
}); });

View File

@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
BitbucketDataSourceSchema,
CreateBitbucketDataSourceSchema,
UpdateBitbucketDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
/**
 * Registers the standard secret-scanning v2 endpoints for the Bitbucket data
 * source on the given Fastify server, wiring in the Bitbucket-specific Zod
 * schemas for create/update requests and responses.
 */
export const registerBitbucketSecretScanningRouter = async (server: FastifyZodProvider) =>
  registerSecretScanningEndpoints({
    type: SecretScanningDataSource.Bitbucket,
    server,
    responseSchema: BitbucketDataSourceSchema,
    createSchema: CreateBitbucketDataSourceSchema,
    updateSchema: UpdateBitbucketDataSourceSchema
  });

View File

@ -1,5 +1,6 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums"; import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router"; import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router"; export * from "./secret-scanning-v2-router";
@ -8,5 +9,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
SecretScanningDataSource, SecretScanningDataSource,
(server: FastifyZodProvider) => Promise<void> (server: FastifyZodProvider) => Promise<void>
> = { > = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
[SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
}; };

View File

@ -2,6 +2,7 @@ import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas"; import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github"; import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import { import {
SecretScanningFindingStatus, SecretScanningFindingStatus,
@ -21,7 +22,10 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type"; import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]); const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
GitHubDataSourceListItemSchema,
BitbucketDataSourceListItemSchema
]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => { export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
server.route({ server.route({

View File

@ -116,6 +116,15 @@ interface BaseAuthData {
userAgentType?: UserAgentType; userAgentType?: UserAgentType;
} }
export enum SecretApprovalEvent {
Create = "create",
Update = "update",
Delete = "delete",
CreateMany = "create-many",
UpdateMany = "update-many",
DeleteMany = "delete-many"
}
export enum UserAgentType { export enum UserAgentType {
WEB = "web", WEB = "web",
CLI = "cli", CLI = "cli",
@ -1705,6 +1714,17 @@ interface SecretApprovalRequest {
committedBy: string; committedBy: string;
secretApprovalRequestSlug: string; secretApprovalRequestSlug: string;
secretApprovalRequestId: string; secretApprovalRequestId: string;
eventType: SecretApprovalEvent;
secretKey?: string;
secretId?: string;
secrets?: {
secretKey?: string;
secretId?: string;
environment?: string;
secretPath?: string;
}[];
environment: string;
secretPath: string;
}; };
} }

View File

@ -10,6 +10,7 @@ import {
TSecretApprovalRequestsSecretsInsert, TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsV2Insert TSecretApprovalRequestsSecretsV2Insert
} from "@app/db/schemas"; } from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
import { getConfig } from "@app/lib/config/env"; import { getConfig } from "@app/lib/config/env";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto"; import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@ -523,7 +524,7 @@ export const secretApprovalRequestServiceFactory = ({
}); });
} }
const { policy, folderId, projectId, bypassers } = secretApprovalRequest; const { policy, folderId, projectId, bypassers, environment } = secretApprovalRequest;
if (policy.deletedAt) { if (policy.deletedAt) {
throw new BadRequestError({ throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted." message: "The policy associated with this secret approval request has been deleted."
@ -957,7 +958,112 @@ export const secretApprovalRequestServiceFactory = ({
}); });
} }
return mergeStatus; const { created, updated, deleted } = mergeStatus.secrets;
const secretMutationEvents: Event[] = [];
if (created.length) {
if (created.length > 1) {
secretMutationEvents.push({
type: EventType.CREATE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: created.map((secret) => ({
secretId: secret.id,
secretVersion: 1,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}))
}
});
} else {
const [secret] = created;
secretMutationEvents.push({
type: EventType.CREATE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: 1,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}
});
}
}
if (updated.length) {
if (updated.length > 1) {
secretMutationEvents.push({
type: EventType.UPDATE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: updated.map((secret) => ({
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}))
}
});
} else {
const [secret] = updated;
secretMutationEvents.push({
type: EventType.UPDATE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}
});
}
}
if (deleted.length) {
if (deleted.length > 1) {
secretMutationEvents.push({
type: EventType.DELETE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: deleted.map((secret) => ({
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string
}))
}
});
} else {
const [secret] = deleted;
secretMutationEvents.push({
type: EventType.DELETE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string
}
});
}
}
return { ...mergeStatus, projectId, secretMutationEvents };
}; };
// function to save secret change to secret approval // function to save secret change to secret approval

View File

@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
// Catalog entry describing the Bitbucket secret-scanning data source — presumably
// surfaced by the "list available data source options" endpoint alongside the
// other providers' *_LIST_OPTION constants (TODO: confirm against the option registry).
export const BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
  name: "Bitbucket",
  type: SecretScanningDataSource.Bitbucket,
  connection: AppConnection.Bitbucket
};

View File

@ -0,0 +1,314 @@
import { join } from "path";
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { BasicRepositoryRegex } from "@app/lib/regex";
import {
getBitbucketUser,
listBitbucketRepositories,
TBitbucketConnection
} from "@app/services/app-connection/bitbucket";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import {
TBitbucketDataSourceCredentials,
TBitbucketDataSourceInput,
TBitbucketDataSourceWithConnection,
TQueueBitbucketResourceDiffScan
} from "./bitbucket-secret-scanning-types";
/**
 * Factory producing the Bitbucket implementation of the secret-scanning v2
 * data-source contract: webhook lifecycle (initialize / postInitialization /
 * teardown), resource listing, full-scan cloning, and diff-scan finding
 * extraction from push payloads.
 */
export const BitbucketSecretScanningFactory = () => {
  // Basic auth header for the Bitbucket REST API (Atlassian email + API token).
  // Centralized so every request builds it identically.
  const buildAuthHeader = (email: string, apiToken: string) =>
    `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;

  const initialize: TSecretScanningFactoryInitialize<
    TBitbucketDataSourceInput,
    TBitbucketConnection,
    TBitbucketDataSourceCredentials
  > = async ({ connection, payload }, callback) => {
    const cfg = getConfig();
    const { email, apiToken } = connection.credentials;
    const authHeader = buildAuthHeader(email, apiToken);

    // Create the webhook disabled; postInitialization re-points and activates it
    // once the data source ID exists and can be embedded in the callback URL.
    const { data } = await request.post<{ uuid: string }>(
      `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks`,
      {
        description: "Infisical webhook for push events",
        url: `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket`,
        active: false,
        events: ["repo:push"]
      },
      {
        headers: {
          Authorization: authHeader,
          Accept: "application/json"
        }
      }
    );

    return callback({
      credentials: { webhookId: data.uuid, webhookSecret: alphaNumericNanoId(64) }
    });
  };

  const postInitialization: TSecretScanningFactoryPostInitialization<
    TBitbucketDataSourceInput,
    TBitbucketConnection,
    TBitbucketDataSourceCredentials
  > = async ({ dataSourceId, credentials, connection, payload }) => {
    const { email, apiToken } = connection.credentials;
    const { webhookId, webhookSecret } = credentials;
    const authHeader = buildAuthHeader(email, apiToken);

    const cfg = getConfig();
    // Activate the webhook and point it at the data-source-specific callback URL.
    const newWebhookUrl = `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket?dataSourceId=${dataSourceId}`;

    await request.put(
      `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks/${webhookId}`,
      {
        description: "Infisical webhook for push events",
        url: newWebhookUrl,
        active: true,
        events: ["repo:push"],
        secret: webhookSecret
      },
      {
        headers: {
          Authorization: authHeader,
          Accept: "application/json"
        }
      }
    );
  };

  const teardown: TSecretScanningFactoryTeardown<
    TBitbucketDataSourceWithConnection,
    TBitbucketDataSourceCredentials
  > = async ({ credentials, dataSource }) => {
    const {
      connection: {
        credentials: { email, apiToken }
      },
      config
    } = dataSource;
    const { webhookId } = credentials;
    const authHeader = buildAuthHeader(email, apiToken);

    try {
      // Encode the workspace slug for consistency with initialize/postInitialization.
      await request.delete(
        `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(config.workspaceSlug)}/hooks/${webhookId}`,
        {
          headers: {
            Authorization: authHeader,
            Accept: "application/json"
          }
        }
      );
    } catch (err) {
      // Best-effort cleanup: log (including the cause) but don't fail teardown.
      logger.error(err, `teardown: Bitbucket - Failed to call delete on webhook [webhookId=${webhookId}]`);
    }
  };

  const listRawResources: TSecretScanningFactoryListRawResources<TBitbucketDataSourceWithConnection> = async (
    dataSource
  ) => {
    const {
      connection,
      config: { includeRepos, workspaceSlug }
    } = dataSource;

    const repos = await listBitbucketRepositories(connection, workspaceSlug);

    // "*" selects every repository in the workspace; otherwise filter by full name.
    const filteredRepos: typeof repos = [];
    if (includeRepos.includes("*")) {
      filteredRepos.push(...repos);
    } else {
      filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
    }

    return filteredRepos.map(({ full_name, uuid }) => ({
      name: full_name,
      externalId: uuid,
      type: SecretScanningResource.Repository
    }));
  };

  const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TBitbucketDataSourceWithConnection> = async ({
    dataSource,
    resourceName,
    tempFolder
  }) => {
    const {
      connection: {
        credentials: { apiToken, email }
      }
    } = dataSource;

    const repoPath = join(tempFolder, "repo.git");

    // Guard against path-injection via a malformed "workspace/repo" name.
    if (!BasicRepositoryRegex.test(resourceName)) {
      throw new Error("Invalid Bitbucket repository name");
    }

    const { username } = await getBitbucketUser({ email, apiToken });

    // Encode both userinfo components — tokens may contain URL-reserved characters.
    await cloneRepository({
      cloneUrl: `https://${encodeURIComponent(username)}:${encodeURIComponent(apiToken)}@bitbucket.org/${resourceName}.git`,
      repoPath
    });

    return repoPath;
  };

  const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
    TQueueBitbucketResourceDiffScan["payload"]
  > = ({ repository }) => {
    return {
      name: repository.full_name,
      externalId: repository.uuid,
      type: SecretScanningResource.Repository
    };
  };

  const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
    TBitbucketDataSourceWithConnection,
    TQueueBitbucketResourceDiffScan["payload"]
  > = async ({ dataSource, payload, resourceName, configPath }) => {
    const {
      connection: {
        credentials: { apiToken, email }
      }
    } = dataSource;

    const { push, repository } = payload;
    const allFindings: SecretMatch[] = [];
    const authHeader = buildAuthHeader(email, apiToken);

    for (const change of push.changes) {
      for (const commit of change.commits) {
        // eslint-disable-next-line no-await-in-loop
        const { data: diffstat } = await request.get<{
          values: {
            status: "added" | "modified" | "removed" | "renamed";
            new?: { path: string };
            old?: { path: string };
          }[];
        }>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${repository.full_name}/diffstat/${commit.hash}`, {
          headers: {
            Authorization: authHeader,
            Accept: "application/json"
          }
        });

        // eslint-disable-next-line no-continue
        if (!diffstat.values) continue;

        for (const file of diffstat.values) {
          // Only scan content that was introduced or changed by this commit.
          if ((file.status === "added" || file.status === "modified") && file.new?.path) {
            const filePath = file.new.path;

            // Use the shared API base URL (was previously hardcoded to api.bitbucket.org).
            // eslint-disable-next-line no-await-in-loop
            const { data: patch } = await request.get<string>(
              `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${repository.full_name}/diff/${commit.hash}`,
              {
                params: {
                  path: filePath
                },
                headers: {
                  Authorization: authHeader
                },
                responseType: "text"
              }
            );

            // eslint-disable-next-line no-continue
            if (!patch) continue;

            // Scan only the changed lines; context lines are blanked so that the
            // reported line numbers can be mapped back onto the real file.
            // eslint-disable-next-line no-await-in-loop
            const findings = await scanContentAndGetFindings(replaceNonChangesWithNewlines(`\n${patch}`), configPath);

            const adjustedFindings = findings.map((finding) => {
              const startLine = convertPatchLineToFileLineNumber(patch, finding.StartLine);
              const endLine =
                finding.StartLine === finding.EndLine
                  ? startLine
                  : convertPatchLineToFileLineNumber(patch, finding.EndLine);
              const startColumn = finding.StartColumn - 1; // subtract 1 for +
              const endColumn = finding.EndColumn - 1; // subtract 1 for +
              const authorName = commit.author.user?.display_name || commit.author.raw.split(" <")[0];
              const emailMatch = commit.author.raw.match(/<(.*)>/);
              const authorEmail = emailMatch?.[1] ?? "";

              return {
                ...finding,
                StartLine: startLine,
                EndLine: endLine,
                StartColumn: startColumn,
                EndColumn: endColumn,
                File: filePath,
                Commit: commit.hash,
                Author: authorName,
                Email: authorEmail,
                Message: commit.message,
                Fingerprint: `${commit.hash}:${filePath}:${finding.RuleID}:${startLine}:${startColumn}`,
                Date: commit.date,
                Link: `https://bitbucket.org/${resourceName}/src/${commit.hash}/${filePath}#lines-${startLine}`
              };
            });

            allFindings.push(...adjustedFindings);
          }
        }
      }
    }

    return allFindings.map(
      ({
        // discard match and secret as we don't want to store
        Match,
        Secret,
        ...finding
      }) => ({
        details: titleCaseToCamelCase(finding),
        fingerprint: finding.Fingerprint,
        severity: SecretScanningFindingSeverity.High,
        rule: finding.RuleID
      })
    );
  };

  return {
    initialize,
    postInitialization,
    listRawResources,
    getFullScanPath,
    getDiffScanResourcePayload,
    getDiffScanFindingsPayload,
    teardown
  };
};

View File

@ -0,0 +1,97 @@
import { z } from "zod";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
// An includeRepos entry is either the "*" wildcard or a "workspace/repo" slug.
const bitbucketRepoEntrySchema = z
  .string()
  .min(1)
  .max(256)
  .refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format");

/** User-supplied configuration for a Bitbucket secret scanning data source. */
export const BitbucketDataSourceConfigSchema = z.object({
  workspaceSlug: z
    .string()
    .min(1, "Workspace slug required")
    .max(128)
    .describe(SecretScanningDataSources.CONFIG.BITBUCKET.workspaceSlug),
  includeRepos: z
    .array(bitbucketRepoEntrySchema)
    .nonempty("One or more repositories required")
    .max(100, "Cannot configure more than 100 repositories")
    .default(["*"])
    .describe(SecretScanningDataSources.CONFIG.BITBUCKET.includeRepos)
});
/** Persisted shape of a Bitbucket data source (base fields + Bitbucket config). */
export const BitbucketDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.Bitbucket,
  isConnectionRequired: true
})
  .extend({ config: BitbucketDataSourceConfigSchema })
  .describe(JSON.stringify({ title: "Bitbucket" }));
/** Request body schema for creating a Bitbucket data source. */
export const CreateBitbucketDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.Bitbucket,
  isConnectionRequired: true
})
  .extend({ config: BitbucketDataSourceConfigSchema })
  .describe(JSON.stringify({ title: "Bitbucket" }));
/** Request body schema for updating a Bitbucket data source; config is optional on update. */
export const UpdateBitbucketDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(
  SecretScanningDataSource.Bitbucket
)
  .extend({ config: BitbucketDataSourceConfigSchema.optional() })
  .describe(JSON.stringify({ title: "Bitbucket" }));
// Catalog entry describing the Bitbucket data source option (name, required
// app connection, and discriminant type) for the data source listing endpoint.
export const BitbucketDataSourceListItemSchema = z
  .object({
    name: z.literal("Bitbucket"),
    connection: z.literal(AppConnection.Bitbucket),
    type: z.literal(SecretScanningDataSource.Bitbucket)
  })
  .describe(
    JSON.stringify({
      title: "Bitbucket"
    })
  );
// Finding emitted by Bitbucket repository scans; detail payload follows the
// shared git-repository finding shape.
export const BitbucketFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Repository),
  dataSourceType: z.literal(SecretScanningDataSource.Bitbucket),
  details: GitRepositoryScanFindingDetailsSchema
});
// Credentials persisted (encrypted) for a Bitbucket data source: the id of the
// webhook registered on the workspace and the secret used to sign its payloads.
export const BitbucketDataSourceCredentialsSchema = z.object({
  webhookId: z.string(),
  webhookSecret: z.string()
});

View File

@ -0,0 +1,104 @@
import crypto from "crypto";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import {
TBitbucketDataSource,
TBitbucketDataSourceCredentials,
TBitbucketPushEvent
} from "./bitbucket-secret-scanning-types";
export const bitbucketSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  /**
   * Handles a Bitbucket push webhook: verifies the HMAC-SHA256 signature of the
   * raw body against the stored webhook secret, then queues a diff scan when
   * auto-scan is enabled and the repository is covered by includeRepos.
   */
  const handlePushEvent = async (
    payload: TBitbucketPushEvent & { dataSourceId: string; receivedSignature: string; bodyString: string }
  ) => {
    const { push, repository, bodyString, receivedSignature } = payload;

    if (!push?.changes?.length || !repository?.workspace?.uuid) {
      logger.warn(
        `secretScanningV2PushEvent: Bitbucket - Insufficient data [changes=${
          push?.changes?.length ?? 0
        }] [repository=${repository?.name}] [workspaceUuid=${repository?.workspace?.uuid}]`
      );
      return;
    }

    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      id: payload.dataSourceId,
      type: SecretScanningDataSource.Bitbucket
    })) as TBitbucketDataSource | undefined;

    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: Bitbucket - Could not find data source [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    const {
      isAutoScanEnabled,
      config: { includeRepos },
      encryptedCredentials,
      projectId
    } = dataSource;

    if (!encryptedCredentials) {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId
    });

    const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });

    // NOTE(review): parsed without schema validation — assumes the write path
    // stored a valid TBitbucketDataSourceCredentials JSON; confirm against it
    const credentials = JSON.parse(decryptedCredentials.toString()) as TBitbucketDataSourceCredentials;

    // Verify webhook authenticity: HMAC-SHA256 over the raw body, keyed by the
    // webhook secret. Compare in constant time so signature bytes cannot be
    // brute-forced via timing differences (receivedSignature is attacker-controlled).
    const hmac = crypto.createHmac("sha256", credentials.webhookSecret);
    hmac.update(bodyString);
    const calculatedSignature = hmac.digest();
    const receivedSignatureBuffer = Buffer.from(receivedSignature, "hex");

    const isSignatureValid =
      calculatedSignature.length === receivedSignatureBuffer.length &&
      crypto.timingSafeEqual(calculatedSignature, receivedSignatureBuffer);

    if (!isSignatureValid) {
      logger.error(
        `secretScanningV2PushEvent: Bitbucket - Invalid signature for webhook [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
      );
      return;
    }

    if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.Bitbucket,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: Bitbucket - ignoring due to repository not being present in config [workspaceUuid=${repository.workspace.uuid}] [dataSourceId=${dataSource.id}]`
      );
    }
  };

  return {
    handlePushEvent
  };
};

View File

@ -0,0 +1,85 @@
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TBitbucketConnection } from "@app/services/app-connection/bitbucket";
import {
BitbucketDataSourceCredentialsSchema,
BitbucketDataSourceListItemSchema,
BitbucketDataSourceSchema,
BitbucketFindingSchema,
CreateBitbucketDataSourceSchema
} from "./bitbucket-secret-scanning-schemas";
/** Persisted Bitbucket data source record (inferred from its zod schema). */
export type TBitbucketDataSource = z.infer<typeof BitbucketDataSourceSchema>;

/** Create-request payload for a Bitbucket data source. */
export type TBitbucketDataSourceInput = z.infer<typeof CreateBitbucketDataSourceSchema>;

/** Catalog entry for the Bitbucket data source option. */
export type TBitbucketDataSourceListItem = z.infer<typeof BitbucketDataSourceListItemSchema>;

/** Decrypted credential payload: webhook id + signing secret. */
export type TBitbucketDataSourceCredentials = z.infer<typeof BitbucketDataSourceCredentialsSchema>;

/** A secret scanning finding originating from a Bitbucket repository. */
export type TBitbucketFinding = z.infer<typeof BitbucketFindingSchema>;

/** Data source with its app connection resolved and attached. */
export type TBitbucketDataSourceWithConnection = TBitbucketDataSource & {
  connection: TBitbucketConnection;
};

// Repository object as delivered in a Bitbucket push webhook payload.
export type TBitbucketPushEventRepository = {
  full_name: string;
  name: string;
  workspace: {
    slug: string;
    uuid: string;
  };
  uuid: string;
};

// Commit object within a push webhook change; `author.raw` is the
// "Name <email>" string, `author.user` is set when Bitbucket resolves a user.
export type TBitbucketPushEventCommit = {
  hash: string;
  message: string;
  author: {
    raw: string;
    user?: {
      display_name: string;
      uuid: string;
      nickname: string;
    };
  };
  date: string;
};

// A single ref change in a push event (branch/tag before and after states).
export type TBitbucketPushEventChange = {
  new?: {
    name: string;
    type: string;
  };
  old?: {
    name: string;
    type: string;
  };
  created: boolean;
  closed: boolean;
  forced: boolean;
  commits: TBitbucketPushEventCommit[];
};

// Top-level Bitbucket push webhook payload shape consumed by the service.
export type TBitbucketPushEvent = {
  push: {
    changes: TBitbucketPushEventChange[];
  };
  repository: TBitbucketPushEventRepository;
  actor: {
    display_name: string;
    uuid: string;
    nickname: string;
  };
};

// Queue job payload for a Bitbucket diff scan triggered by a push event.
export type TQueueBitbucketResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.Bitbucket;
  payload: TBitbucketPushEvent & { dataSourceId: string };
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};

View File

@ -0,0 +1,3 @@
// Barrel file for the Bitbucket secret-scanning data source module.
export * from "./bitbucket-secret-scanning-constants";
export * from "./bitbucket-secret-scanning-schemas";
export * from "./bitbucket-secret-scanning-types";

View File

@ -19,18 +19,23 @@ import {
TSecretScanningFactoryGetFullScanPath, TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize, TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources, TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types"; } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env"; import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors"; import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn"; import { titleCaseToCamelCase } from "@app/lib/fn";
import { GitHubRepositoryRegex } from "@app/lib/regex"; import { BasicRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar"; import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types"; import {
TGitHubDataSourceInput,
TGitHubDataSourceWithConnection,
TQueueGitHubResourceDiffScan
} from "./github-secret-scanning-types";
export const GitHubSecretScanningFactory = () => { export const GitHubSecretScanningFactory = () => {
const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async ( const initialize: TSecretScanningFactoryInitialize<TGitHubDataSourceInput, TGitHubRadarConnection> = async (
{ connection, secretScanningV2DAL }, { connection, secretScanningV2DAL },
callback callback
) => { ) => {
@ -51,10 +56,17 @@ export const GitHubSecretScanningFactory = () => {
}); });
}; };
const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => { const postInitialization: TSecretScanningFactoryPostInitialization<
TGitHubDataSourceInput,
TGitHubRadarConnection
> = async () => {
// no post-initialization required // no post-initialization required
}; };
const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
// no termination required
};
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async ( const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
dataSource dataSource
) => { ) => {
@ -107,7 +119,7 @@ export const GitHubSecretScanningFactory = () => {
const repoPath = join(tempFolder, "repo.git"); const repoPath = join(tempFolder, "repo.git");
if (!GitHubRepositoryRegex.test(resourceName)) { if (!BasicRepositoryRegex.test(resourceName)) {
throw new Error("Invalid GitHub repository name"); throw new Error("Invalid GitHub repository name");
} }
@ -225,6 +237,7 @@ export const GitHubSecretScanningFactory = () => {
listRawResources, listRawResources,
getFullScanPath, getFullScanPath,
getDiffScanResourcePayload, getDiffScanResourcePayload,
getDiffScanFindingsPayload getDiffScanFindingsPayload,
teardown
}; };
}; };

View File

@ -12,7 +12,7 @@ import {
GitRepositoryScanFindingDetailsSchema GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas"; } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs"; import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitHubRepositoryRegex } from "@app/lib/regex"; import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums"; import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GitHubDataSourceConfigSchema = z.object({ export const GitHubDataSourceConfigSchema = z.object({
@ -22,7 +22,7 @@ export const GitHubDataSourceConfigSchema = z.object({
.string() .string()
.min(1) .min(1)
.max(256) .max(256)
.refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format") .refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
) )
.nonempty("One or more repositories required") .nonempty("One or more repositories required")
.max(100, "Cannot configure more than 100 repositories") .max(100, "Cannot configure more than 100 repositories")

View File

@ -1,5 +1,6 @@
export enum SecretScanningDataSource { export enum SecretScanningDataSource {
GitHub = "github" GitHub = "github",
Bitbucket = "bitbucket"
} }
export enum SecretScanningScanStatus { export enum SecretScanningScanStatus {

View File

@ -1,19 +1,23 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory"; import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { SecretScanningDataSource } from "./secret-scanning-v2-enums"; import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import { import {
TQueueSecretScanningResourceDiffScan, TQueueSecretScanningResourceDiffScan,
TSecretScanningDataSourceCredentials, TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceInput,
TSecretScanningDataSourceWithConnection, TSecretScanningDataSourceWithConnection,
TSecretScanningFactory TSecretScanningFactory
} from "./secret-scanning-v2-types"; } from "./secret-scanning-v2-types";
type TSecretScanningFactoryImplementation = TSecretScanningFactory< type TSecretScanningFactoryImplementation = TSecretScanningFactory<
TSecretScanningDataSourceWithConnection, TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceCredentials, TQueueSecretScanningResourceDiffScan["payload"],
TQueueSecretScanningResourceDiffScan["payload"] TSecretScanningDataSourceInput,
TSecretScanningDataSourceCredentials
>; >;
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = { export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation [SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
}; };

View File

@ -4,6 +4,7 @@ import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns"; import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types"; import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github"; import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn"; import { titleCaseToCamelCase } from "@app/lib/fn";
@ -11,7 +12,8 @@ import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secre
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types"; import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = { const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION [SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
}; };
export const listSecretScanningDataSourceOptions = () => { export const listSecretScanningDataSourceOptions = () => {

View File

@ -2,13 +2,16 @@ import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/se
import { AppConnection } from "@app/services/app-connection/app-connection-enums"; import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = { export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
[SecretScanningDataSource.GitHub]: "GitHub" [SecretScanningDataSource.GitHub]: "GitHub",
[SecretScanningDataSource.Bitbucket]: "Bitbucket"
}; };
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = { export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar [SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
}; };
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = { export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" } [SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
}; };

View File

@ -19,8 +19,7 @@ export const BaseSecretScanningDataSourceSchema = ({
// unique to provider // unique to provider
type: true, type: true,
connectionId: true, connectionId: true,
config: true, config: true
encryptedCredentials: true
}).extend({ }).extend({
type: z.literal(type), type: z.literal(type),
connectionId: isConnectionRequired ? z.string().uuid() : z.null(), connectionId: isConnectionRequired ? z.string().uuid() : z.null(),

View File

@ -30,6 +30,8 @@ import {
TFindSecretScanningDataSourceByNameDTO, TFindSecretScanningDataSourceByNameDTO,
TListSecretScanningDataSourcesByProjectId, TListSecretScanningDataSourcesByProjectId,
TSecretScanningDataSource, TSecretScanningDataSource,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceInput,
TSecretScanningDataSourceWithConnection, TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceWithDetails, TSecretScanningDataSourceWithDetails,
TSecretScanningFinding, TSecretScanningFinding,
@ -49,6 +51,7 @@ import { TAppConnection } from "@app/services/app-connection/app-connection-type
import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types"; import { KmsDataKey } from "@app/services/kms/kms-types";
import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal"; import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue"; import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
@ -256,7 +259,7 @@ export const secretScanningV2ServiceFactory = ({
try { try {
const createdDataSource = await factory.initialize( const createdDataSource = await factory.initialize(
{ {
payload, payload: payload as TSecretScanningDataSourceInput,
connection: connection as TSecretScanningDataSourceWithConnection["connection"], connection: connection as TSecretScanningDataSourceWithConnection["connection"],
secretScanningV2DAL secretScanningV2DAL
}, },
@ -287,7 +290,7 @@ export const secretScanningV2ServiceFactory = ({
); );
await factory.postInitialization({ await factory.postInitialization({
payload, payload: payload as TSecretScanningDataSourceInput,
connection: connection as TSecretScanningDataSourceWithConnection["connection"], connection: connection as TSecretScanningDataSourceWithConnection["connection"],
dataSourceId: dataSource.id, dataSourceId: dataSource.id,
credentials credentials
@ -398,7 +401,6 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id, actorId: actor.id,
actorAuthMethod: actor.authMethod, actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId, actorOrgId: actor.orgId,
projectId: dataSource.projectId projectId: dataSource.projectId
}); });
@ -412,7 +414,36 @@ export const secretScanningV2ServiceFactory = ({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}` message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
}); });
// TODO: clean up webhooks const factory = SECRET_SCANNING_FACTORY_MAP[type]();
let connection: TAppConnection | null = null;
if (dataSource.connection) {
connection = await decryptAppConnection(dataSource.connection, kmsService);
}
let credentials: TSecretScanningDataSourceCredentials | undefined;
if (dataSource.encryptedCredentials) {
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: dataSource.projectId
});
credentials = JSON.parse(
decryptor({
cipherTextBlob: dataSource.encryptedCredentials
}).toString()
) as TSecretScanningDataSourceCredentials;
}
await factory.teardown({
dataSource: {
...dataSource,
// @ts-expect-error currently we don't have a null connection data source
connection
},
credentials
});
await secretScanningV2DAL.dataSources.deleteById(dataSourceId); await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
@ -869,6 +900,7 @@ export const secretScanningV2ServiceFactory = ({
updateSecretScanningFindingById, updateSecretScanningFindingById,
findSecretScanningConfigByProjectId, findSecretScanningConfigByProjectId,
upsertSecretScanningConfig, upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue) github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
}; };
}; };

View File

@ -4,6 +4,15 @@ import {
TSecretScanningResources, TSecretScanningResources,
TSecretScanningScans TSecretScanningScans
} from "@app/db/schemas"; } from "@app/db/schemas";
import {
TBitbucketDataSource,
TBitbucketDataSourceCredentials,
TBitbucketDataSourceInput,
TBitbucketDataSourceListItem,
TBitbucketDataSourceWithConnection,
TBitbucketFinding,
TQueueBitbucketResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import { import {
TGitHubDataSource, TGitHubDataSource,
TGitHubDataSourceInput, TGitHubDataSourceInput,
@ -19,7 +28,7 @@ import {
SecretScanningScanStatus SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums"; } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export type TSecretScanningDataSource = TGitHubDataSource; export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & { export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
lastScannedAt?: Date | null; lastScannedAt?: Date | null;
@ -41,13 +50,17 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
resourceName: string; resourceName: string;
}; };
export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection; export type TSecretScanningDataSourceWithConnection =
| TGitHubDataSourceWithConnection
| TBitbucketDataSourceWithConnection;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput; export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem; export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
export type TSecretScanningFinding = TGitHubFinding; export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
export type TListSecretScanningDataSourcesByProjectId = { export type TListSecretScanningDataSourcesByProjectId = {
projectId: string; projectId: string;
@ -99,7 +112,7 @@ export type TQueueSecretScanningDataSourceFullScan = {
scanId: string; scanId: string;
}; };
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan; export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
export type TQueueSecretScanningSendNotification = { export type TQueueSecretScanningSendNotification = {
dataSource: TSecretScanningDataSources; dataSource: TSecretScanningDataSources;
@ -138,11 +151,12 @@ export type TSecretScanningDataSourceRaw = NonNullable<
>; >;
export type TSecretScanningFactoryInitialize< export type TSecretScanningFactoryInitialize<
P extends TSecretScanningDataSourceInput,
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined, T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined C extends TSecretScanningDataSourceCredentials = undefined
> = ( > = (
params: { params: {
payload: TCreateSecretScanningDataSourceDTO; payload: P;
connection: T; connection: T;
secretScanningV2DAL: TSecretScanningV2DALFactory; secretScanningV2DAL: TSecretScanningV2DALFactory;
}, },
@ -150,24 +164,27 @@ export type TSecretScanningFactoryInitialize<
) => Promise<TSecretScanningDataSourceRaw>; ) => Promise<TSecretScanningDataSourceRaw>;
export type TSecretScanningFactoryPostInitialization< export type TSecretScanningFactoryPostInitialization<
P extends TSecretScanningDataSourceInput,
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined, T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { > = (params: { payload: P; connection: T; credentials: C; dataSourceId: string }) => Promise<void>;
payload: TCreateSecretScanningDataSourceDTO;
connection: T; export type TSecretScanningFactoryTeardown<
credentials: C; T extends TSecretScanningDataSourceWithConnection,
dataSourceId: string; C extends TSecretScanningDataSourceCredentials = undefined
}) => Promise<void>; > = (params: { dataSource: T; credentials: C }) => Promise<void>;
export type TSecretScanningFactory< export type TSecretScanningFactory<
T extends TSecretScanningDataSourceWithConnection, T extends TSecretScanningDataSourceWithConnection,
C extends TSecretScanningDataSourceCredentials, P extends TQueueSecretScanningResourceDiffScan["payload"],
P extends TQueueSecretScanningResourceDiffScan["payload"] I extends TSecretScanningDataSourceInput,
C extends TSecretScanningDataSourceCredentials | undefined = undefined
> = () => { > = () => {
listRawResources: TSecretScanningFactoryListRawResources<T>; listRawResources: TSecretScanningFactoryListRawResources<T>;
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>; getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>; initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>; postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
teardown: TSecretScanningFactoryTeardown<T, C>;
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>; getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>; getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
}; };
@ -185,5 +202,3 @@ export type TUpsertSecretScanningConfigDTO = {
projectId: string; projectId: string;
content: string | null; content: string | null;
}; };
export type TSecretScanningDataSourceCredentials = undefined;

View File

@ -1,7 +1,22 @@
import { z } from "zod"; import { z } from "zod";
import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github"; import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]); export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
GitHubDataSourceSchema,
BitbucketDataSourceSchema
]);
export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]); export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
GitHubFindingSchema.describe(
JSON.stringify({
title: "GitHub"
})
),
BitbucketFindingSchema.describe(
JSON.stringify({
title: "Bitbucket"
})
)
]);

View File

@ -2272,6 +2272,10 @@ export const AppConnections = {
code: "The OAuth code to use to connect with GitLab.", code: "The OAuth code to use to connect with GitLab.",
accessTokenType: "The type of token used to connect with GitLab." accessTokenType: "The type of token used to connect with GitLab."
}, },
BITBUCKET: {
email: "The email used to access Bitbucket.",
apiToken: "The API token used to access Bitbucket."
},
ZABBIX: { ZABBIX: {
apiToken: "The API Token used to access Zabbix.", apiToken: "The API Token used to access Zabbix.",
instanceUrl: "The Zabbix instance URL to connect with." instanceUrl: "The Zabbix instance URL to connect with."
@ -2641,6 +2645,10 @@ export const SecretScanningDataSources = {
CONFIG: { CONFIG: {
GITHUB: { GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).' includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
},
BITBUCKET: {
workspaceSlug: "The workspace to scan.",
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
} }
} }
}; };

View File

@ -10,4 +10,4 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//); export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/); export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);

View File

@ -1,7 +1,9 @@
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types"; import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
import { PushEvent } from "@octokit/webhooks-types"; import { PushEvent } from "@octokit/webhooks-types";
import { Probot } from "probot"; import { Probot } from "probot";
import { z } from "zod";
import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
import { getConfig } from "@app/lib/config/env"; import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger"; import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter"; import { writeLimit } from "@app/server/config/rateLimiter";
@ -63,4 +65,52 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
return res.send("ok"); return res.send("ok");
} }
}); });
// bitbucket push event webhook
// Receives Bitbucket Cloud push notifications for a secret-scanning data source.
// The data source is identified via the `dataSourceId` query parameter that was
// embedded in the webhook URL when the data source was configured.
server.route({
  method: "POST",
  url: "/bitbucket",
  schema: {
    querystring: z.object({
      dataSourceId: z.string().min(1, { message: "Data Source ID is required" })
    }),
    headers: z
      .object({
        "x-hub-signature": z.string().min(1, { message: "X-Hub-Signature header is required" })
      })
      // allow all other headers through untouched
      .passthrough()
  },
  config: {
    rateLimit: writeLimit
  },
  handler: async (req, res) => {
    const { dataSourceId } = req.query;

    // Verify signature
    // Only the format of the header is checked here ("sha256=<hex>"); the actual
    // HMAC comparison is presumably performed inside handlePushEvent using
    // `receivedSignature` and `bodyString` — TODO confirm in the service layer.
    const signature = req.headers["x-hub-signature"];
    if (!signature) {
      logger.error("Missing X-Hub-Signature header for Bitbucket webhook");
      return res.status(401).send({ message: "Unauthorized: Missing signature" });
    }

    const expectedSignaturePrefix = "sha256=";
    if (!signature.startsWith(expectedSignaturePrefix)) {
      logger.error({ signature }, "Invalid X-Hub-Signature format for Bitbucket webhook");
      return res.status(401).send({ message: "Unauthorized: Invalid signature format" });
    }

    // Strip the "sha256=" prefix; the remainder is the hex digest supplied by Bitbucket.
    const receivedSignature = signature.substring(expectedSignaturePrefix.length);

    // Defensive re-check; the zod querystring schema should already reject this.
    if (!dataSourceId) return res.status(400).send({ message: "Data Source ID is required" });

    // Forward the raw event plus verification material to the scanning service.
    // bodyString is the serialized payload the signature was computed over.
    await server.services.secretScanningV2.bitbucket.handlePushEvent({
      ...(req.body as TBitbucketPushEvent),
      dataSourceId,
      receivedSignature,
      bodyString: JSON.stringify(req.body)
    });

    return res.send("ok");
  }
});
}; };

View File

@ -31,6 +31,10 @@ import {
AzureKeyVaultConnectionListItemSchema, AzureKeyVaultConnectionListItemSchema,
SanitizedAzureKeyVaultConnectionSchema SanitizedAzureKeyVaultConnectionSchema
} from "@app/services/app-connection/azure-key-vault"; } from "@app/services/app-connection/azure-key-vault";
import {
BitbucketConnectionListItemSchema,
SanitizedBitbucketConnectionSchema
} from "@app/services/app-connection/bitbucket";
import { import {
CamundaConnectionListItemSchema, CamundaConnectionListItemSchema,
SanitizedCamundaConnectionSchema SanitizedCamundaConnectionSchema
@ -118,6 +122,7 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedFlyioConnectionSchema.options, ...SanitizedFlyioConnectionSchema.options,
...SanitizedGitLabConnectionSchema.options, ...SanitizedGitLabConnectionSchema.options,
...SanitizedCloudflareConnectionSchema.options, ...SanitizedCloudflareConnectionSchema.options,
...SanitizedBitbucketConnectionSchema.options,
...SanitizedZabbixConnectionSchema.options ...SanitizedZabbixConnectionSchema.options
]); ]);
@ -151,6 +156,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
FlyioConnectionListItemSchema, FlyioConnectionListItemSchema,
GitLabConnectionListItemSchema, GitLabConnectionListItemSchema,
CloudflareConnectionListItemSchema, CloudflareConnectionListItemSchema,
BitbucketConnectionListItemSchema,
ZabbixConnectionListItemSchema ZabbixConnectionListItemSchema
]); ]);

View File

@ -0,0 +1,88 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateBitbucketConnectionSchema,
SanitizedBitbucketConnectionSchema,
UpdateBitbucketConnectionSchema
} from "@app/services/app-connection/bitbucket";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
/**
 * Registers the Bitbucket app-connection routes: the shared CRUD endpoints
 * plus two internal listing endpoints consumed by the Infisical App UI.
 */
export const registerBitbucketConnectionRouter = async (server: FastifyZodProvider) => {
  registerAppConnectionEndpoints({
    server,
    app: AppConnection.Bitbucket,
    createSchema: CreateBitbucketConnectionSchema,
    updateSchema: UpdateBitbucketConnectionSchema,
    sanitizedResponseSchema: SanitizedBitbucketConnectionSchema
  });

  // The below endpoints are not exposed and for Infisical App use

  // List the workspaces reachable through this connection's credentials.
  server.route({
    method: "GET",
    url: "/:connectionId/workspaces",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      response: {
        200: z.object({
          workspaces: z.object({ slug: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;

      const workspaces = await server.services.appConnection.bitbucket.listWorkspaces(connectionId, req.permission);

      return { workspaces };
    }
  });

  // List the repositories within a given workspace for this connection.
  server.route({
    method: "GET",
    url: "/:connectionId/repositories",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        connectionId: z.string().uuid()
      }),
      querystring: z.object({
        workspaceSlug: z.string().min(1).max(255)
      }),
      response: {
        200: z.object({
          repositories: z.object({ slug: z.string(), full_name: z.string(), uuid: z.string() }).array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT]),
    handler: async (req) => {
      const { connectionId } = req.params;
      const { workspaceSlug } = req.query;

      const repositories = await server.services.appConnection.bitbucket.listRepositories(
        { connectionId, workspaceSlug },
        req.permission
      );

      return { repositories };
    }
  });
};

View File

@ -9,6 +9,7 @@ import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-confi
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router"; import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router"; import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router"; import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
import { registerBitbucketConnectionRouter } from "./bitbucket-connection-router";
import { registerCamundaConnectionRouter } from "./camunda-connection-router"; import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerCloudflareConnectionRouter } from "./cloudflare-connection-router"; import { registerCloudflareConnectionRouter } from "./cloudflare-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router"; import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
@ -64,5 +65,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.Flyio]: registerFlyioConnectionRouter, [AppConnection.Flyio]: registerFlyioConnectionRouter,
[AppConnection.GitLab]: registerGitLabConnectionRouter, [AppConnection.GitLab]: registerGitLabConnectionRouter,
[AppConnection.Cloudflare]: registerCloudflareConnectionRouter, [AppConnection.Cloudflare]: registerCloudflareConnectionRouter,
[AppConnection.Bitbucket]: registerBitbucketConnectionRouter,
[AppConnection.Zabbix]: registerZabbixConnectionRouter [AppConnection.Zabbix]: registerZabbixConnectionRouter
}; };

View File

@ -732,8 +732,8 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId, actorOrgId: req.permission.orgId,
projectId, projectId,
environment, environment,
path: secretPath, path: secretPath
search // search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
}); });
if (remainingLimit > 0 && totalImportCount > adjustedOffset) { if (remainingLimit > 0 && totalImportCount > adjustedOffset) {
@ -745,7 +745,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
projectId, projectId,
environment, environment,
path: secretPath, path: secretPath,
search, // search scott: removing for now because this prevents searching imported secrets which are fetched separately client side
limit: remainingLimit, limit: remainingLimit,
offset: adjustedOffset offset: adjustedOffset
}); });

View File

@ -2,7 +2,7 @@ import picomatch from "picomatch";
import { z } from "zod"; import { z } from "zod";
import { SecretApprovalRequestsSchema, SecretsSchema, SecretType, ServiceTokenScopes } from "@app/db/schemas"; import { SecretApprovalRequestsSchema, SecretsSchema, SecretType, ServiceTokenScopes } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types"; import { EventType, SecretApprovalEvent, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, RAW_SECRETS, SECRETS } from "@app/lib/api-docs"; import { ApiDocsTags, RAW_SECRETS, SECRETS } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors"; import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn"; import { removeTrailingSlash } from "@app/lib/fn";
@ -594,6 +594,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
secretReminderRepeatDays: req.body.secretReminderRepeatDays secretReminderRepeatDays: req.body.secretReminderRepeatDays
}); });
if (secretOperation.type === SecretProtectionType.Approval) { if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath: req.body.secretPath,
environment: req.body.environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Create
}
}
});
return { approval: secretOperation.approval }; return { approval: secretOperation.approval };
} }
@ -730,6 +747,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
}); });
if (secretOperation.type === SecretProtectionType.Approval) { if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath: req.body.secretPath,
environment: req.body.environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Update
}
}
});
return { approval: secretOperation.approval }; return { approval: secretOperation.approval };
} }
const { secret } = secretOperation; const { secret } = secretOperation;
@ -831,6 +865,23 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
type: req.body.type type: req.body.type
}); });
if (secretOperation.type === SecretProtectionType.Approval) { if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath: req.body.secretPath,
environment: req.body.environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Delete
}
}
});
return { approval: secretOperation.approval }; return { approval: secretOperation.approval };
} }
@ -1165,7 +1216,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: { metadata: {
committedBy: approval.committerUserId, committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id, secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
eventType: SecretApprovalEvent.Create
} }
} }
}); });
@ -1351,7 +1405,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: { metadata: {
committedBy: approval.committerUserId, committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id, secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Update
} }
} }
}); });
@ -1489,7 +1547,11 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: { metadata: {
committedBy: approval.committerUserId, committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id, secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
secretKey: req.params.secretName,
eventType: SecretApprovalEvent.Delete
} }
} }
}); });
@ -1673,7 +1735,10 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: { metadata: {
committedBy: approval.committerUserId, committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id, secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
eventType: SecretApprovalEvent.CreateMany
} }
} }
}); });
@ -1801,7 +1866,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: { metadata: {
committedBy: approval.committerUserId, committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id, secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
eventType: SecretApprovalEvent.UpdateMany,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretName
}))
} }
} }
}); });
@ -1920,7 +1991,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
metadata: { metadata: {
committedBy: approval.committerUserId, committedBy: approval.committerUserId,
secretApprovalRequestId: approval.id, secretApprovalRequestId: approval.id,
secretApprovalRequestSlug: approval.slug secretApprovalRequestSlug: approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretName
})),
eventType: SecretApprovalEvent.DeleteMany
} }
} }
}); });
@ -2038,6 +2115,24 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
secrets: inputSecrets secrets: inputSecrets
}); });
if (secretOperation.type === SecretProtectionType.Approval) { if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretKey
})),
eventType: SecretApprovalEvent.CreateMany
}
}
});
return { approval: secretOperation.approval }; return { approval: secretOperation.approval };
} }
const { secrets } = secretOperation; const { secrets } = secretOperation;
@ -2170,6 +2265,25 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
mode: req.body.mode mode: req.body.mode
}); });
if (secretOperation.type === SecretProtectionType.Approval) { if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretKey,
secretPath: secret.secretPath
})),
eventType: SecretApprovalEvent.UpdateMany
}
}
});
return { approval: secretOperation.approval }; return { approval: secretOperation.approval };
} }
const { secrets } = secretOperation; const { secrets } = secretOperation;
@ -2298,6 +2412,25 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
secrets: inputSecrets secrets: inputSecrets
}); });
if (secretOperation.type === SecretProtectionType.Approval) { if (secretOperation.type === SecretProtectionType.Approval) {
await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
event: {
type: EventType.SECRET_APPROVAL_REQUEST,
metadata: {
committedBy: secretOperation.approval.committerUserId,
secretApprovalRequestId: secretOperation.approval.id,
secretApprovalRequestSlug: secretOperation.approval.slug,
secretPath,
environment,
secrets: inputSecrets.map((secret) => ({
secretKey: secret.secretKey
})),
eventType: SecretApprovalEvent.DeleteMany
}
}
});
return { approval: secretOperation.approval }; return { approval: secretOperation.approval };
} }
const { secrets } = secretOperation; const { secrets } = secretOperation;

View File

@ -28,6 +28,7 @@ export enum AppConnection {
Flyio = "flyio", Flyio = "flyio",
GitLab = "gitlab", GitLab = "gitlab",
Cloudflare = "cloudflare", Cloudflare = "cloudflare",
Bitbucket = "bitbucket",
Zabbix = "zabbix" Zabbix = "zabbix"
} }

View File

@ -50,6 +50,11 @@ import {
getAzureKeyVaultConnectionListItem, getAzureKeyVaultConnectionListItem,
validateAzureKeyVaultConnectionCredentials validateAzureKeyVaultConnectionCredentials
} from "./azure-key-vault"; } from "./azure-key-vault";
import {
BitbucketConnectionMethod,
getBitbucketConnectionListItem,
validateBitbucketConnectionCredentials
} from "./bitbucket";
import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda"; import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda";
import { CloudflareConnectionMethod } from "./cloudflare/cloudflare-connection-enum"; import { CloudflareConnectionMethod } from "./cloudflare/cloudflare-connection-enum";
import { import {
@ -138,6 +143,7 @@ export const listAppConnectionOptions = () => {
getFlyioConnectionListItem(), getFlyioConnectionListItem(),
getGitLabConnectionListItem(), getGitLabConnectionListItem(),
getCloudflareConnectionListItem(), getCloudflareConnectionListItem(),
getBitbucketConnectionListItem(),
getZabbixConnectionListItem() getZabbixConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name)); ].sort((a, b) => a.name.localeCompare(b.name));
}; };
@ -219,6 +225,7 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Flyio]: validateFlyioConnectionCredentials as TAppConnectionCredentialsValidator, [AppConnection.Flyio]: validateFlyioConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GitLab]: validateGitLabConnectionCredentials as TAppConnectionCredentialsValidator, [AppConnection.GitLab]: validateGitLabConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator, [AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Bitbucket]: validateBitbucketConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator [AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator
}; };
@ -256,6 +263,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case VercelConnectionMethod.ApiToken: case VercelConnectionMethod.ApiToken:
case OnePassConnectionMethod.ApiToken: case OnePassConnectionMethod.ApiToken:
case CloudflareConnectionMethod.APIToken: case CloudflareConnectionMethod.APIToken:
case BitbucketConnectionMethod.ApiToken:
case ZabbixConnectionMethod.ApiToken: case ZabbixConnectionMethod.ApiToken:
return "API Token"; return "API Token";
case PostgresConnectionMethod.UsernameAndPassword: case PostgresConnectionMethod.UsernameAndPassword:
@ -337,6 +345,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Flyio]: platformManagedCredentialsNotSupported, [AppConnection.Flyio]: platformManagedCredentialsNotSupported,
[AppConnection.GitLab]: platformManagedCredentialsNotSupported, [AppConnection.GitLab]: platformManagedCredentialsNotSupported,
[AppConnection.Cloudflare]: platformManagedCredentialsNotSupported, [AppConnection.Cloudflare]: platformManagedCredentialsNotSupported,
[AppConnection.Bitbucket]: platformManagedCredentialsNotSupported,
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported [AppConnection.Zabbix]: platformManagedCredentialsNotSupported
}; };

View File

@ -30,6 +30,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Flyio]: "Fly.io", [AppConnection.Flyio]: "Fly.io",
[AppConnection.GitLab]: "GitLab", [AppConnection.GitLab]: "GitLab",
[AppConnection.Cloudflare]: "Cloudflare", [AppConnection.Cloudflare]: "Cloudflare",
[AppConnection.Bitbucket]: "Bitbucket",
[AppConnection.Zabbix]: "Zabbix" [AppConnection.Zabbix]: "Zabbix"
}; };
@ -63,5 +64,6 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.Flyio]: AppConnectionPlanType.Regular, [AppConnection.Flyio]: AppConnectionPlanType.Regular,
[AppConnection.GitLab]: AppConnectionPlanType.Regular, [AppConnection.GitLab]: AppConnectionPlanType.Regular,
[AppConnection.Cloudflare]: AppConnectionPlanType.Regular, [AppConnection.Cloudflare]: AppConnectionPlanType.Regular,
[AppConnection.Bitbucket]: AppConnectionPlanType.Regular,
[AppConnection.Zabbix]: AppConnectionPlanType.Regular [AppConnection.Zabbix]: AppConnectionPlanType.Regular
}; };

View File

@ -45,6 +45,8 @@ import { azureClientSecretsConnectionService } from "./azure-client-secrets/azur
import { ValidateAzureDevOpsConnectionCredentialsSchema } from "./azure-devops/azure-devops-schemas"; import { ValidateAzureDevOpsConnectionCredentialsSchema } from "./azure-devops/azure-devops-schemas";
import { azureDevOpsConnectionService } from "./azure-devops/azure-devops-service"; import { azureDevOpsConnectionService } from "./azure-devops/azure-devops-service";
import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault"; import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault";
import { ValidateBitbucketConnectionCredentialsSchema } from "./bitbucket";
import { bitbucketConnectionService } from "./bitbucket/bitbucket-connection-service";
import { ValidateCamundaConnectionCredentialsSchema } from "./camunda"; import { ValidateCamundaConnectionCredentialsSchema } from "./camunda";
import { camundaConnectionService } from "./camunda/camunda-connection-service"; import { camundaConnectionService } from "./camunda/camunda-connection-service";
import { ValidateCloudflareConnectionCredentialsSchema } from "./cloudflare/cloudflare-connection-schema"; import { ValidateCloudflareConnectionCredentialsSchema } from "./cloudflare/cloudflare-connection-schema";
@ -122,6 +124,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.Flyio]: ValidateFlyioConnectionCredentialsSchema, [AppConnection.Flyio]: ValidateFlyioConnectionCredentialsSchema,
[AppConnection.GitLab]: ValidateGitLabConnectionCredentialsSchema, [AppConnection.GitLab]: ValidateGitLabConnectionCredentialsSchema,
[AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema, [AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema,
[AppConnection.Bitbucket]: ValidateBitbucketConnectionCredentialsSchema,
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema [AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema
}; };
@ -533,6 +536,7 @@ export const appConnectionServiceFactory = ({
flyio: flyioConnectionService(connectAppConnectionById), flyio: flyioConnectionService(connectAppConnectionById),
gitlab: gitlabConnectionService(connectAppConnectionById, appConnectionDAL, kmsService), gitlab: gitlabConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
cloudflare: cloudflareConnectionService(connectAppConnectionById), cloudflare: cloudflareConnectionService(connectAppConnectionById),
bitbucket: bitbucketConnectionService(connectAppConnectionById),
zabbix: zabbixConnectionService(connectAppConnectionById) zabbix: zabbixConnectionService(connectAppConnectionById)
}; };
}; };

View File

@ -56,6 +56,12 @@ import {
TAzureKeyVaultConnectionInput, TAzureKeyVaultConnectionInput,
TValidateAzureKeyVaultConnectionCredentialsSchema TValidateAzureKeyVaultConnectionCredentialsSchema
} from "./azure-key-vault"; } from "./azure-key-vault";
import {
TBitbucketConnection,
TBitbucketConnectionConfig,
TBitbucketConnectionInput,
TValidateBitbucketConnectionCredentialsSchema
} from "./bitbucket";
import { import {
TCamundaConnection, TCamundaConnection,
TCamundaConnectionConfig, TCamundaConnectionConfig,
@ -202,6 +208,7 @@ export type TAppConnection = { id: string } & (
| TFlyioConnection | TFlyioConnection
| TGitLabConnection | TGitLabConnection
| TCloudflareConnection | TCloudflareConnection
| TBitbucketConnection
| TZabbixConnection | TZabbixConnection
); );
@ -239,6 +246,7 @@ export type TAppConnectionInput = { id: string } & (
| TFlyioConnectionInput | TFlyioConnectionInput
| TGitLabConnectionInput | TGitLabConnectionInput
| TCloudflareConnectionInput | TCloudflareConnectionInput
| TBitbucketConnectionInput
| TZabbixConnectionInput | TZabbixConnectionInput
); );
@ -284,6 +292,7 @@ export type TAppConnectionConfig =
| TFlyioConnectionConfig | TFlyioConnectionConfig
| TGitLabConnectionConfig | TGitLabConnectionConfig
| TCloudflareConnectionConfig | TCloudflareConnectionConfig
| TBitbucketConnectionConfig
| TZabbixConnectionConfig; | TZabbixConnectionConfig;
export type TValidateAppConnectionCredentialsSchema = export type TValidateAppConnectionCredentialsSchema =
@ -316,6 +325,7 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateFlyioConnectionCredentialsSchema | TValidateFlyioConnectionCredentialsSchema
| TValidateGitLabConnectionCredentialsSchema | TValidateGitLabConnectionCredentialsSchema
| TValidateCloudflareConnectionCredentialsSchema | TValidateCloudflareConnectionCredentialsSchema
| TValidateBitbucketConnectionCredentialsSchema
| TValidateZabbixConnectionCredentialsSchema; | TValidateZabbixConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = { export type TListAwsConnectionKmsKeys = {

View File

@ -0,0 +1,3 @@
// Supported authentication methods for Bitbucket app connections.
// Currently only an API token, which (per bitbucket-connection-fns) is sent as
// the password half of HTTP Basic auth alongside the account email.
export enum BitbucketConnectionMethod {
  ApiToken = "api-token"
}

View File

@ -0,0 +1,117 @@
import { AxiosError } from "axios";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
import {
TBitbucketConnection,
TBitbucketConnectionConfig,
TBitbucketRepo,
TBitbucketWorkspace
} from "./bitbucket-connection-types";
/**
 * Describes the Bitbucket entry for the app-connection options catalog:
 * display name, connection app identifier, and the supported auth methods.
 */
export const getBitbucketConnectionListItem = () => {
  // Object.values preserves declaration order; the tuple assertion narrows the
  // single-member enum to its only method.
  const methods = Object.values(BitbucketConnectionMethod) as [BitbucketConnectionMethod.ApiToken];

  return {
    name: "Bitbucket" as const,
    app: AppConnection.Bitbucket as const,
    methods
  };
};
/**
 * Fetches the authenticated Bitbucket user to prove the credentials work.
 *
 * @param email - Atlassian account email (Basic-auth username).
 * @param apiToken - Bitbucket API token (Basic-auth password).
 * @returns The user payload from `/2.0/user` (at minimum the `username`).
 * @throws BadRequestError when the request fails for any reason.
 */
export const getBitbucketUser = async ({ email, apiToken }: { email: string; apiToken: string }) => {
  try {
    const basicAuth = Buffer.from(`${email}:${apiToken}`).toString("base64");

    const response = await request.get<{ username: string }>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/user`, {
      headers: {
        Authorization: `Basic ${basicAuth}`,
        Accept: "application/json"
      }
    });

    return response.data;
  } catch (error: unknown) {
    // Surface HTTP-level failures with their message; anything else gets a generic hint.
    if (error instanceof AxiosError) {
      throw new BadRequestError({
        message: `Failed to validate credentials: ${error.message || "Unknown error"}`
      });
    }
    throw new BadRequestError({
      message: "Unable to validate connection: verify credentials"
    });
  }
};
/**
 * Validates Bitbucket connection credentials by performing an authenticated
 * user lookup; returns the credentials unchanged on success.
 */
export const validateBitbucketConnectionCredentials = async (config: TBitbucketConnectionConfig) => {
  const { credentials } = config;
  await getBitbucketUser(credentials);
  return credentials;
};
// Shape of a single page from Bitbucket's paginated workspaces endpoint.
interface BitbucketWorkspacesResponse {
  values: TBitbucketWorkspace[];
  next?: string;
}

/**
 * Lists the workspaces the connection's credentials can access, following
 * Bitbucket's `next` pagination links.
 *
 * @returns Workspace slugs only (other fields are dropped).
 */
export const listBitbucketWorkspaces = async (appConnection: TBitbucketConnection) => {
  const { email, apiToken } = appConnection.credentials;
  const headers = {
    Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
    Accept: "application/json"
  };

  const workspaces: TBitbucketWorkspace[] = [];
  let pageUrl: string | undefined = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces?pagelen=100`;

  // Limit to 10 iterations, fetching at most 10 * 100 = 1000 workspaces
  for (let page = 0; pageUrl && page < 10; page += 1) {
    // eslint-disable-next-line no-await-in-loop
    const { data }: { data: BitbucketWorkspacesResponse } = await request.get<BitbucketWorkspacesResponse>(pageUrl, {
      headers
    });

    workspaces.push(...data.values.map(({ slug }) => ({ slug })));
    pageUrl = data.next;
  }

  return workspaces;
};
// Shape of a single page from Bitbucket's paginated repositories endpoint.
interface BitbucketRepositoriesResponse {
  values: TBitbucketRepo[];
  next?: string;
}

/**
 * Lists the repositories inside a workspace, following Bitbucket's `next`
 * pagination links.
 *
 * @param workspaceSlug - Workspace to enumerate (URL-encoded into the path).
 */
export const listBitbucketRepositories = async (appConnection: TBitbucketConnection, workspaceSlug: string) => {
  const { email, apiToken } = appConnection.credentials;
  const headers = {
    Authorization: `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`,
    Accept: "application/json"
  };

  const repositories: TBitbucketRepo[] = [];
  let pageUrl: string | undefined =
    `${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${encodeURIComponent(workspaceSlug)}?pagelen=100`;

  // Limit to 10 iterations, fetching at most 10 * 100 = 1000 repositories
  for (let page = 0; pageUrl && page < 10; page += 1) {
    // eslint-disable-next-line no-await-in-loop
    const { data }: { data: BitbucketRepositoriesResponse } = await request.get<BitbucketRepositoriesResponse>(
      pageUrl,
      {
        headers
      }
    );

    repositories.push(...data.values);
    pageUrl = data.next;
  }

  return repositories;
};

View File

@ -0,0 +1,72 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { BitbucketConnectionMethod } from "./bitbucket-connection-enums";
// Credentials for the Bitbucket API-token connection method.
//
// Fix: `.trim()` must precede `.email()` — Zod applies checks and transforms
// in chain order, so the original `.email().trim()` validated the *untrimmed*
// value and rejected inputs like " user@example.com " while the trailing
// `.trim()` never affected validation. `apiToken` already used the correct
// trim-first order; `email` now matches it. This is backward-compatible:
// every previously-accepted value is still accepted.
export const BitbucketConnectionAccessTokenCredentialsSchema = z.object({
  apiToken: z
    .string()
    .trim()
    .min(1, "API Token required")
    .max(255)
    .describe(AppConnections.CREDENTIALS.BITBUCKET.apiToken),
  email: z
    .string()
    .trim()
    .email()
    .min(1, "Email required")
    .max(255)
    .describe(AppConnections.CREDENTIALS.BITBUCKET.email)
});
// Common app-connection fields with `app` pinned to Bitbucket.
const BaseBitbucketConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Bitbucket) });

// Full connection record including credentials (API-token method only).
export const BitbucketConnectionSchema = BaseBitbucketConnectionSchema.extend({
  method: z.literal(BitbucketConnectionMethod.ApiToken),
  credentials: BitbucketConnectionAccessTokenCredentialsSchema
});

// Connection shape safe to return to clients: only the `email` credential is
// exposed — the API token is stripped via `.pick`.
export const SanitizedBitbucketConnectionSchema = z.discriminatedUnion("method", [
  BaseBitbucketConnectionSchema.extend({
    method: z.literal(BitbucketConnectionMethod.ApiToken),
    credentials: BitbucketConnectionAccessTokenCredentialsSchema.pick({
      email: true
    })
  })
]);
// Payload for validating credentials: method plus full credentials object.
export const ValidateBitbucketConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: z
      .literal(BitbucketConnectionMethod.ApiToken)
      .describe(AppConnections.CREATE(AppConnection.Bitbucket).method),
    credentials: BitbucketConnectionAccessTokenCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.Bitbucket).credentials
    )
  })
]);

// Create = credential validation payload + generic create fields (name, etc.).
export const CreateBitbucketConnectionSchema = ValidateBitbucketConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.Bitbucket)
);

// Update allows replacing credentials (optional) alongside generic update fields.
export const UpdateBitbucketConnectionSchema = z
  .object({
    credentials: BitbucketConnectionAccessTokenCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.Bitbucket).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Bitbucket));

// Catalog entry describing the Bitbucket connection type and its methods.
export const BitbucketConnectionListItemSchema = z.object({
  name: z.literal("Bitbucket"),
  app: z.literal(AppConnection.Bitbucket),
  methods: z.nativeEnum(BitbucketConnectionMethod).array()
});

View File

@ -0,0 +1,33 @@
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listBitbucketRepositories, listBitbucketWorkspaces } from "./bitbucket-connection-fns";
import { TBitbucketConnection, TGetBitbucketRepositoriesDTO } from "./bitbucket-connection-types";
// Resolver injected by the app-connection layer: fetches a connection by id,
// enforcing that `actor` is permitted to use it.
type TGetAppConnectionFunc = (
  app: AppConnection,
  connectionId: string,
  actor: OrgServiceActor
) => Promise<TBitbucketConnection>;
export const bitbucketConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listWorkspaces = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
const workspaces = await listBitbucketWorkspaces(appConnection);
return workspaces;
};
const listRepositories = async (
{ connectionId, workspaceSlug }: TGetBitbucketRepositoriesDTO,
actor: OrgServiceActor
) => {
const appConnection = await getAppConnection(AppConnection.Bitbucket, connectionId, actor);
const repositories = await listBitbucketRepositories(appConnection, workspaceSlug);
return repositories;
};
return {
listWorkspaces,
listRepositories
};
};

View File

@ -0,0 +1,40 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
BitbucketConnectionSchema,
CreateBitbucketConnectionSchema,
ValidateBitbucketConnectionCredentialsSchema
} from "./bitbucket-connection-schemas";
// Full Bitbucket connection record, inferred from the zod schema.
export type TBitbucketConnection = z.infer<typeof BitbucketConnectionSchema>;

// Input payload for creating a connection, with `app` pinned to Bitbucket.
export type TBitbucketConnectionInput = z.infer<typeof CreateBitbucketConnectionSchema> & {
  app: AppConnection.Bitbucket;
};

// Schema type re-exported for the generic validation machinery.
export type TValidateBitbucketConnectionCredentialsSchema = typeof ValidateBitbucketConnectionCredentialsSchema;

// Minimal config for credential validation (method/app/credentials) plus the
// owning organization's id.
export type TBitbucketConnectionConfig = DiscriminativePick<
  TBitbucketConnectionInput,
  "method" | "app" | "credentials"
> & {
  orgId: string;
};

// DTO for listing repositories of one workspace through a connection.
export type TGetBitbucketRepositoriesDTO = {
  connectionId: string;
  workspaceSlug: string;
};

// A Bitbucket workspace, reduced to its slug.
export type TBitbucketWorkspace = {
  slug: string;
};

// A Bitbucket repository as returned by the repositories API.
export type TBitbucketRepo = {
  uuid: string;
  full_name: string; // workspace-slug/repo-slug
  slug: string;
};

View File

@ -0,0 +1,4 @@
// Barrel file re-exporting the Bitbucket connection module's public surface.
export * from "./bitbucket-connection-enums";
export * from "./bitbucket-connection-fns";
export * from "./bitbucket-connection-schemas";
export * from "./bitbucket-connection-types";

View File

@ -814,9 +814,9 @@ const getAppsCloudflareWorkers = async ({ accessToken, accountId }: { accessToke
}; };
/** /**
* Return list of repositories for the BitBucket integration based on provided BitBucket workspace * Return list of repositories for the Bitbucket integration based on provided Bitbucket workspace
*/ */
const getAppsBitBucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => { const getAppsBitbucket = async ({ accessToken, workspaceSlug }: { accessToken: string; workspaceSlug?: string }) => {
interface RepositoriesResponse { interface RepositoriesResponse {
size: number; size: number;
page: number; page: number;
@ -1302,7 +1302,7 @@ export const getApps = async ({
}); });
case Integrations.BITBUCKET: case Integrations.BITBUCKET:
return getAppsBitBucket({ return getAppsBitbucket({
accessToken, accessToken,
workspaceSlug workspaceSlug
}); });

View File

@ -342,7 +342,7 @@ export const getIntegrationOptions = async () => {
{ {
name: "Bitbucket", name: "Bitbucket",
slug: "bitbucket", slug: "bitbucket",
image: "BitBucket.png", image: "Bitbucket.png",
isAvailable: true, isAvailable: true,
type: "oauth", type: "oauth",
clientId: appCfg.CLIENT_ID_BITBUCKET, clientId: appCfg.CLIENT_ID_BITBUCKET,

View File

@ -3921,9 +3921,9 @@ const syncSecretsCloudflareWorkers = async ({
}; };
/** /**
* Sync/push [secrets] to BitBucket repo with name [integration.app] * Sync/push [secrets] to Bitbucket repo with name [integration.app]
*/ */
const syncSecretsBitBucket = async ({ const syncSecretsBitbucket = async ({
integration, integration,
secrets, secrets,
accessToken accessToken
@ -4832,7 +4832,7 @@ export const syncIntegrationSecrets = async ({
}); });
break; break;
case Integrations.BITBUCKET: case Integrations.BITBUCKET:
await syncSecretsBitBucket({ await syncSecretsBitbucket({
integration, integration,
secrets, secrets,
accessToken accessToken

View File

@ -64,7 +64,7 @@ type ExchangeCodeGitlabResponse = {
created_at: number; created_at: number;
}; };
type ExchangeCodeBitBucketResponse = { type ExchangeCodeBitbucketResponse = {
access_token: string; access_token: string;
token_type: string; token_type: string;
expires_in: number; expires_in: number;
@ -392,10 +392,10 @@ const exchangeCodeGitlab = async ({ code, url }: { code: string; url?: string })
}; };
/** /**
* Return [accessToken], [accessExpiresAt], and [refreshToken] for BitBucket * Return [accessToken], [accessExpiresAt], and [refreshToken] for Bitbucket
* code-token exchange * code-token exchange
*/ */
const exchangeCodeBitBucket = async ({ code }: { code: string }) => { const exchangeCodeBitbucket = async ({ code }: { code: string }) => {
const accessExpiresAt = new Date(); const accessExpiresAt = new Date();
const appCfg = getConfig(); const appCfg = getConfig();
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) { if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
@ -403,7 +403,7 @@ const exchangeCodeBitBucket = async ({ code }: { code: string }) => {
} }
const res = ( const res = (
await request.post<ExchangeCodeBitBucketResponse>( await request.post<ExchangeCodeBitbucketResponse>(
IntegrationUrls.BITBUCKET_TOKEN_URL, IntegrationUrls.BITBUCKET_TOKEN_URL,
new URLSearchParams({ new URLSearchParams({
grant_type: "authorization_code", grant_type: "authorization_code",
@ -490,7 +490,7 @@ export const exchangeCode = async ({
url url
}); });
case Integrations.BITBUCKET: case Integrations.BITBUCKET:
return exchangeCodeBitBucket({ return exchangeCodeBitbucket({
code code
}); });
default: default:
@ -524,7 +524,7 @@ type RefreshTokenGitLabResponse = {
created_at: number; created_at: number;
}; };
type RefreshTokenBitBucketResponse = { type RefreshTokenBitbucketResponse = {
access_token: string; access_token: string;
token_type: string; token_type: string;
expires_in: number; expires_in: number;
@ -653,9 +653,9 @@ const exchangeRefreshGitLab = async ({ refreshToken, url }: { url?: string | nul
/** /**
* Return new access token by exchanging refresh token [refreshToken] for the * Return new access token by exchanging refresh token [refreshToken] for the
* BitBucket integration * Bitbucket integration
*/ */
const exchangeRefreshBitBucket = async ({ refreshToken }: { refreshToken: string }) => { const exchangeRefreshBitbucket = async ({ refreshToken }: { refreshToken: string }) => {
const accessExpiresAt = new Date(); const accessExpiresAt = new Date();
const appCfg = getConfig(); const appCfg = getConfig();
if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) { if (!appCfg.CLIENT_SECRET_BITBUCKET || !appCfg.CLIENT_ID_BITBUCKET) {
@ -664,7 +664,7 @@ const exchangeRefreshBitBucket = async ({ refreshToken }: { refreshToken: string
const { const {
data data
}: { }: {
data: RefreshTokenBitBucketResponse; data: RefreshTokenBitbucketResponse;
} = await request.post( } = await request.post(
IntegrationUrls.BITBUCKET_TOKEN_URL, IntegrationUrls.BITBUCKET_TOKEN_URL,
new URLSearchParams({ new URLSearchParams({
@ -794,7 +794,7 @@ export const exchangeRefresh = async (
url url
}); });
case Integrations.BITBUCKET: case Integrations.BITBUCKET:
return exchangeRefreshBitBucket({ return exchangeRefreshBitbucket({
refreshToken refreshToken
}); });
case Integrations.GCP_SECRET_MANAGER: case Integrations.GCP_SECRET_MANAGER:

View File

@ -108,22 +108,22 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
const now = new Date(); const now = new Date();
const oneWeekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000); const oneWeekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
const oneMonthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000); const oneMonthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
const threeMonthsAgo = new Date(now.getTime() - 90 * 24 * 60 * 60 * 1000); const twelveMonthsAgo = new Date(now.getTime() - 360 * 24 * 60 * 60 * 1000);
const memberships = await db const memberships = await db
.replicaNode()(TableName.OrgMembership) .replicaNode()(TableName.OrgMembership)
.where("status", "invited") .where("status", "invited")
.where((qb) => { .where((qb) => {
// lastInvitedAt is null AND createdAt is between 1 week and 3 months ago // lastInvitedAt is null AND createdAt is between 1 week and 12 months ago
void qb void qb
.whereNull(`${TableName.OrgMembership}.lastInvitedAt`) .whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
.whereBetween(`${TableName.OrgMembership}.createdAt`, [threeMonthsAgo, oneWeekAgo]); .whereBetween(`${TableName.OrgMembership}.createdAt`, [twelveMonthsAgo, oneWeekAgo]);
}) })
.orWhere((qb) => { .orWhere((qb) => {
// lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago // lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
void qb void qb
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneMonthAgo) .where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
.where(`${TableName.OrgMembership}.createdAt`, ">", oneWeekAgo); .where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
}); });
return memberships; return memberships;
@ -135,9 +135,22 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
} }
}; };
const updateLastInvitedAtByIds = async (membershipIds: string[]) => {
try {
if (membershipIds.length === 0) return;
await db(TableName.OrgMembership).whereIn("id", membershipIds).update({ lastInvitedAt: new Date() });
} catch (error) {
throw new DatabaseError({
error,
name: "Update last invited at by ids"
});
}
};
return { return {
...orgMembershipOrm, ...orgMembershipOrm,
findOrgMembershipById, findOrgMembershipById,
findRecentInvitedMemberships findRecentInvitedMemberships,
updateLastInvitedAtByIds
}; };
}; };

View File

@ -36,6 +36,8 @@ import { getConfig } from "@app/lib/config/env";
import { generateAsymmetricKeyPair } from "@app/lib/crypto"; import { generateAsymmetricKeyPair } from "@app/lib/crypto";
import { generateSymmetricKey, infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption"; import { generateSymmetricKey, infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys } from "@app/lib/crypto/srp"; import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { applyJitter } from "@app/lib/dates";
import { delay as delayMs } from "@app/lib/delay";
import { import {
BadRequestError, BadRequestError,
ForbiddenRequestError, ForbiddenRequestError,
@ -44,9 +46,10 @@ import {
UnauthorizedError UnauthorizedError
} from "@app/lib/errors"; } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn"; import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid"; import { alphaNumericNanoId } from "@app/lib/nanoid";
import { isDisposableEmail } from "@app/lib/validator"; import { isDisposableEmail } from "@app/lib/validator";
import { TQueueServiceFactory } from "@app/queue"; import { QueueName, TQueueServiceFactory } from "@app/queue";
import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns"; import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@ -109,7 +112,12 @@ type TOrgServiceFactoryDep = {
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">; projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">;
orgMembershipDAL: Pick< orgMembershipDAL: Pick<
TOrgMembershipDALFactory, TOrgMembershipDALFactory,
"findOrgMembershipById" | "findOne" | "findById" | "findRecentInvitedMemberships" | "updateById" | "findOrgMembershipById"
| "findOne"
| "findById"
| "findRecentInvitedMemberships"
| "updateById"
| "updateLastInvitedAtByIds"
>; >;
incidentContactDAL: TIncidentContactsDALFactory; incidentContactDAL: TIncidentContactsDALFactory;
samlConfigDAL: Pick<TSamlConfigDALFactory, "findOne">; samlConfigDAL: Pick<TSamlConfigDALFactory, "findOne">;
@ -763,6 +771,10 @@ export const orgServiceFactory = ({
} }
}); });
await orgMembershipDAL.updateById(inviteeOrgMembership.id, {
lastInvitedAt: new Date()
});
return { signupToken: undefined }; return { signupToken: undefined };
}; };
@ -1429,10 +1441,13 @@ export const orgServiceFactory = ({
* Re-send emails to users who haven't accepted an invite yet * Re-send emails to users who haven't accepted an invite yet
*/ */
const notifyInvitedUsers = async () => { const notifyInvitedUsers = async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users started`);
const invitedUsers = await orgMembershipDAL.findRecentInvitedMemberships(); const invitedUsers = await orgMembershipDAL.findRecentInvitedMemberships();
const appCfg = getConfig(); const appCfg = getConfig();
const orgCache: Record<string, { name: string; id: string } | undefined> = {}; const orgCache: Record<string, { name: string; id: string } | undefined> = {};
const notifiedUsers: string[] = [];
await Promise.all( await Promise.all(
invitedUsers.map(async (invitedUser) => { invitedUsers.map(async (invitedUser) => {
@ -1451,25 +1466,32 @@ export const orgServiceFactory = ({
}); });
if (invitedUser.inviteEmail) { if (invitedUser.inviteEmail) {
await smtpService.sendMail({ await delayMs(Math.max(0, applyJitter(0, 2000)));
template: SmtpTemplates.OrgInvite,
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
recipients: [invitedUser.inviteEmail],
substitutions: {
organizationName: org.name,
email: invitedUser.inviteEmail,
organizationId: org.id.toString(),
token,
callback_url: `${appCfg.SITE_URL}/signupinvite`
}
});
}
await orgMembershipDAL.updateById(invitedUser.id, { try {
lastInvitedAt: new Date() await smtpService.sendMail({
}); template: SmtpTemplates.OrgInvite,
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
recipients: [invitedUser.inviteEmail],
substitutions: {
organizationName: org.name,
email: invitedUser.inviteEmail,
organizationId: org.id.toString(),
token,
callback_url: `${appCfg.SITE_URL}/signupinvite`
}
});
notifiedUsers.push(invitedUser.id);
} catch (err) {
logger.error(err, `${QueueName.DailyResourceCleanUp}: notify invited users failed to send email`);
}
}
}) })
); );
await orgMembershipDAL.updateLastInvitedAtByIds(notifiedUsers);
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users completed`);
}; };
return { return {

6
docs/Dockerfile Normal file
View File

@ -0,0 +1,6 @@
# Image for serving the Mintlify documentation site locally.
FROM node:20-alpine
WORKDIR /app
# Install the Mintlify CLI globally; it provides the `mint` command used below.
RUN npm install -g mint
COPY . .
# `mint dev` serves the docs on port 3000 by default.
EXPOSE 3000
CMD ["mint", "dev"]

View File

@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/bitbucket/available"
---

View File

@ -0,0 +1,8 @@
---
title: "Create"
openapi: "POST /api/v1/app-connections/bitbucket"
---
<Note>
Check out the configuration docs for [Bitbucket Connections](/integrations/app-connections/bitbucket) to learn how to obtain the required credentials.
</Note>

View File

@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/app-connections/bitbucket/{connectionId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/app-connections/bitbucket/{connectionId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/app-connections/bitbucket/connection-name/{connectionName}"
---

View File

@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/app-connections/bitbucket"
---

View File

@ -0,0 +1,8 @@
---
title: "Update"
openapi: "PATCH /api/v1/app-connections/bitbucket/{connectionId}"
---
<Note>
Check out the configuration docs for [Bitbucket Connections](/integrations/app-connections/bitbucket) to learn how to obtain the required credentials.
</Note>

View File

@ -0,0 +1,4 @@
---
title: "Create"
openapi: "POST /api/v2/secret-scanning/data-sources/bitbucket"
---

View File

@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v2/secret-scanning/data-sources/bitbucket/{dataSourceId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v2/secret-scanning/data-sources/bitbucket/{dataSourceId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v2/secret-scanning/data-sources/bitbucket/data-source-name/{dataSourceName}"
---

View File

@ -0,0 +1,4 @@
---
title: "List Resources"
openapi: "GET /api/v2/secret-scanning/data-sources/bitbucket/{dataSourceId}/resources"
---

View File

@ -0,0 +1,4 @@
---
title: "List Scans"
openapi: "GET /api/v2/secret-scanning/data-sources/bitbucket/{dataSourceId}/scans"
---

View File

@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v2/secret-scanning/data-sources/bitbucket"
---

View File

@ -0,0 +1,4 @@
---
title: "Scan Resource"
openapi: "POST /api/v2/secret-scanning/data-sources/bitbucket/{dataSourceId}/resources/{resourceId}/scan"
---

View File

@ -0,0 +1,4 @@
---
title: "Scan"
openapi: "POST /api/v2/secret-scanning/data-sources/bitbucket/{dataSourceId}/scan"
---

View File

@ -0,0 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v2/secret-scanning/data-sources/bitbucket/{dataSourceId}"
---

View File

@ -210,6 +210,7 @@
"group": "Secret Scanning", "group": "Secret Scanning",
"pages": [ "pages": [
"documentation/platform/secret-scanning/overview", "documentation/platform/secret-scanning/overview",
"documentation/platform/secret-scanning/bitbucket",
"documentation/platform/secret-scanning/github" "documentation/platform/secret-scanning/github"
] ]
} }
@ -469,6 +470,7 @@
"integrations/app-connections/azure-client-secrets", "integrations/app-connections/azure-client-secrets",
"integrations/app-connections/azure-devops", "integrations/app-connections/azure-devops",
"integrations/app-connections/azure-key-vault", "integrations/app-connections/azure-key-vault",
"integrations/app-connections/bitbucket",
"integrations/app-connections/camunda", "integrations/app-connections/camunda",
"integrations/app-connections/cloudflare", "integrations/app-connections/cloudflare",
"integrations/app-connections/databricks", "integrations/app-connections/databricks",
@ -1135,6 +1137,21 @@
"pages": [ "pages": [
"api-reference/endpoints/secret-scanning/data-sources/list", "api-reference/endpoints/secret-scanning/data-sources/list",
"api-reference/endpoints/secret-scanning/data-sources/options", "api-reference/endpoints/secret-scanning/data-sources/options",
{
"group": "Bitbucket",
"pages": [
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/list",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/get-by-id",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/get-by-name",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/list-resources",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/list-scans",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/create",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/update",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/delete",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/scan",
"api-reference/endpoints/secret-scanning/data-sources/bitbucket/scan-resource"
]
},
{ {
"group": "GitHub", "group": "GitHub",
"pages": [ "pages": [
@ -1284,6 +1301,18 @@
"api-reference/endpoints/app-connections/azure-key-vault/delete" "api-reference/endpoints/app-connections/azure-key-vault/delete"
] ]
}, },
{
"group": "Bitbucket",
"pages": [
"api-reference/endpoints/app-connections/bitbucket/list",
"api-reference/endpoints/app-connections/bitbucket/available",
"api-reference/endpoints/app-connections/bitbucket/get-by-id",
"api-reference/endpoints/app-connections/bitbucket/get-by-name",
"api-reference/endpoints/app-connections/bitbucket/create",
"api-reference/endpoints/app-connections/bitbucket/update",
"api-reference/endpoints/app-connections/bitbucket/delete"
]
},
{ {
"group": "Camunda", "group": "Camunda",
"pages": [ "pages": [

View File

@ -0,0 +1,100 @@
---
title: "Bitbucket Secret Scanning"
sidebarTitle: "Bitbucket"
description: "Learn how to configure secret scanning for Bitbucket."
---
## Prerequisites
- Create a [Bitbucket Connection](/integrations/app-connections/bitbucket) with Secret Scanning permissions
## Create a Bitbucket Data Source in Infisical
<Tabs>
<Tab title="Infisical UI">
1. Navigate to your Secret Scanning Project's Dashboard and click the **Add Data Source** button.
![Secret Scanning Dashboard](/images/platform/secret-scanning/github/github-data-source-step-1.png)
2. Select the **Bitbucket** option.
![Select Bitbucket](/images/platform/secret-scanning/bitbucket/step-2.png)
3. Configure which workspace and repositories you would like to scan. Then click **Next**.
![Data Source Configuration](/images/platform/secret-scanning/bitbucket/step-3.png)
- **Bitbucket Connection** - the connection that has access to the repositories you want to scan.
- **Workspace** - the Bitbucket workspace to scan secrets in.
- **Scan Repositories** - select which repositories you would like to scan.
- **All Repositories** - Infisical will scan all repositories associated with your connection.
- **Select Repositories** - Infisical will scan the selected repositories.
- **Auto-Scan Enabled** - whether Infisical should automatically perform a scan when a push is made to configured repositories.
4. Give your data source a name and description (optional). Then click **Next**.
![Data Source Details](/images/platform/secret-scanning/bitbucket/step-4.png)
- **Name** - the name of the data source. Must be slug-friendly.
- **Description** (optional) - a description of this data source.
5. Review your data source, then click **Create Data Source**.
![Data Source Review](/images/platform/secret-scanning/bitbucket/step-5.png)
6. Your **Bitbucket Data Source** is now available and will begin a full scan if **Auto-Scan** is enabled.
![Data Source Created](/images/platform/secret-scanning/bitbucket/step-6.png)
7. You can view repositories and scan results by clicking on your data source.
![Data Source Page](/images/platform/secret-scanning/bitbucket/step-7.png)
8. In addition, you can review any findings from the **Findings Page**.
![Findings Page](/images/platform/secret-scanning/bitbucket/step-8.png)
</Tab>
<Tab title="API">
To create a Bitbucket Data Source, make an API request to the [Create Bitbucket Data Source](/api-reference/endpoints/secret-scanning/data-sources/bitbucket/create) API endpoint.
### Sample request
```bash Request
curl --request POST \
--url https://us.infisical.com/api/v2/secret-scanning/data-sources/bitbucket \
--header 'Content-Type: application/json' \
--data '{
"name": "my-bitbucket-source",
"projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
"description": "my bitbucket data source",
"connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
"isAutoScanEnabled": true,
"config": {
"workspaceSlug": "my-workspace",
"includeRepos": ["*"]
}
}'
```
### Sample response
```bash Response
{
"dataSource": {
"id": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
"externalId": "1234567890",
"name": "my-bitbucket-source",
"description": "my bitbucket data source",
"isAutoScanEnabled": true,
"projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
"createdAt": "2023-11-07T05:31:56Z",
"updatedAt": "2023-11-07T05:31:56Z",
"type": "bitbucket",
"connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a",
"connection": {
"app": "bitbucket",
"name": "my-bitbucket-app",
"id": "3c90c3cc-0d44-4b50-8888-8dd25736052a"
},
"config": {
"workspaceSlug": "my-workspace",
"includeRepos": ["*"]
}
}
}
```
</Tab>
</Tabs>

Binary file not shown.

After

Width:  |  Height:  |  Size: 225 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 218 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 218 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 570 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 260 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 123 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 649 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 146 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 102 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 91 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 948 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 MiB

View File

@ -92,7 +92,7 @@ Infisical supports the use of [Service Accounts](https://developer.1password.com
"method": "api-token", "method": "api-token",
"credentials": { "credentials": {
"instanceUrl": "https://1pass.example.com", "instanceUrl": "https://1pass.example.com",
"apiToken": "[PRIVATE TOKEN]" "apiToken": "<YOUR-API-TOKEN>"
} }
}' }'
``` ```

View File

@ -0,0 +1,133 @@
---
title: "Bitbucket Connection"
description: "Learn how to configure a Bitbucket Connection for Infisical."
---
Infisical supports the use of [API Tokens](https://support.atlassian.com/bitbucket-cloud/docs/api-tokens/) to connect with Bitbucket.
<Tip>
Infisical recommends creating a dedicated Bitbucket account with access restricted to only the resources your use case requires.
</Tip>
## Create Bitbucket Access Token
<Steps>
<Step title="Create API Token">
Go to [Account API Tokens](https://id.atlassian.com/manage-profile/security/api-tokens) and click **Create API token with scopes**.
![Create API Token](/images/app-connections/bitbucket/step-1.png)
</Step>
<Step title="Set Name and Expiry">
Set the name and expiration date of the token, then click **Next**.
![Set Name and Expiry](/images/app-connections/bitbucket/step-2.png)
<Note>
Keep in mind that you'll need to manually replace the token after it expires.
</Note>
</Step>
<Step title="Select Bitbucket">
Select **Bitbucket** and then click **Next**.
![Select Bitbucket](/images/app-connections/bitbucket/step-3.png)
</Step>
<Step title="Configure Permissions">
Configure permissions according to your app's use case:
<Tabs>
<Tab title="Secret Scanning">
```
read:workspace:bitbucket
read:user:bitbucket
read:webhook:bitbucket
write:webhook:bitbucket
delete:webhook:bitbucket
read:repository:bitbucket
```
![Configure Permissions](/images/app-connections/bitbucket/step-4.png)
</Tab>
</Tabs>
Click **Next**.
</Step>
<Step title="Copy Token">
Save the API Token for later steps.
![Save Token](/images/app-connections/bitbucket/step-5.png)
</Step>
</Steps>
## Create Bitbucket Connection in Infisical
<Tabs>
<Tab title="Infisical UI">
<Steps>
<Step title="Navigate to App Connections">
In your Infisical dashboard, go to **Organization Settings** and select the [**App Connections**](https://app.infisical.com/organization/app-connections) tab.
![App Connections Tab](/images/app-connections/general/add-connection.png)
</Step>
<Step title="Select Bitbucket Connection">
Click the **Add new connection** button and select **Bitbucket** from the list of available connections.
</Step>
<Step title="Fill out the Bitbucket Connection Modal">
Complete the Bitbucket Connection form by entering:
- A descriptive name for the connection
- An optional description for future reference
- Your Bitbucket email
- The API Token from earlier steps
![Bitbucket Connection Modal](/images/app-connections/bitbucket/step-6.png)
</Step>
<Step title="Connection Created">
After clicking Create, your **Bitbucket Connection** is established and ready to use with your Infisical projects.
![Bitbucket Connection Created](/images/app-connections/bitbucket/step-7.png)
</Step>
</Steps>
</Tab>
<Tab title="API">
To create a Bitbucket Connection, make an API request to the [Create Bitbucket Connection](/api-reference/endpoints/app-connections/bitbucket/create) API endpoint.
### Sample request
```bash Request
curl --request POST \
--url https://app.infisical.com/api/v1/app-connections/bitbucket \
--header 'Content-Type: application/json' \
--data '{
"name": "my-bitbucket-connection",
"method": "api-token",
"credentials": {
"email": "user@example.com",
"apiToken": "<YOUR-API-TOKEN>"
}
}'
```
### Sample response
```bash Response
{
"appConnection": {
"id": "e5d18aca-86f7-4026-a95e-efb8aeb0d8e6",
"name": "my-bitbucket-connection",
"description": null,
"version": 1,
"orgId": "6f03caa1-a5de-43ce-b127-95a145d3464c",
"createdAt": "2025-04-23T19:46:34.831Z",
"updatedAt": "2025-04-23T19:46:34.831Z",
"isPlatformManagedCredentials": false,
"credentialsHash": "7c2d371dec195f82a6a0d5b41c970a229cfcaf88e894a5b6395e2dbd0280661f",
"app": "bitbucket",
"method": "api-token",
"credentials": {
"email": "user@example.com"
}
}
}
```
</Tab>
</Tabs>

View File

@ -35,7 +35,7 @@ Missing an integration? [Throw in a request](https://github.com/Infisical/infisi
| [Azure Key Vault](/integrations/cloud/azure-key-vault) | Cloud | Available | | [Azure Key Vault](/integrations/cloud/azure-key-vault) | Cloud | Available |
| [GCP Secret Manager](/integrations/cloud/gcp-secret-manager) | Cloud | Available | | [GCP Secret Manager](/integrations/cloud/gcp-secret-manager) | Cloud | Available |
| [Windmill](/integrations/cloud/windmill) | Cloud | Available | | [Windmill](/integrations/cloud/windmill) | Cloud | Available |
| [BitBucket](/integrations/cicd/bitbucket) | CI/CD | Available | | [Bitbucket](/integrations/cicd/bitbucket) | CI/CD | Available |
| [Codefresh](/integrations/cicd/codefresh) | CI/CD | Available | | [Codefresh](/integrations/cicd/codefresh) | CI/CD | Available |
| [GitHub Actions](/integrations/cicd/githubactions) | CI/CD | Available | | [GitHub Actions](/integrations/cicd/githubactions) | CI/CD | Available |
| [GitLab](/integrations/cicd/gitlab) | CI/CD | Available | | [GitLab](/integrations/cicd/gitlab) | CI/CD | Available |

View File

@ -669,11 +669,11 @@ To help you sync secrets from Infisical to services such as Github and Gitlab, I
<Accordion title="Bitbucket"> <Accordion title="Bitbucket">
<ParamField query="CLIENT_ID_BITBUCKET" type="string" default="none" optional> <ParamField query="CLIENT_ID_BITBUCKET" type="string" default="none" optional>
OAuth2 client ID for BitBucket integration OAuth2 client ID for Bitbucket integration
</ParamField> </ParamField>
<ParamField query="CLIENT_SECRET_BITBUCKET" type="string" default="none" optional> <ParamField query="CLIENT_SECRET_BITBUCKET" type="string" default="none" optional>
OAuth2 client secret for BitBucket integration OAuth2 client secret for Bitbucket integration
</ParamField> </ParamField>
</Accordion> </Accordion>

View File

@ -4,17 +4,20 @@ description: "Read how to run Infisical with Docker Compose template."
--- ---
This self-hosting guide will walk you through the steps to self-host Infisical using Docker Compose. This self-hosting guide will walk you through the steps to self-host Infisical using Docker Compose.
## Prerequisites
- [Docker](https://docs.docker.com/engine/install/)
- [Docker compose](https://docs.docker.com/compose/install/)
<Warning> <Tabs>
This Docker Compose configuration is not designed for high-availability production scenarios. <Tab title="Docker Compose">
It includes just the essential components needed to set up an Infisical proof of concept (POC). ## Prerequisites
To run Infisical in a highly available manner, follow the [Docker Swarm guide](/self-hosting/deployment-options/docker-swarm). - [Docker](https://docs.docker.com/engine/install/)
</Warning> - [Docker compose](https://docs.docker.com/compose/install/)
## Verify prerequisites <Warning>
This Docker Compose configuration is not designed for high-availability production scenarios.
It includes just the essential components needed to set up an Infisical proof of concept (POC).
To run Infisical in a highly available manner, follow the [Docker Swarm guide](/self-hosting/deployment-options/docker-swarm).
</Warning>
## Verify prerequisites
To verify that Docker compose and Docker are installed on the machine where you plan to install Infisical, run the following commands. To verify that Docker compose and Docker are installed on the machine where you plan to install Infisical, run the following commands.
Check for docker installation Check for docker installation
@ -27,55 +30,145 @@ To run Infisical in a highly available manner, give the [Docker Swarm guide](/se
docker-compose docker-compose
``` ```
## Download docker compose file ## Download docker compose file
You can obtain the Infisical docker compose file by using a command-line downloader such as `wget` or `curl`. You can obtain the Infisical docker compose file by using a command-line downloader such as `wget` or `curl`.
If your system doesn't have either of these, you can use an equivalent command that works with your machine. If your system doesn't have either of these, you can use an equivalent command that works with your machine.
<Tabs>
<Tab title="curl">
```bash
curl -o docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
<Tab title="wget">
```bash
wget -O docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
</Tabs>
## Configure instance credentials
Infisical requires a set of credentials used for connecting to dependent services such as Postgres, Redis, etc.
The default credentials can be downloaded using one of the commands listed below.
<Tabs>
<Tab title="curl">
```bash
curl -o .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
<Tab title="wget">
```bash
wget -O .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
</Tabs>
Once downloaded, the credentials file will be saved to your working directory as a `.env` file.
View all available configurations [here](/self-hosting/configuration/envars).
<Warning>
The default .env file contains credentials that are intended solely for testing purposes.
Please generate a new `ENCRYPTION_KEY` and `AUTH_SECRET` for use outside of testing.
Instructions to do so, can be found [here](/self-hosting/configuration/envars).
</Warning>
## Start Infisical
Run the command below to start Infisical and all related services.
<Tabs>
<Tab title="curl">
```bash ```bash
curl -o docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml docker-compose -f docker-compose.prod.yml up
``` ```
</Tab> </Tab>
<Tab title="wget"> <Tab title="Podman Compose">
```bash Podman Compose is an alternative way to run Infisical using Podman as a replacement for Docker. Podman is backwards compatible with Docker Compose files.
wget -O docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
## Prerequisites
- [Podman](https://podman-desktop.io/docs/installation)
- [Podman Compose](https://podman-desktop.io/docs/compose)
<Warning>
This Docker Compose configuration is not designed for high-availability production scenarios.
It includes just the essential components needed to set up an Infisical proof of concept (POC).
To run Infisical in a highly available manner, follow the [Docker Swarm guide](/self-hosting/deployment-options/docker-swarm).
</Warning>
## Verify prerequisites
To verify that Podman compose and Podman are installed on the machine where you plan to install Infisical, run the following commands.
Check for podman installation
```bash
podman version
```
Check for podman compose installation
```bash
podman-compose version
```
## Download Docker Compose file
You can obtain the Infisical docker compose file by using a command-line downloader such as `wget` or `curl`.
If your system doesn't have either of these, you can use an equivalent command that works with your machine.
<Tabs>
<Tab title="curl">
```bash
curl -o docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
<Tab title="wget">
```bash
wget -O docker-compose.prod.yml https://raw.githubusercontent.com/Infisical/infisical/main/docker-compose.prod.yml
```
</Tab>
</Tabs>
## Configure instance credentials
Infisical requires a set of credentials used for connecting to dependent services such as Postgres, Redis, etc.
The default credentials can be downloaded using one of the commands listed below.
<Tabs>
<Tab title="curl">
```bash
curl -o .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
<Tab title="wget">
```bash
wget -O .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
</Tabs>
<Note>
Make sure to rename the `.env.example` file to `.env` before starting Infisical. Additionally, it's important that the `.env` file is in the same directory as the `docker-compose.prod.yml` file.
</Note>
## Setup Podman
Run the commands below to setup Podman for first time use.
```bash
podman machine init --now
podman machine set --rootful
podman machine start
```
<Note>
If you are using a rootless podman installation, you can skip the `podman machine set --rootful` command.
</Note>
## Start Infisical
Run the command below to start Infisical and all related services.
```bash
podman-compose -f docker-compose.prod.yml up
``` ```
</Tab> </Tab>
</Tabs> </Tabs>
## Configure instance credentials
Infisical requires a set of credentials used for connecting to dependent services such as Postgres, Redis, etc.
The default credentials can be downloaded using one of the commands listed below.
<Tabs>
<Tab title="curl">
```bash
curl -o .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
<Tab title="wget">
```bash
wget -O .env https://raw.githubusercontent.com/Infisical/infisical/main/.env.example
```
</Tab>
</Tabs>
Once downloaded, the credentials file will be saved to your working directory as a `.env` file.
View all available configurations [here](/self-hosting/configuration/envars).
<Warning>
The default .env file contains credentials that are intended solely for testing purposes.
Please generate a new `ENCRYPTION_KEY` and `AUTH_SECRET` for use outside of testing.
Instructions to do so, can be found [here](/self-hosting/configuration/envars).
</Warning>
## Start Infisical
Run the command below to start Infisical and all related services.
```bash
docker-compose -f docker-compose.prod.yml up
```
Your Infisical instance should now be running on port `80`. To access your instance, visit `http://localhost:80`. Your Infisical instance should now be running on port `80`. To access your instance, visit `http://localhost:80`.

View File

Before

Width:  |  Height:  |  Size: 9.4 KiB

After

Width:  |  Height:  |  Size: 9.4 KiB

View File

@ -0,0 +1,172 @@
import { useEffect } from "react";
import { Controller, useFormContext, useWatch } from "react-hook-form";
import { MultiValue, SingleValue } from "react-select";
import { faCircleInfo } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { FilterableSelect, FormControl, Select, SelectItem, Tooltip } from "@app/components/v2";
import {
TBitbucketRepo,
TBitbucketWorkspace,
useBitbucketConnectionListRepositories,
useBitbucketConnectionListWorkspaces
} from "@app/hooks/api/appConnections/bitbucket";
import { SecretScanningDataSource } from "@app/hooks/api/secretScanningV2";
import { TSecretScanningDataSourceForm } from "../schemas";
import { SecretScanningDataSourceConnectionField } from "../SecretScanningDataSourceConnectionField";
/** How repositories are chosen for scanning: everything, or an explicit list. */
const ScanMethod = {
  AllRepositories: "all-repositories",
  SelectRepositories: "select-repositories"
} as const;

type ScanMethod = (typeof ScanMethod)[keyof typeof ScanMethod];
/**
 * Config form fields for a Bitbucket secret-scanning data source.
 *
 * Lets the user pick an app connection, a Bitbucket workspace, and either
 * all repositories (stored as the ["*"] sentinel in config.includeRepos)
 * or an explicit list of repositories.
 */
export const BitbucketDataSourceConfigFields = () => {
  const { control, watch, setValue } = useFormContext<
    TSecretScanningDataSourceForm & {
      type: SecretScanningDataSource.Bitbucket;
    }
  >();

  const connectionId = useWatch({ control, name: "connection.id" });

  // An existing data source carries an "id"; passed down so the connection
  // field can adjust its behavior when editing rather than creating.
  const isUpdate = Boolean(watch("id"));

  const selectedWorkspaceSlug = useWatch({ control, name: "config.workspaceSlug" });

  // Workspaces can be listed as soon as a connection is chosen.
  const { data: workspaces, isPending: areWorkspacesLoading } =
    useBitbucketConnectionListWorkspaces(connectionId, { enabled: Boolean(connectionId) });

  // Repositories additionally require a selected workspace.
  const { data: repositories, isPending: areRepositoriesLoading } =
    useBitbucketConnectionListRepositories(connectionId, selectedWorkspaceSlug, {
      enabled: Boolean(connectionId) && Boolean(selectedWorkspaceSlug)
    });

  const includeRepos = watch("config.includeRepos");

  // The ["*"] sentinel (or an uninitialized field) means "scan all
  // repositories"; any other value is an explicit selection.
  const scanMethod =
    !includeRepos || includeRepos[0] === "*"
      ? ScanMethod.AllRepositories
      : ScanMethod.SelectRepositories;

  // Initialize includeRepos to the "scan all" sentinel when the field is unset.
  useEffect(() => {
    if (!includeRepos) {
      setValue("config.includeRepos", ["*"]);
    }
  }, [includeRepos, setValue]);

  return (
    <>
      <SecretScanningDataSourceConnectionField
        isUpdate={isUpdate}
        onChange={() => {
          // Changing the connection invalidates any workspace/repo selection.
          if (scanMethod === ScanMethod.SelectRepositories) {
            setValue("config.workspaceSlug", "");
            setValue("config.includeRepos", []);
          }
        }}
      />
      <Controller
        name="config.workspaceSlug"
        control={control}
        render={({ field: { value, onChange }, fieldState: { error } }) => (
          <FormControl
            isError={Boolean(error)}
            errorText={error?.message}
            label="Workspace"
            helperText={
              <Tooltip
                className="max-w-md"
                content={<>Ensure that your connection has the correct permissions.</>}
              >
                <div>
                  <span>Don&#39;t see the workspaces you&#39;re looking for?</span>{" "}
                  <FontAwesomeIcon icon={faCircleInfo} className="text-mineshaft-400" />
                </div>
              </Tooltip>
            }
          >
            <FilterableSelect
              menuPlacement="top"
              isLoading={areWorkspacesLoading && Boolean(connectionId)}
              isDisabled={!connectionId}
              value={value ? { slug: value } : null}
              onChange={(newValue) => {
                onChange((newValue as SingleValue<TBitbucketWorkspace>)?.slug);
                // A new workspace invalidates previously selected repositories.
                if (scanMethod === ScanMethod.SelectRepositories) {
                  setValue("config.includeRepos", []);
                }
              }}
              options={workspaces}
              placeholder="Select workspace..."
              getOptionLabel={(option) => option.slug}
              getOptionValue={(option) => option.slug}
            />
          </FormControl>
        )}
      />
      <FormControl label="Scan Repositories">
        <Select
          value={scanMethod}
          onValueChange={(val) => {
            // Switching method resets the list to the matching sentinel/empty value.
            setValue("config.includeRepos", val === ScanMethod.AllRepositories ? ["*"] : []);
          }}
          className="w-full border border-mineshaft-500 capitalize"
          position="popper"
          dropdownContainerClassName="max-w-none"
          isDisabled={!connectionId}
        >
          {Object.values(ScanMethod).map((method) => {
            return (
              <SelectItem className="capitalize" value={method} key={method}>
                {method.replace("-", " ")}
              </SelectItem>
            );
          })}
        </Select>
      </FormControl>
      {scanMethod === ScanMethod.SelectRepositories && (
        <Controller
          name="config.includeRepos"
          control={control}
          render={({ field: { value, onChange }, fieldState: { error } }) => (
            <FormControl
              isError={Boolean(error)}
              errorText={error?.message}
              label="Include Repositories"
              helperText={
                <Tooltip
                  className="max-w-md"
                  content={<>Ensure that your connection has the correct permissions.</>}
                >
                  <div>
                    <span>Don&#39;t see the repository you&#39;re looking for?</span>{" "}
                    <FontAwesomeIcon icon={faCircleInfo} className="text-mineshaft-400" />
                  </div>
                </Tooltip>
              }
            >
              <FilterableSelect
                menuPlacement="top"
                isLoading={areRepositoriesLoading && Boolean(connectionId)}
                isDisabled={!connectionId || !selectedWorkspaceSlug}
                isMulti
                value={repositories?.filter((repository) =>
                  (value as string[]).includes(repository.full_name)
                )}
                onChange={(newValue) => {
                  onChange(
                    newValue ? (newValue as MultiValue<TBitbucketRepo>).map((p) => p.full_name) : []
                  );
                }}
                options={repositories}
                placeholder="Select repositories..."
                getOptionLabel={(option) => option.full_name}
                getOptionValue={(option) => option.full_name}
              />
            </FormControl>
          )}
        />
      )}
    </>
  );
};

View File

@ -5,10 +5,12 @@ import { RESOURCE_DESCRIPTION_HELPER } from "@app/helpers/secretScanningV2";
import { SecretScanningDataSource } from "@app/hooks/api/secretScanningV2"; import { SecretScanningDataSource } from "@app/hooks/api/secretScanningV2";
import { TSecretScanningDataSourceForm } from "../schemas"; import { TSecretScanningDataSourceForm } from "../schemas";
import { BitbucketDataSourceConfigFields } from "./BitbucketDataSourceConfigFields";
import { GitHubDataSourceConfigFields } from "./GitHubDataSourceConfigFields"; import { GitHubDataSourceConfigFields } from "./GitHubDataSourceConfigFields";
const COMPONENT_MAP: Record<SecretScanningDataSource, React.FC> = { const COMPONENT_MAP: Record<SecretScanningDataSource, React.FC> = {
[SecretScanningDataSource.GitHub]: GitHubDataSourceConfigFields [SecretScanningDataSource.GitHub]: GitHubDataSourceConfigFields,
[SecretScanningDataSource.Bitbucket]: BitbucketDataSourceConfigFields
}; };
export const SecretScanningDataSourceConfigFields = () => { export const SecretScanningDataSourceConfigFields = () => {

View File

@ -0,0 +1,28 @@
import { useFormContext } from "react-hook-form";
import { GenericFieldLabel } from "@app/components/v2";
import { SecretScanningDataSource } from "@app/hooks/api/secretScanningV2";
import { TSecretScanningDataSourceForm } from "../schemas";
import { SecretScanningDataSourceConfigReviewSection } from "./shared";
/**
 * Read-only review summary for a Bitbucket secret-scanning data source:
 * shows the chosen connection, workspace slug, and repository scope.
 */
export const BitbucketDataSourceReviewFields = () => {
  const { watch } = useFormContext<
    TSecretScanningDataSourceForm & {
      type: SecretScanningDataSource.Bitbucket;
    }
  >();

  const [config, connection] = watch(["config", "connection"]);
  const { includeRepos, workspaceSlug } = config;

  // ["*"] is the sentinel for "scan every repository in the workspace".
  const scansAllRepos = includeRepos[0] === "*";
  const repoSummary = scansAllRepos ? "All" : includeRepos.join(", ");

  return (
    <SecretScanningDataSourceConfigReviewSection>
      {connection && <GenericFieldLabel label="Connection">{connection.name}</GenericFieldLabel>}
      <GenericFieldLabel label="Workspace Slug">{workspaceSlug}</GenericFieldLabel>
      <GenericFieldLabel label="Scan Repositories">{repoSummary}</GenericFieldLabel>
    </SecretScanningDataSourceConfigReviewSection>
  );
};

View File

@ -4,10 +4,12 @@ import { GenericFieldLabel } from "@app/components/v2";
import { SecretScanningDataSource } from "@app/hooks/api/secretScanningV2"; import { SecretScanningDataSource } from "@app/hooks/api/secretScanningV2";
import { TSecretScanningDataSourceForm } from "../schemas"; import { TSecretScanningDataSourceForm } from "../schemas";
import { BitbucketDataSourceReviewFields } from "./BitbucketDataSourceReviewFields";
import { GitHubDataSourceReviewFields } from "./GitHubDataSourceReviewFields"; import { GitHubDataSourceReviewFields } from "./GitHubDataSourceReviewFields";
const COMPONENT_MAP: Record<SecretScanningDataSource, React.FC> = { const COMPONENT_MAP: Record<SecretScanningDataSource, React.FC> = {
[SecretScanningDataSource.GitHub]: GitHubDataSourceReviewFields [SecretScanningDataSource.GitHub]: GitHubDataSourceReviewFields,
[SecretScanningDataSource.Bitbucket]: BitbucketDataSourceReviewFields
}; };
export const SecretScanningDataSourceReviewFields = () => { export const SecretScanningDataSourceReviewFields = () => {

View File

@ -0,0 +1,19 @@
import { z } from "zod";
import { SecretScanningDataSource } from "@app/hooks/api/secretScanningV2";
import { BaseSecretScanningDataSourceSchema } from "./base-secret-scanning-data-source-schema";
/**
 * Zod schema for the Bitbucket secret-scanning data source form.
 * Merged with the shared base schema (connection is required for this source).
 */
export const BitbucketDataSourceSchema = z
  .object({
    type: z.literal(SecretScanningDataSource.Bitbucket),
    config: z.object({
      workspaceSlug: z.string().min(1, "Workspace Required").max(128),
      // Holds either the ["*"] "scan all" sentinel or an explicit repo list.
      includeRepos: z
        .array(z.string())
        .min(1, "One or more repositories required")
        .max(100, "Cannot configure more than 100 repositories")
    })
  })
  .merge(BaseSecretScanningDataSourceSchema({ isConnectionRequired: true }));

Some files were not shown because too many files have changed in this diff Show More