Compare commits

...

55 Commits

Author SHA1 Message Date
Scott Wilson
1898c16f1b improvement: make secret overview table header sticky, add underlines to env header links and limit table height for scroll 2025-08-01 16:47:11 -07:00
Scott Wilson
55e5360dd4 Merge pull request #4291 from Infisical/server-admin-bulk-delete
improvement(server-admin): add bulk delete users support, bulk actions server admin table support, overflow/truncation and dropdown improvements
2025-07-31 17:19:03 -07:00
Scott Wilson
77a8cd9efc improvement: add bulk delete users support, bulk actions server admin table support, overflow/truncation and dropdown improvements 2025-07-31 16:14:13 -07:00
Sid
52f773c647 feat: events system implementation (#4246)
* chore: save poc

* chore: save wip

* fix: undo cors

* fix: impl changes

* fix: PR changes

* fix: mocks

* fix: connection tracking and auth changes

* fix: PR changes

* fix: revert license

* feat: frontend change

* fix: revert docker compose.dev

* fix: duplicate publisher connection

* fix: pr changes

* chore: move event impl to `ee`

* fix: lint errors

* fix: check length of events

* fix: static permissions matching

* fix: secretPath

* fix: remove source prefix in bus event name

* fix: license check
2025-08-01 01:20:45 +05:30
Sid
79de7c5f08 feat: Add Netlify app connection and secrets sync (#4205)
* fix: save wip

* feat: final impl

* feat: docs

* Update backend/src/services/app-connection/digital-ocean/digital-ocean-connection-service.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* chore: remove empty conflict files

* Update backend/src/server/routes/v1/app-connection-routers/app-connection-router.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update frontend/src/components/secret-syncs/forms/schemas/digital-ocean-app-platform-sync-destination-schema.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update frontend/src/components/secret-syncs/forms/schemas/digital-ocean-app-platform-sync-destination-schema.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update frontend/src/components/secret-syncs/forms/SecretSyncDestinationFields/DigitalOceanAppPlatformSyncFields.tsx

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update backend/src/services/secret-sync/digital-ocean-app-platform/digital-ocean-app-platform-sync-schemas.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* fix: lint

* feat: Netlify app connection and secrets sync

* feat: docs

* fix: type check

* fix: api client

* fix: lint and types

* fix: typecheck lint

* fix: docs

* fix: lint

* fix: lint

* fix: PR changes

* fix: typecheck

* fix: PR changes

* fix PR changes

* fix: PR Change

* fix: type error

* Small tweaks

* fix: support is_secret

* fix: revert is_secret

* fix: force update existing netlify secret

---------

Co-authored-by: sidwebworks <xodeveloper@gmail.com>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: x032205 <x032205@gmail.com>
2025-08-01 00:24:40 +05:30
x032205
3877fe524d Merge pull request #4286 from Infisical/ENG-3376
feat(app-connections, PKI): Cloudflare as DNS provider
2025-07-31 13:34:31 -04:00
Daniel Hougaard
4c5df70790 Merge pull request #4290 from Infisical/daniel/fix-vault-migration
fix(external-migration/vault): fix vault parsing
2025-07-31 21:28:58 +04:00
x032205
5645dd2b8d Lint + form fixes 2025-07-31 13:21:28 -04:00
Daniel Hougaard
0d55195561 Fixed mailing inconsistency 2025-07-31 21:20:54 +04:00
x032205
1c0caab469 Remove typo 2025-07-31 13:01:04 -04:00
x032205
ed9dfd2974 Docs fix 2025-07-31 12:55:59 -04:00
Daniel Hougaard
7f72037d77 Update vault.ts 2025-07-31 20:54:21 +04:00
x032205
9928ca17ea Greptile review fixes 2025-07-31 12:51:56 -04:00
Daniel Hougaard
7357d377e1 Merge pull request #4281 from Infisical/daniel/hsm-support-main-image
chore(hsm): add hsm support to main docker image
2025-07-31 18:03:35 +04:00
x032205
149cecd805 Small tweaks 2025-07-31 00:32:31 -04:00
x032205
c80fd55a74 docs 2025-07-31 00:29:02 -04:00
x032205
93e7723b48 feat(app-connections, PKI): Cloudflare as DNS provider 2025-07-31 00:10:18 -04:00
Scott Wilson
573b990aa3 Merge pull request #4269 from Infisical/org-bulk-user-deletion
improvement(org-memberships): add bulk delete org memberships endpoint and table support
2025-07-30 18:49:57 -07:00
Scott Wilson
e15086edc0 fix: prevent bulk deletion on frontend if scim is enabled 2025-07-30 18:37:58 -07:00
Vlad Matsiiako
13ef3809bd Merge pull request #4283 from Infisical/update-favicon
improvement(frontend/docs): update favicon for app and docs
2025-07-30 17:06:38 -07:00
Scott Wilson
fb49c9250a chore: add missing .ico 2025-07-30 17:01:05 -07:00
Scott Wilson
5ced7fa923 improvement: update favicon for app and docs 2025-07-30 16:59:12 -07:00
Scott Wilson
5ffd42378a Merge pull request #4256 from Infisical/gitlab-secret-scanning
feature(secret-scanning): gitlab secret scanning
2025-07-30 16:53:02 -07:00
Scott Wilson
f995708e44 merge main 2025-07-30 16:38:35 -07:00
carlosmonastyrski
c266d68993 Merge pull request #4280 from Infisical/fix/secretApprovalConditionalReadPermissions
Fix conditional permissions check on secret access request hidden values
2025-07-30 20:16:48 -03:00
Daniel Hougaard
c7c8107f85 Update Dockerfile.standalone-infisical 2025-07-31 02:15:08 +04:00
Carlos Monastyrski
b906fe34a1 Fix conditional permissions check on secret access request hidden values 2025-07-30 18:37:54 -03:00
Daniel Hougaard
bec1fefee8 Merge pull request #4271 from Infisical/feat/azureAppConnectionsNewAuth
Add Azure Client Secrets Auth to Azure App Connections
2025-07-30 23:47:15 +04:00
Carlos Monastyrski
cd03107a60 Minor frontend fixes on Azure App Connection forms 2025-07-30 16:42:02 -03:00
Scott Wilson
07965de1db Merge pull request #4279 from Infisical/azure-client-secret-expiry-adjustment
improvement(azure-client-secret-rotation): reduce token expiry to two rotation intervals
2025-07-30 12:01:08 -07:00
Carlos Monastyrski
b20ff0f029 Minor fix on docs titles 2025-07-30 15:35:47 -03:00
Scott Wilson
691cbe0a4f fix: correct issue client secret rotation interval check 2025-07-30 11:15:10 -07:00
x032205
0787128803 Merge pull request #4277 from Infisical/fix-sql-app-conn-gateways
Fix SQL app connection with gateways
2025-07-30 14:09:24 -04:00
Scott Wilson
837158e344 improvement: reduce azure client secret token expiry to two rotation intervals 2025-07-30 11:09:16 -07:00
x032205
03bd1471b2 Revert old "fix" + new bug patch 2025-07-30 13:58:46 -04:00
Scott Wilson
f53c39f65b improvements: address feedback, improve org members table overflow handling, fix user details email/username overflow 2025-07-30 10:43:10 -07:00
Daniel Hougaard
092695089d Merge pull request #4276 from Infisical/daniel/fix-github-app-conn
fix(app-connections): github app connection creation
2025-07-30 21:17:51 +04:00
x032205
2d80681597 Fix 2025-07-30 13:16:48 -04:00
Scott Wilson
cf23f98170 Merge pull request #4259 from Infisical/org-alert-banner-additions
improvement(frontend): revise org alter banner designs and add smtp banner
2025-07-30 10:14:34 -07:00
Daniel Hougaard
c4c8e121f0 Update OauthCallbackPage.tsx 2025-07-30 21:03:36 +04:00
Scott Wilson
0701c996e5 improvement: update smtp link 2025-07-30 09:43:47 -07:00
Scott Wilson
4ca6f165b7 improvement: revise org alter banners and add smtp banner 2025-07-30 09:42:31 -07:00
Scott Wilson
b9dd565926 Merge pull request #4273 from Infisical/improve-initial-app-loading-ui
improvement(frontend): make login/org selection loading screens consistent
2025-07-30 09:11:33 -07:00
Daniel Hougaard
136b0bdcb5 Merge pull request #4275 from Infisical/daniel/update-passport-saml
fix: update passport saml
2025-07-30 18:14:21 +04:00
Daniel Hougaard
7266d1f310 fix: update passport saml 2025-07-30 17:43:57 +04:00
carlosmonastyrski
9c6ec807cb Merge pull request #4212 from Infisical/feat/blockLastPaymentMethodDelete
Prevent users from deleting the last payment method attached to the org
2025-07-30 09:59:50 -03:00
Carlos Monastyrski
5fcae35fae Improve azure app connection docs 2025-07-29 22:32:14 -03:00
Carlos Monastyrski
359e19f804 Add Azure Client Secrets Auth to Azure App Connections 2025-07-29 22:05:28 -03:00
Scott Wilson
2aa548c7dc improvement: address feedback 2025-07-29 17:06:33 -07:00
Scott Wilson
4f00fc6777 improvement: add bulk delete org members endpoint and table support 2025-07-29 16:42:13 -07:00
Scott Wilson
82b765553c chore: remove unused form variable 2025-07-28 15:22:44 -07:00
Scott Wilson
8972521716 chore: add images 2025-07-28 15:22:19 -07:00
Scott Wilson
81b45b24ec improvement: address greptile feedback 2025-07-28 15:16:10 -07:00
Scott Wilson
f2b0e4ae37 feature: gitlab secret scanning 2025-07-28 15:03:23 -07:00
Carlos Monastyrski
b4ed1fa96a Prevent users from deleting the last payment method attached to the org 2025-07-21 21:17:36 -03:00
279 changed files with 7761 additions and 1500 deletions

View File

@@ -55,6 +55,8 @@ USER non-root-user
##
FROM base AS backend-build
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Install all required dependencies for build
@@ -84,6 +86,8 @@ RUN npm run build
# Production stage
FROM base AS backend-runner
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Install all required dependencies for runtime
@@ -112,6 +116,11 @@ RUN mkdir frontend-build
FROM base AS production
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
libtool \
libssl-dev \
ca-certificates \
bash \
curl \
@@ -171,6 +180,7 @@ ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV NODE_OPTIONS="--max-old-space-size=1024"
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /backend

View File

@@ -7,7 +7,6 @@
"": {
"name": "backend",
"version": "1.0.0",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
@@ -34,7 +33,7 @@
"@gitbeaker/rest": "^42.5.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@node-saml/passport-saml": "^5.1.0",
"@octokit/auth-app": "^7.1.1",
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",
@@ -9574,20 +9573,20 @@
}
},
"node_modules/@node-saml/node-saml": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
"integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.1.0.tgz",
"integrity": "sha512-t3cJnZ4aC7HhPZ6MGylGZULvUtBOZ6FzuUndaHGXjmIZHXnLfC/7L8a57O9Q9V7AxJGKAiRM5zu2wNm9EsvQpw==",
"license": "MIT",
"dependencies": {
"@types/debug": "^4.1.12",
"@types/qs": "^6.9.11",
"@types/qs": "^6.9.18",
"@types/xml-encryption": "^1.2.4",
"@types/xml2js": "^0.4.14",
"@xmldom/is-dom-node": "^1.0.1",
"@xmldom/xmldom": "^0.8.10",
"debug": "^4.3.4",
"xml-crypto": "^6.0.1",
"xml-encryption": "^3.0.2",
"debug": "^4.4.0",
"xml-crypto": "^6.1.2",
"xml-encryption": "^3.1.0",
"xml2js": "^0.6.2",
"xmlbuilder": "^15.1.1",
"xpath": "^0.0.34"
@@ -9597,9 +9596,9 @@
}
},
"node_modules/@node-saml/node-saml/node_modules/debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
@@ -9636,14 +9635,14 @@
}
},
"node_modules/@node-saml/passport-saml": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
"integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.1.0.tgz",
"integrity": "sha512-pBm+iFjv9eihcgeJuSUs4c0AuX1QEFdHwP8w1iaWCfDzXdeWZxUBU5HT2bY2S4dvNutcy+A9hYsH7ZLBGtgwDg==",
"license": "MIT",
"dependencies": {
"@node-saml/node-saml": "^5.0.1",
"@types/express": "^4.17.21",
"@types/passport": "^1.0.16",
"@node-saml/node-saml": "^5.1.0",
"@types/express": "^4.17.23",
"@types/passport": "^1.0.17",
"@types/passport-strategy": "^0.2.38",
"passport": "^0.7.0",
"passport-strategy": "^1.0.0"
@@ -13351,9 +13350,10 @@
"license": "MIT"
},
"node_modules/@types/express": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
"integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
"version": "4.17.23",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz",
"integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==",
"license": "MIT",
"dependencies": {
"@types/body-parser": "*",
"@types/express-serve-static-core": "^4.17.33",
@@ -13523,9 +13523,10 @@
}
},
"node_modules/@types/passport": {
"version": "1.0.16",
"resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.16.tgz",
"integrity": "sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A==",
"version": "1.0.17",
"resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.17.tgz",
"integrity": "sha512-aciLyx+wDwT2t2/kJGJR2AEeBz0nJU4WuRX04Wu9Dqc5lSUtwu0WERPHYsLhF9PtseiAMPBGNUOtFjxZ56prsg==",
"license": "MIT",
"dependencies": {
"@types/express": "*"
}
@@ -31953,9 +31954,9 @@
"license": "MIT"
},
"node_modules/xml-crypto": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
"integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.1.2.tgz",
"integrity": "sha512-leBOVQdVi8FvPJrMYoum7Ici9qyxfE4kVi+AkpUoYCSXaQF4IlBm1cneTK9oAxR61LpYxTx7lNcsnBIeRpGW2w==",
"license": "MIT",
"dependencies": {
"@xmldom/is-dom-node": "^1.0.1",

View File

@@ -153,7 +153,7 @@
"@gitbeaker/rest": "^42.5.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@node-saml/passport-saml": "^5.1.0",
"@octokit/auth-app": "^7.1.1",
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",

View File

@@ -12,6 +12,8 @@ import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certifi
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
import { TEventBusService } from "@app/ee/services/event/event-bus-service";
import { TServerSentEventsService } from "@app/ee/services/event/event-sse-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
@@ -296,6 +298,8 @@ declare module "fastify" {
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;
reminder: TReminderServiceFactory;
bus: TEventBusService;
sse: TServerSentEventsService;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

View File

@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
CreateGitLabDataSourceSchema,
GitLabDataSourceSchema,
UpdateGitLabDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/gitlab";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
/**
 * Registers the standard secret-scanning v2 CRUD endpoints for the GitLab
 * data source on the given Fastify server.
 *
 * Delegates entirely to {@link registerSecretScanningEndpoints}, supplying the
 * GitLab-specific response/create/update Zod schemas.
 */
export const registerGitLabSecretScanningRouter = async (server: FastifyZodProvider) => {
  const gitLabEndpointConfig = {
    type: SecretScanningDataSource.GitLab,
    server,
    responseSchema: GitLabDataSourceSchema,
    createSchema: CreateGitLabDataSourceSchema,
    updateSchema: UpdateGitLabDataSourceSchema
  };

  return registerSecretScanningEndpoints(gitLabEndpointConfig);
};

View File

@@ -1,3 +1,4 @@
import { registerGitLabSecretScanningRouter } from "@app/ee/routes/v2/secret-scanning-v2-routers/gitlab-secret-scanning-router";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
@@ -10,5 +11,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
[SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
[SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter,
[SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter
};

View File

@@ -4,6 +4,7 @@ import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import { GitLabDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/gitlab";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
@@ -24,7 +25,8 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
GitHubDataSourceListItemSchema,
BitbucketDataSourceListItemSchema
BitbucketDataSourceListItemSchema,
GitLabDataSourceListItemSchema
]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {

View File

@@ -1,7 +1,8 @@
import { AxiosError, RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { TEventBusService } from "@app/ee/services/event/event-bus-service";
import { TopicName, toPublishableEvent } from "@app/ee/services/event/types";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { logger } from "@app/lib/logger";
@@ -21,6 +22,7 @@ type TAuditLogQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
eventBusService: TEventBusService;
};
export type TAuditLogQueueServiceFactory = {
@@ -36,133 +38,17 @@ export const auditLogQueueServiceFactory = async ({
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
auditLogStreamDAL,
eventBusService
}: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
const appCfg = getConfig();
const pushToLog = async (data: TCreateAuditLogDTO) => {
if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
retryLimit: 10,
retryBackoff: true
});
} else {
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
removeOnComplete: true
});
}
};
if (appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.startPg<QueueName.AuditLog>(
QueueJobs.AuditLog,
async ([job]) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
const MS_IN_DAY = 24 * 60 * 60 * 1000;
let project;
if (!orgId) {
// it will never be undefined for both org and project id
// TODO(akhilmhdh): use caching here in dal to avoid db calls
project = await projectDAL.findById(projectId as string);
orgId = project.orgId;
}
const plan = await licenseService.getPlan(orgId);
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0 meaning its not supported
return;
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
try {
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
{
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
return response;
} catch (error) {
logger.error(
`Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
);
return error;
}
}
)
);
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
{
batchSize: 1,
workerCount: 30,
pollingIntervalSeconds: 0.5
}
);
}
removeOnComplete: true
});
};
queueService.start(QueueName.AuditLog, async (job) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
@@ -178,88 +64,97 @@ export const auditLogQueueServiceFactory = async ({
}
const plan = await licenseService.getPlan(orgId);
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0 meaning its not supported
return;
// skip inserting if audit log retention is 0 meaning its not supported
if (plan.auditLogsRetentionDays !== 0) {
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
try {
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
{
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
return response;
} catch (error) {
logger.error(
`Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
);
return error;
}
}
)
);
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const publishable = toPublishableEvent(event);
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
try {
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
{
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
return response;
} catch (error) {
logger.error(
`Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
);
return error;
}
}
)
);
if (publishable) {
await eventBusService.publish(TopicName.CoreServers, {
type: ProjectType.SecretManager,
source: "infiscal",
data: publishable.data
});
}
});
return {

View File

@@ -0,0 +1,83 @@
import Redis from "ioredis";
import { z } from "zod";
import { logger } from "@app/lib/logger";
import { EventSchema, TopicName } from "./types";
/**
 * Creates a Redis pub/sub backed event bus.
 *
 * Two dedicated connections are derived from the provided client: one for
 * publishing and one for subscribing. Call `init` before expecting `subscribe`
 * listeners to receive messages, and `close` on shutdown to release both
 * connections.
 *
 * @param redis - Base ioredis client; it is duplicated, never used directly.
 */
export const eventBusFactory = (redis: Redis) => {
  const publisher = redis.duplicate();
  // Duplicate the publisher to create a subscriber.
  // This is necessary because Redis does not allow a single connection to both publish and subscribe.
  const subscriber = publisher.duplicate();

  /**
   * Attaches error handlers to both connections and subscribes to the given
   * topics (defaults to every known TopicName).
   *
   * @param topics - Redis channels to subscribe to.
   */
  const init = async (topics: TopicName[] = Object.values(TopicName)) => {
    subscriber.on("error", (e) => {
      logger.error(e, "Event Bus subscriber error");
    });

    publisher.on("error", (e) => {
      logger.error(e, "Event Bus publisher error");
    });

    await subscriber.subscribe(...topics);
  };

  /**
   * Publishes an event to the specified topic.
   * The event is JSON-serialized; publish errors are logged, not thrown.
   * @param topic - The topic to publish the event to.
   * @param event - The event data to publish.
   */
  const publish = async <T extends z.input<typeof EventSchema>>(topic: TopicName, event: T) => {
    const json = JSON.stringify(event);

    return publisher.publish(topic, json, (err) => {
      if (err) {
        return logger.error(err, `Error publishing to channel ${topic}`);
      }
    });
  };

  /**
   * Registers a listener for messages on all subscribed channels.
   *
   * NOTE(review): the message is parsed with a plain `JSON.parse(...) as T`
   * cast — no runtime schema validation against EventSchema happens here;
   * callers must tolerate malformed payloads. Parse and handler errors are
   * logged and swallowed so one bad message cannot kill the subscriber.
   *
   * @param fn - The function to call when a message is received.
   * It should accept the parsed event data as an argument.
   * @template T - The type of the event data, which should match the schema defined in EventSchema.
   * @returns A function that can be called to unsubscribe from the event bus.
   */
  const subscribe = <T extends z.infer<typeof EventSchema>>(fn: (data: T) => Promise<void> | void) => {
    // Not using async await cause redis client's `on` method does not expect async listeners.
    const listener = (channel: string, message: string) => {
      try {
        const parsed = JSON.parse(message) as T;
        const thenable = fn(parsed);

        // If the function returns a Promise, catch any errors that occur during processing.
        if (thenable instanceof Promise) {
          thenable.catch((error) => {
            logger.error(error, `Error processing message from channel ${channel}`);
          });
        }
      } catch (error) {
        logger.error(error, `Error parsing message data from channel ${channel}`);
      }
    };

    subscriber.on("message", listener);

    // Detach only this listener; the underlying channel subscription remains.
    return () => {
      subscriber.off("message", listener);
    };
  };

  /**
   * Gracefully closes both pub/sub connections. Errors are logged rather than
   * rethrown so shutdown can proceed.
   */
  const close = async () => {
    try {
      await publisher.quit();
      await subscriber.quit();
    } catch (error) {
      logger.error(error, "Error closing event bus connections");
    }
  };

  return { init, publish, subscribe, close };
};

export type TEventBusService = ReturnType<typeof eventBusFactory>;

View File

@@ -0,0 +1,164 @@
/* eslint-disable no-continue */
import { subject } from "@casl/ability";
import Redis from "ioredis";
import { KeyStorePrefixes } from "@app/keystore/keystore";
import { logger } from "@app/lib/logger";
import { TEventBusService } from "./event-bus-service";
import { createEventStreamClient, EventStreamClient, IEventStreamClientOpts } from "./event-sse-stream";
import { EventData, RegisteredEvent, toBusEventName } from "./types";
// How often each connected client's auth context is re-validated.
const AUTH_REFRESH_INTERVAL = 60 * 1000;
// How often a keep-alive ping is written to each open SSE stream.
const HEART_BEAT_INTERVAL = 15 * 1000;

/**
 * Creates the Server-Sent Events service: fans events from the event bus out
 * to connected SSE clients, filtered per-client by CASL permissions, and
 * tracks active connections per (projectId, identityId) in Redis.
 *
 * @param bus - Event bus to receive events from.
 * @param redis - Redis client used for connection bookkeeping (not pub/sub).
 */
export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
  let heartbeatInterval: NodeJS.Timeout | null = null;
  const clients = new Set<EventStreamClient>();

  // Periodic keep-alive so intermediaries don't drop idle SSE connections.
  heartbeatInterval = setInterval(() => {
    for (const client of clients) {
      if (client.stream.closed) continue;
      void client.ping();
    }
  }, HEART_BEAT_INTERVAL);

  // Periodic re-check of each client's authorization; presumably `refresh`
  // drops clients whose auth has expired — confirm in EventStreamClient.
  const refreshInterval = setInterval(() => {
    for (const client of clients) {
      if (client.stream.closed) continue;
      void client.refresh();
    }
  }, AUTH_REFRESH_INTERVAL);

  // Removes one connection id from the identity's connection list and deletes
  // its per-connection key. NOTE(review): despite the "Set" prefix name, the
  // lrem/lrange/llen calls below treat this key as a Redis list.
  const removeActiveConnection = async (projectId: string, identityId: string, connectionId: string) => {
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, connectionId);

    await Promise.all([redis.lrem(set, 0, connectionId), redis.del(key)]);
  };

  /**
   * Returns the number of live SSE connections for an identity in a project.
   * Connection ids whose per-connection key has expired (mget returned null)
   * are pruned from the list before the final count is taken.
   */
  const getActiveConnectionsCount = async (projectId: string, identityId: string) => {
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
    const connections = await redis.lrange(set, 0, -1);

    if (connections.length === 0) {
      return 0; // No active connections
    }

    const keys = connections.map((c) => KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, c));
    const values = await redis.mget(...keys);

    // eslint-disable-next-line no-plusplus
    for (let i = 0; i < values.length; i++) {
      if (values[i] === null) {
        // Stale entry: the per-connection key is gone, so drop it from the list.
        // eslint-disable-next-line no-await-in-loop
        await removeActiveConnection(projectId, identityId, connections[i]);
      }
    }

    return redis.llen(set);
  };

  // Tears down one client: closes its stream, forgets it locally, and clears
  // its Redis bookkeeping. Errors are logged, never propagated.
  const onDisconnect = async (client: EventStreamClient) => {
    try {
      client.close();
      clients.delete(client);
      await removeActiveConnection(client.auth.projectId, client.auth.actorId, client.id);
    } catch (error) {
      logger.error(error, "Error during SSE stream disconnection");
    }
  };

  // Forwards an event to a client only if the client registered for the
  // event's type AND its CASL matcher permits the (environment, secretPath)
  // subject. Batched (array) payloads are filtered item-by-item; the event is
  // re-emitted with only the permitted items.
  function filterEventsForClient(client: EventStreamClient, event: EventData, registered: RegisteredEvent[]) {
    const eventType = toBusEventName(event.data.eventType);
    const match = registered.find((r) => r.event === eventType);
    if (!match) return;

    const item = event.data.payload;

    if (Array.isArray(item)) {
      if (item.length === 0) return;

      // Reused mutable subject: fields are overwritten per item just before
      // each `can` check, so each item is evaluated against its own
      // environment/secretPath.
      const baseSubject = {
        eventType,
        environment: undefined as string | undefined,
        secretPath: undefined as string | undefined
      };

      const filtered = item.filter((ev) => {
        baseSubject.secretPath = ev.secretPath ?? "/";
        baseSubject.environment = ev.environment;

        return client.matcher.can("subscribe", subject(event.type, baseSubject));
      });

      if (filtered.length === 0) return;

      return client.send({
        ...event,
        data: {
          ...event.data,
          payload: filtered
        }
      });
    }

    // For single item
    const baseSubject = {
      eventType,
      secretPath: item.secretPath ?? "/",
      environment: item.environment
    };

    if (client.matcher.can("subscribe", subject(event.type, baseSubject))) {
      client.send(event);
    }
  }

  /**
   * Opens a new SSE stream for a caller and wires it to the event bus.
   * The bus subscription is torn down (and Redis bookkeeping cleared) when
   * the underlying stream closes.
   *
   * @returns The opened stream client.
   */
  const subscribe = async (
    opts: IEventStreamClientOpts & {
      onClose?: () => void;
    }
  ) => {
    const client = createEventStreamClient(redis, opts);

    // Set up event listener on event bus
    const unsubscribe = bus.subscribe((event) => {
      if (event.type !== opts.type) return;
      filterEventsForClient(client, event, opts.registered);
    });

    client.stream.on("close", () => {
      unsubscribe();
      void onDisconnect(client); // This will never throw
    });

    await client.open();
    clients.add(client);

    return client;
  };

  /**
   * Stops the heartbeat/refresh timers and closes every connected client.
   * Intended for server shutdown.
   */
  const close = () => {
    if (heartbeatInterval) {
      clearInterval(heartbeatInterval);
    }

    if (refreshInterval) {
      clearInterval(refreshInterval);
    }

    for (const client of clients) {
      client.close();
    }

    clients.clear();
  };

  return { subscribe, close, getActiveConnectionsCount };
};

export type TServerSentEventsService = ReturnType<typeof sseServiceFactory>;

View File

@@ -0,0 +1,178 @@
/* eslint-disable no-underscore-dangle */
import { Readable } from "node:stream";
import { MongoAbility, PureAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import Redis from "ioredis";
import { nanoid } from "nanoid";
import { ProjectType } from "@app/db/schemas";
import { ProjectPermissionSet } from "@app/ee/services/permission/project-permission";
import { KeyStorePrefixes } from "@app/keystore/keystore";
import { conditionsMatcher } from "@app/lib/casl";
import { logger } from "@app/lib/logger";
import { EventData, RegisteredEvent } from "./types";
/**
 * HTTP response headers required to open a server-sent-events stream:
 * disables caching and nginx proxy buffering, keeps the connection alive.
 */
export const getServerSentEventsHeaders = () => {
  const headers = {
    "Cache-Control": "no-cache",
    "Content-Type": "text/event-stream",
    Connection: "keep-alive",
    "X-Accel-Buffering": "no"
  } as const;
  return headers;
};
// Resolved identity + project permissions for one SSE connection.
type TAuthInfo = {
  actorId: string;
  projectId: string;
  permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
};

// Options for opening an event-stream client.
export interface IEventStreamClientOpts {
  // Project type whose events this client subscribes to.
  type: ProjectType;
  // Event subscriptions (event name + optional path/environment conditions).
  registered: RegisteredEvent[];
  // Invoked with fresh auth info on open and on every periodic refresh.
  onAuthRefresh: (info: TAuthInfo) => Promise<void> | void;
  // Resolves the connection's current identity and permissions.
  getAuthInfo: () => Promise<TAuthInfo> | TAuthInfo;
}
// One SSE message before serialization to the wire format.
interface EventMessage {
  time?: string | number;
  type: string;
  data?: unknown;
}

/**
 * Serializes a message into the SSE wire format:
 *   id: <time>\nevent: <type>\ndata: <json>\n\n
 * Undefined fields are omitted.
 */
function serializeSseEvent(chunk: EventMessage): string {
  let payload = "";
  // Explicit undefined checks so valid falsy values (e.g. time 0) still serialize.
  if (chunk.time !== undefined) payload += `id: ${chunk.time}\n`;
  if (chunk.type) payload += `event: ${chunk.type}\n`;
  // Fix: previously the whole envelope (including type/time) was stringified
  // into the data field; only the payload itself belongs after "data:".
  if (chunk.data !== undefined) payload += `data: ${JSON.stringify(chunk.data)}\n`;
  return `${payload}\n`;
}
// Handle representing one connected SSE consumer.
export type EventStreamClient = {
  // Unique connection id ("sse-<nanoid>"), also used in Redis bookkeeping keys.
  id: string;
  // Object-mode readable the HTTP layer pipes to the response.
  stream: Readable;
  // Resolves auth and registers the connection in Redis; call once per connection.
  open: () => Promise<void>;
  // Pushes one serialized SSE message onto the stream.
  send: (data: EventMessage | EventData) => void;
  // Refreshes the Redis liveness key and writes a keep-alive chunk.
  ping: () => Promise<void>;
  // Re-resolves auth; on failure sends an error event and closes the stream.
  refresh: () => Promise<void>;
  // Ends and destroys the stream (idempotent).
  close: () => void;
  // Last resolved auth info; throws if open()/refresh() has not completed yet.
  get auth(): TAuthInfo;
  // Abort signal/trigger for callers that need cooperative cancellation.
  signal: AbortSignal;
  abort: () => void;
  // CASL ability built from the client's registered event conditions.
  matcher: PureAbility;
};
/**
 * Creates one SSE connection handle.
 *
 * Builds a CASL matcher from the registered event conditions, a manually-fed
 * object-mode Readable for the HTTP response, and the Redis bookkeeping
 * (per-identity connection list + per-connection liveness key with 60s TTL,
 * kept alive by ping()).
 *
 * @param redis - connection used for active-connection bookkeeping
 * @param options - subscription registrations and auth resolution callbacks
 */
export function createEventStreamClient(redis: Redis, options: IEventStreamClientOpts): EventStreamClient {
  // One "subscribe" rule per registered event, scoped to the project type.
  const rules = options.registered.map((r) => ({
    subject: options.type,
    action: "subscribe",
    conditions: {
      eventType: r.event,
      secretPath: r.conditions?.secretPath ?? "/",
      environment: r.conditions?.environmentSlug
    }
  }));
  const id = `sse-${nanoid()}`;
  const control = new AbortController();
  const matcher = new PureAbility(rules, { conditionsMatcher });
  let auth: TAuthInfo | undefined;
  const stream = new Readable({
    objectMode: true
  });
  // We will manually push data to the stream
  stream._read = () => {};

  // Serializes and pushes one message; drops it (debug-logged) on backpressure.
  const send = (data: EventMessage | EventData) => {
    const chunk = serializeSseEvent(data);
    if (!stream.push(chunk)) {
      logger.debug("Backpressure detected: dropped manual event");
    }
  };
  stream.on("error", (error: Error) => stream.destroy(error));

  // Resolves auth, notifies the consumer, and registers the connection in
  // Redis: append to the identity's connection list and set a 60s liveness key.
  const open = async () => {
    auth = await options.getAuthInfo();
    await options.onAuthRefresh(auth);
    const { actorId, projectId } = auth;
    const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, actorId);
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);
    await Promise.all([redis.rpush(set, id), redis.set(key, "1", "EX", 60)]);
  };

  // Refreshes the liveness TTL and pushes a keep-alive chunk to the stream.
  const ping = async () => {
    if (!auth) return; // Avoid race condition if ping is called before open
    const { actorId, projectId } = auth;
    const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);
    await redis.set(key, "1", "EX", 60);
    stream.push("1");
  };

  // Ends and tears down the stream; safe to call multiple times.
  const close = () => {
    if (stream.closed) return;
    stream.push(null);
    stream.destroy();
  };

  /**
   * Refreshes the connection's auth permissions.
   * Must be called at least once when the connection is opened.
   * On failure, sends an "error" event to the client and closes the stream.
   */
  const refresh = async () => {
    try {
      auth = await options.getAuthInfo();
      await options.onAuthRefresh(auth);
    } catch (error) {
      if (error instanceof Error) {
        // Fix: spreading an Error ({ ...error }) yields an empty object
        // because its own properties (message, stack) are non-enumerable;
        // copy the useful fields explicitly so the client sees the reason.
        send({
          type: "error",
          data: {
            name: error.name,
            message: error.message
          }
        });
        return close();
      }
      stream.emit("error", error);
    }
  };

  // Fires the abort signal; abort errors are logged at debug level only.
  const abort = () => {
    try {
      control.abort();
    } catch (error) {
      logger.debug(error, "Error aborting SSE stream");
    }
  };

  return {
    id,
    stream,
    open,
    send,
    ping,
    refresh,
    close,
    signal: control.signal,
    abort,
    matcher,
    get auth() {
      if (!auth) {
        throw new Error("Auth info not set");
      }
      return auth;
    }
  };
}

View File

@@ -0,0 +1,125 @@
import { z } from "zod";
import { ProjectType } from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
// Topic names the event bus publishes on.
export enum TopicName {
  CoreServers = "infisical::core-servers"
}

// Bus event names, one per secret mutation kind; single and bulk audit-log
// variants collapse onto the same bus name (see toBusEventName).
export enum BusEventName {
  CreateSecret = "secret:create",
  UpdateSecret = "secret:update",
  DeleteSecret = "secret:delete"
}

// Audit-log event types that get forwarded to the event bus.
// NOTE(review): "Publisable" is a typo for "Publishable"; left as-is since
// renaming touches other usages.
type PublisableEventTypes =
  | EventType.CREATE_SECRET
  | EventType.CREATE_SECRETS
  | EventType.DELETE_SECRET
  | EventType.DELETE_SECRETS
  | EventType.UPDATE_SECRETS
  | EventType.UPDATE_SECRET;
/**
 * Maps an audit-log event type onto its bus event name, collapsing the
 * single/bulk variants into one name. Returns null for event types that are
 * not published to the bus.
 */
export function toBusEventName(input: EventType) {
  if (input === EventType.CREATE_SECRET || input === EventType.CREATE_SECRETS) {
    return BusEventName.CreateSecret;
  }
  if (input === EventType.UPDATE_SECRET || input === EventType.UPDATE_SECRETS) {
    return BusEventName.UpdateSecret;
  }
  if (input === EventType.DELETE_SECRET || input === EventType.DELETE_SECRETS) {
    return BusEventName.DeleteSecret;
  }
  return null;
}
// Bulk audit-log events are recognized by their "-secrets" type suffix, which
// corresponds to metadata carrying a `secrets` array.
const isBulkEvent = (event: Event): event is Extract<Event, { metadata: { secrets: Array<unknown> } }> => {
  return /-secrets$/.test(event.type);
};
/**
 * Converts an audit-log event into a bus-publishable envelope, or null when
 * the event type has no bus equivalent. For bulk events, the shared
 * environment/secretPath is fanned out onto each secret entry.
 */
export const toPublishableEvent = (event: Event) => {
  const busName = toBusEventName(event.type);
  if (!busName) return null;
  const publishable = event as Extract<Event, { type: PublisableEventTypes }>;
  if (!isBulkEvent(publishable)) {
    return {
      name: busName,
      isBulk: false,
      data: {
        eventType: publishable.type,
        payload: {
          ...publishable.metadata,
          environment: publishable.metadata.environment
        }
      }
    } as const;
  }
  const { environment, secretPath, secrets } = publishable.metadata;
  return {
    name: busName,
    isBulk: true,
    data: {
      eventType: publishable.type,
      payload: secrets.map((secret) => ({
        environment,
        secretPath,
        ...secret
      }))
    }
  } as const;
};
// Zod validator accepting any BusEventName value.
export const EventName = z.nativeEnum(BusEventName);
// Payload shape for a single secret-mutation event.
const EventSecretPayload = z.object({
  secretPath: z.string().optional(),
  secretId: z.string(),
  secretKey: z.string(),
  environment: z.string()
});
export type EventSecret = z.infer<typeof EventSecretPayload>;
// Envelope schema for events carried on the bus. Field names (datacontenttype,
// specversion, source, time) appear to mirror CloudEvents attribute naming —
// presumably intentional; confirm before extending.
export const EventSchema = z.object({
  datacontenttype: z.literal("application/json").optional().default("application/json"),
  // Project type acts as the discriminating "subject" of the event.
  type: z.nativeEnum(ProjectType),
  source: z.string(),
  // Emission timestamp; defaults to now at parse time.
  time: z
    .string()
    .optional()
    .default(() => new Date().toISOString()),
  // Discriminated on the audit-log eventType: single events carry one payload
  // object, bulk events carry an array of them.
  data: z.discriminatedUnion("eventType", [
    z.object({
      specversion: z.number().optional().default(1),
      eventType: z.enum([EventType.CREATE_SECRET, EventType.UPDATE_SECRET, EventType.DELETE_SECRET]),
      payload: EventSecretPayload
    }),
    z.object({
      specversion: z.number().optional().default(1),
      eventType: z.enum([EventType.CREATE_SECRETS, EventType.UPDATE_SECRETS, EventType.DELETE_SECRETS]),
      payload: EventSecretPayload.array()
    })
    // Add more event types as needed
  ])
});
export type EventData = z.infer<typeof EventSchema>;
// Shape of one client event subscription: the bus event name plus optional
// filter conditions (secretPath defaults to "/" when conditions are given).
export const EventRegisterSchema = z.object({
  event: EventName,
  conditions: z
    .object({
      secretPath: z.string().optional().default("/"),
      environmentSlug: z.string()
    })
    .optional()
});
export type RegisteredEvent = z.infer<typeof EventRegisterSchema>;

View File

@@ -59,7 +59,8 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
secretScanning: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false,
fips: false
fips: false,
eventSubscriptions: false
});
export const setupLicenseRequestWithStore = (

View File

@@ -5,13 +5,14 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here
import { ForbiddenError } from "@casl/ability";
import { AxiosError } from "axios";
import { CronJob } from "cron";
import { Knex } from "knex";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -603,10 +604,22 @@ export const licenseServiceFactory = ({
});
}
const { data } = await licenseServerCloudApi.request.delete(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
);
return data;
try {
const { data } = await licenseServerCloudApi.request.delete(
`/api/license-server/v1/customers/${organization.customerId}/billing-details/payment-methods/${pmtMethodId}`
);
return data;
} catch (error) {
if (error instanceof AxiosError) {
throw new BadRequestError({
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
message: `Failed to remove payment method: ${error.response?.data?.message}`
});
}
throw new BadRequestError({
message: "Unable to remove payment method"
});
}
};
const getOrgTaxIds = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgTaxIdDTO) => {

View File

@@ -76,6 +76,7 @@ export type TFeatureSet = {
enterpriseSecretSyncs: false;
enterpriseAppConnections: false;
fips: false;
eventSubscriptions: false;
};
export type TOrgPlansTableDTO = {

View File

@@ -161,7 +161,8 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
],
ProjectPermissionSub.Secrets
);
@@ -265,7 +266,8 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
],
ProjectPermissionSub.Secrets
);

View File

@@ -36,7 +36,8 @@ export enum ProjectPermissionSecretActions {
ReadValue = "readValue",
Create = "create",
Edit = "edit",
Delete = "delete"
Delete = "delete",
Subscribe = "subscribe"
}
export enum ProjectPermissionCmekActions {
@@ -204,6 +205,7 @@ export type SecretSubjectFields = {
secretPath: string;
secretName?: string;
secretTags?: string[];
eventType?: string;
};
export type SecretFolderSubjectFields = {
@@ -483,7 +485,17 @@ const SecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
.partial(),
eventType: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();

View File

@@ -65,7 +65,10 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
import {
hasSecretReadValueOrDescribePermission,
throwIfMissingSecretReadValueOrDescribePermission
} from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
@@ -277,13 +280,19 @@ export const secretApprovalRequestServiceFactory = ({
) {
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
}
const hasSecretReadAccess = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath: secretPath || "/",
secretTags: tags.map((i) => i.slug)
});
return canRead;
};
let secrets;
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
secretApprovalRequest.folderId
]);
if (shouldUseSecretV2Bridge) {
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
@@ -299,8 +308,8 @@ export const secretApprovalRequestServiceFactory = ({
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret && el.secret.isRotatedSecret
? undefined
@@ -315,8 +324,12 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secret.key,
id: el.secret.id,
version: el.secret.version,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
@@ -331,8 +344,12 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secretVersion.key,
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValueHidden: !hasSecretReadAccess,
secretValue: !hasSecretReadAccess
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
@@ -350,7 +367,7 @@ export const secretApprovalRequestServiceFactory = ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
secretValueHidden: !hasSecretReadAccess,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
...decryptSecretWithBot(el, botKey),
secret: el.secret
? {
@@ -370,9 +387,6 @@ export const secretApprovalRequestServiceFactory = ({
: undefined
}));
}
const secretPath = await folderDAL.findSecretPathByFolderIds(secretApprovalRequest.projectId, [
secretApprovalRequest.folderId
]);
return { ...secretApprovalRequest, secretPath: secretPath?.[0]?.path || "/", commits: secrets };
};

View File

@@ -21,6 +21,8 @@ const GRAPH_API_BASE = "https://graph.microsoft.com/v1.0";
type AzureErrorResponse = { error: { message: string } };
const EXPIRY_PADDING_IN_DAYS = 3;
const sleep = async () =>
new Promise((resolve) => {
setTimeout(resolve, 1000);
@@ -33,7 +35,8 @@ export const azureClientSecretRotationFactory: TRotationFactory<
const {
connection,
parameters: { objectId, clientId: clientIdParam },
secretsMapping
secretsMapping,
rotationInterval
} = secretRotation;
/**
@@ -50,7 +53,7 @@ export const azureClientSecretRotationFactory: TRotationFactory<
)}-${now.getFullYear()}`;
const endDateTime = new Date();
endDateTime.setFullYear(now.getFullYear() + 5);
endDateTime.setDate(now.getDate() + rotationInterval * 2 + EXPIRY_PADDING_IN_DAYS); // give 72 hour buffer
try {
const { data } = await request.post<AzureAddPasswordResponse>(
@@ -195,6 +198,12 @@ export const azureClientSecretRotationFactory: TRotationFactory<
callback
) => {
const credentials = await $rotateClientSecret();
// 2.5 years as expiry is set to x2 interval for the inactive period of credential
if (rotationInterval > Math.floor(365 * 2.5) - EXPIRY_PADDING_IN_DAYS) {
throw new BadRequestError({ message: "Azure does not support token duration over 5 years" });
}
return callback(credentials);
};

View File

@@ -51,6 +51,7 @@ const baseSecretRotationV2Query = ({
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
@@ -104,6 +105,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
connectionCreatedAt,
connectionUpdatedAt,
connectionVersion,
connectionGatewayId,
connectionIsPlatformManagedCredentials,
...el
} = secretRotation;
@@ -123,6 +125,7 @@ const expandSecretRotation = <T extends Awaited<ReturnType<typeof baseSecretRota
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
gatewayId: connectionGatewayId,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
},
folder: {

View File

@@ -18,7 +18,8 @@ import {
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
TSecretScanningFactoryTeardown,
TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
@@ -302,6 +303,13 @@ export const BitbucketSecretScanningFactory = () => {
);
};
const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
TBitbucketDataSourceInput["config"],
TBitbucketDataSourceWithConnection
> = async () => {
// no validation required
};
return {
initialize,
postInitialization,
@@ -309,6 +317,7 @@ export const BitbucketSecretScanningFactory = () => {
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
teardown
teardown,
validateConfigUpdate
};
};

View File

@@ -20,7 +20,8 @@ import {
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
TSecretScanningFactoryTeardown,
TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
@@ -64,7 +65,14 @@ export const GitHubSecretScanningFactory = () => {
};
const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
// no termination required
// no teardown required
};
const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
TGitHubDataSourceInput["config"],
TGitHubDataSourceWithConnection
> = async () => {
// no validation required
};
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
@@ -238,6 +246,7 @@ export const GitHubSecretScanningFactory = () => {
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
teardown
teardown,
validateConfigUpdate
};
};

View File

@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
name: "GitLab",
type: SecretScanningDataSource.GitLab,
connection: AppConnection.GitLab
};

View File

@@ -0,0 +1,8 @@
export enum GitLabDataSourceScope {
Project = "project",
Group = "group"
}
export enum GitLabWebHookEvent {
Push = "Push Hook"
}

View File

@@ -0,0 +1,409 @@
import { Camelize, GitbeakerRequestError, GroupHookSchema, ProjectHookSchema } from "@gitbeaker/rest";
import { join } from "path";
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryParams,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown,
TSecretScanningFactoryValidateConfigUpdate
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { GitLabProjectRegex } from "@app/lib/regex";
import {
getGitLabConnectionClient,
getGitLabInstanceUrl,
TGitLabConnection
} from "@app/services/app-connection/gitlab";
import { GitLabDataSourceScope } from "./gitlab-secret-scanning-enums";
import {
TGitLabDataSourceCredentials,
TGitLabDataSourceInput,
TGitLabDataSourceWithConnection,
TQueueGitLabResourceDiffScan
} from "./gitlab-secret-scanning-types";
// Reduces an instance URL's hostname to its final two labels
// (e.g. "gitlab.example.com" -> "example.com"); single-label hosts such as
// "localhost" are returned unchanged.
const getMainDomain = (instanceUrl: string) => {
  const { hostname } = new URL(instanceUrl);
  const labels = hostname.split(".");
  return labels.length < 2 ? hostname : labels.slice(-2).join(".");
};
export const GitLabSecretScanningFactory = ({ appConnectionDAL, kmsService }: TSecretScanningFactoryParams) => {
const initialize: TSecretScanningFactoryInitialize<
TGitLabDataSourceInput,
TGitLabConnection,
TGitLabDataSourceCredentials
> = async ({ payload: { config, name }, connection }, callback) => {
const token = alphaNumericNanoId(64);
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
const appCfg = getConfig();
if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;
const project = await client.Projects.show(projectId);
if (!project) {
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
}
let hook: Camelize<ProjectHookSchema>;
try {
hook = await client.ProjectHooks.add(projectId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
token,
pushEvents: true,
enableSslVerification: true,
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${name}`
});
} catch (error) {
if (error instanceof GitbeakerRequestError) {
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
}
throw error;
}
try {
return await callback({
credentials: {
token,
hookId: hook.id
}
});
} catch (error) {
try {
await client.ProjectHooks.remove(projectId, hook.id);
} catch {
// do nothing, just try to clean up webhook
}
throw error;
}
}
// group scope
const { groupId } = config;
const group = await client.Groups.show(groupId);
if (!group) {
throw new BadRequestError({ message: `Could not find group with ID ${groupId}.` });
}
let hook: Camelize<GroupHookSchema>;
try {
hook = await client.GroupHooks.add(groupId, `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`, {
token,
pushEvents: true,
enableSslVerification: true,
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${name}`
});
} catch (error) {
if (error instanceof GitbeakerRequestError) {
throw new BadRequestError({ message: `${error.message}: ${error.cause?.description ?? "Unknown Error"}` });
}
throw error;
}
try {
return await callback({
credentials: {
token,
hookId: hook.id
}
});
} catch (error) {
try {
await client.GroupHooks.remove(groupId, hook.id);
} catch {
// do nothing, just try to clean up webhook
}
throw error;
}
};
const postInitialization: TSecretScanningFactoryPostInitialization<
TGitLabDataSourceInput,
TGitLabConnection,
TGitLabDataSourceCredentials
> = async ({ connection, dataSourceId, credentials, payload: { config } }) => {
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
const appCfg = getConfig();
const hookUrl = `${appCfg.SITE_URL}/secret-scanning/webhooks/gitlab`;
const { hookId } = credentials;
if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;
try {
await client.ProjectHooks.edit(projectId, hookId, hookUrl, {
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${dataSourceId}`,
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
});
} catch (error) {
try {
await client.ProjectHooks.remove(projectId, hookId);
} catch {
// do nothing, just try to clean up webhook
}
throw error;
}
return;
}
// group-scope
const { groupId } = config;
try {
await client.GroupHooks.edit(groupId, hookId, hookUrl, {
// @ts-expect-error gitbeaker is outdated, and the types don't support this field yet
name: `Infisical Secret Scanning - ${dataSourceId}`,
custom_headers: [{ key: "x-data-source-id", value: dataSourceId }]
});
} catch (error) {
try {
await client.GroupHooks.remove(groupId, hookId);
} catch {
// do nothing, just try to clean up webhook
}
throw error;
}
};
const listRawResources: TSecretScanningFactoryListRawResources<TGitLabDataSourceWithConnection> = async (
dataSource
) => {
const { connection, config } = dataSource;
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;
const project = await client.Projects.show(projectId);
if (!project) {
throw new BadRequestError({ message: `Could not find project with ID ${projectId}.` });
}
// scott: even though we have this data we want to get potentially updated name
return [
{
name: project.pathWithNamespace,
externalId: project.id.toString(),
type: SecretScanningResource.Project
}
];
}
// group-scope
const { groupId, includeProjects } = config;
const projects = await client.Groups.allProjects(groupId, {
archived: false
});
const filteredProjects: typeof projects = [];
if (!includeProjects || includeProjects.includes("*")) {
filteredProjects.push(...projects);
} else {
filteredProjects.push(...projects.filter((project) => includeProjects.includes(project.pathWithNamespace)));
}
return filteredProjects.map(({ id, pathWithNamespace }) => ({
name: pathWithNamespace,
externalId: id.toString(),
type: SecretScanningResource.Project
}));
};
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitLabDataSourceWithConnection> = async ({
dataSource,
resourceName,
tempFolder
}) => {
const { connection } = dataSource;
const instanceUrl = await getGitLabInstanceUrl(connection.credentials.instanceUrl);
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
const user = await client.Users.showCurrentUser();
const repoPath = join(tempFolder, "repo.git");
if (!GitLabProjectRegex.test(resourceName)) {
throw new Error("Invalid GitLab project name");
}
await cloneRepository({
cloneUrl: `https://${user.username}:${connection.credentials.accessToken}@${getMainDomain(instanceUrl)}/${resourceName}.git`,
repoPath
});
return repoPath;
};
const teardown: TSecretScanningFactoryTeardown<
TGitLabDataSourceWithConnection,
TGitLabDataSourceCredentials
> = async ({ dataSource: { connection, config }, credentials: { hookId } }) => {
const client = await getGitLabConnectionClient(connection, appConnectionDAL, kmsService);
if (config.scope === GitLabDataSourceScope.Project) {
const { projectId } = config;
try {
await client.ProjectHooks.remove(projectId, hookId);
} catch (error) {
// do nothing, just try to clean up webhook
}
return;
}
const { groupId } = config;
try {
await client.GroupHooks.remove(groupId, hookId);
} catch (error) {
// do nothing, just try to clean up webhook
}
};
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
TQueueGitLabResourceDiffScan["payload"]
> = ({ project }) => {
return {
name: project.path_with_namespace,
externalId: project.id.toString(),
type: SecretScanningResource.Project
};
};
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
  TGitLabDataSourceWithConnection,
  TQueueGitLabResourceDiffScan["payload"]
> = async ({ dataSource, payload, resourceName, configPath }) => {
  const client = await getGitLabConnectionClient(dataSource.connection, appConnectionDAL, kmsService);
  const { commits, project } = payload;

  const collected: SecretMatch[] = [];

  for (const commit of commits) {
    // Commits are processed sequentially to avoid bursting the GitLab API.
    // eslint-disable-next-line no-await-in-loop
    const fileDiffs = await client.Commits.showDiff(project.id, commit.id);

    for (const fileDiff of fileDiffs) {
      // A deleted file cannot introduce a new secret.
      // eslint-disable-next-line no-continue
      if (fileDiff.deletedFile) continue;

      // Scan only the patch content; unchanged lines are blanked out so the
      // reported positions stay aligned with the patch line numbering.
      // eslint-disable-next-line no-await-in-loop
      const rawFindings = await scanContentAndGetFindings(
        replaceNonChangesWithNewlines(`\n${fileDiff.diff}`),
        configPath
      );

      for (const rawFinding of rawFindings) {
        // Translate patch-relative line numbers into file line numbers.
        const fileStartLine = convertPatchLineToFileLineNumber(fileDiff.diff, rawFinding.StartLine);
        const fileEndLine =
          rawFinding.StartLine === rawFinding.EndLine
            ? fileStartLine
            : convertPatchLineToFileLineNumber(fileDiff.diff, rawFinding.EndLine);

        // Shift columns left by one to drop the leading "+" diff marker.
        const fileStartColumn = rawFinding.StartColumn - 1;
        const fileEndColumn = rawFinding.EndColumn - 1;

        collected.push({
          ...rawFinding,
          StartLine: fileStartLine,
          EndLine: fileEndLine,
          StartColumn: fileStartColumn,
          EndColumn: fileEndColumn,
          File: fileDiff.newPath,
          Commit: commit.id,
          Author: commit.author.name,
          Email: commit.author.email,
          Message: commit.message,
          Fingerprint: `${commit.id}:${fileDiff.newPath}:${rawFinding.RuleID}:${fileStartLine}:${fileStartColumn}`,
          Date: commit.timestamp,
          // NOTE(review): link assumes gitlab.com — a self-hosted instance would need its base URL; confirm.
          Link: `https://gitlab.com/${resourceName}/blob/${commit.id}/${fileDiff.newPath}#L${fileStartLine}`
        });
      }
    }
  }

  // Strip the matched text and the raw secret before the findings are persisted.
  return collected.map(({ Match, Secret, ...finding }) => ({
    details: titleCaseToCamelCase(finding),
    fingerprint: finding.Fingerprint,
    severity: SecretScanningFindingSeverity.High,
    rule: finding.RuleID
  }));
};
const validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<
  TGitLabDataSourceInput["config"],
  TGitLabDataSourceWithConnection
> = async ({ config, dataSource }) => {
  // The scope (group vs. project) is fixed at creation time; reject any update
  // that attempts to switch it.
  const scopeUnchanged = dataSource.config.scope === config.scope;
  if (!scopeUnchanged) {
    throw new BadRequestError({ message: "Cannot change Data Source scope after creation." });
  }
};
return {
listRawResources,
getFullScanPath,
initialize,
postInitialization,
teardown,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
validateConfigUpdate
};
};

View File

@@ -0,0 +1,101 @@
import { z } from "zod";
import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitLabProjectRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
// Configuration for a GitLab secret scanning data source, discriminated on
// "scope": either a whole group (with a project include filter) or a single project.
export const GitLabDataSourceConfigSchema = z.discriminatedUnion("scope", [
  z.object({
    scope: z.literal(GitLabDataSourceScope.Group).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
    groupId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.groupId),
    groupName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.groupName),
    // Project names within the group to scan; the literal "*" matches all projects.
    includeProjects: z
      .array(
        z
          .string()
          .min(1)
          .max(256)
          .refine((value) => value === "*" || GitLabProjectRegex.test(value), "Invalid project name format")
      )
      .nonempty("One or more projects required")
      .max(100, "Cannot configure more than 100 projects")
      .default(["*"])
      .describe(SecretScanningDataSources.CONFIG.GITLAB.includeProjects)
  }),
  z.object({
    scope: z.literal(GitLabDataSourceScope.Project).describe(SecretScanningDataSources.CONFIG.GITLAB.scope),
    projectName: z.string().trim().max(256).optional().describe(SecretScanningDataSources.CONFIG.GITLAB.projectName),
    projectId: z.number().describe(SecretScanningDataSources.CONFIG.GITLAB.projectId)
  })
]);
// Full (read) schema for a persisted GitLab data source: the shared base
// fields plus the GitLab-specific config.
export const GitLabDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitLab,
  isConnectionRequired: true
})
  .extend({
    config: GitLabDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );
// Request schema for creating a GitLab data source; config is required.
export const CreateGitLabDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitLab,
  isConnectionRequired: true
})
  .extend({
    config: GitLabDataSourceConfigSchema
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );
// Request schema for updating a GitLab data source; config is optional on
// update (scope changes are rejected at the service layer).
export const UpdateGitLabDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitLab)
  .extend({
    config: GitLabDataSourceConfigSchema.optional()
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );
// Static catalog entry describing the GitLab data source option (used when
// listing available data source types).
export const GitLabDataSourceListItemSchema = z
  .object({
    name: z.literal("GitLab"),
    connection: z.literal(AppConnection.GitLab),
    type: z.literal(SecretScanningDataSource.GitLab)
  })
  .describe(
    JSON.stringify({
      title: "GitLab"
    })
  );
// Finding schema for GitLab scans; resources are GitLab projects and details
// reuse the shared git-repository finding shape.
export const GitLabFindingSchema = BaseSecretScanningFindingSchema.extend({
  resourceType: z.literal(SecretScanningResource.Project),
  dataSourceType: z.literal(SecretScanningDataSource.GitLab),
  details: GitRepositoryScanFindingDetailsSchema
});

// Credentials stored (encrypted) per data source.
// token: secret used to validate incoming webhook calls.
// hookId: numeric ID of the webhook — presumably the one registered on GitLab; verify against the factory.
export const GitLabDataSourceCredentialsSchema = z.object({
  token: z.string(),
  hookId: z.number()
});

View File

@@ -0,0 +1,94 @@
import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import {
TGitLabDataSource,
TGitLabDataSourceCredentials,
THandleGitLabPushEvent
} from "./gitlab-secret-scanning-types";
/**
 * GitLab-specific secret scanning service.
 *
 * Exposes `handlePushEvent`, which processes an incoming GitLab push webhook:
 * it validates the webhook token against the data source's stored (encrypted)
 * credentials and, when the pushed project matches the data source config,
 * queues a resource diff scan.
 */
export const gitlabSecretScanningService = (
  secretScanningV2DAL: TSecretScanningV2DALFactory,
  secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  const handlePushEvent = async ({ payload, token, dataSourceId }: THandleGitLabPushEvent) => {
    // Ignore pushes with no commits or missing project info (nothing to scan).
    if (!payload.total_commits_count || !payload.project) {
      logger.warn(
        `secretScanningV2PushEvent: GitLab - Insufficient data [changes=${
          payload.total_commits_count ?? 0
        }] [projectName=${payload.project?.path_with_namespace ?? "unknown"}] [projectId=${payload.project?.id ?? "unknown"}]`
      );
      return;
    }

    const dataSource = (await secretScanningV2DAL.dataSources.findOne({
      id: dataSourceId,
      type: SecretScanningDataSource.GitLab
    })) as TGitLabDataSource | undefined;

    if (!dataSource) {
      logger.error(
        `secretScanningV2PushEvent: GitLab - Could not find data source [dataSourceId=${dataSourceId}] [projectId=${payload.project.id}]`
      );
      return;
    }

    const { isAutoScanEnabled, config, encryptedCredentials, projectId } = dataSource;

    // Without stored credentials there is no webhook token to verify against.
    if (!encryptedCredentials) {
      logger.info(
        `secretScanningV2PushEvent: GitLab - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    // Decrypt the stored credentials with the project's secret-manager data key.
    const { decryptor } = await kmsService.createCipherPairWithDataKey({
      type: KmsDataKey.SecretManager,
      projectId
    });

    const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });
    const credentials = JSON.parse(decryptedCredentials.toString()) as TGitLabDataSourceCredentials;

    // Reject requests whose webhook token does not match the stored one.
    // NOTE(review): this is a non-constant-time string comparison of an
    // attacker-supplied token — consider a timing-safe compare; confirm with
    // the project's crypto conventions.
    if (token !== credentials.token) {
      logger.error(
        `secretScanningV2PushEvent: GitLab - Invalid webhook token [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    if (!isAutoScanEnabled) {
      logger.info(
        `secretScanningV2PushEvent: GitLab - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
      return;
    }

    // Project scope: the pushed project must be the configured one.
    // Group scope: the project must be included (or the filter is "*").
    if (
      config.scope === GitLabDataSourceScope.Project
        ? config.projectId.toString() === payload.project_id.toString()
        : config.includeProjects.includes("*") || config.includeProjects.includes(payload.project.path_with_namespace)
    ) {
      await secretScanningV2Queue.queueResourceDiffScan({
        dataSourceType: SecretScanningDataSource.GitLab,
        payload,
        dataSourceId: dataSource.id
      });
    } else {
      logger.info(
        `secretScanningV2PushEvent: GitLab - ignoring due to repository not being present in config [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]`
      );
    }
  };

  return {
    handlePushEvent
  };
};

View File

@@ -0,0 +1,97 @@
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitLabConnection } from "@app/services/app-connection/gitlab";
import {
CreateGitLabDataSourceSchema,
GitLabDataSourceCredentialsSchema,
GitLabDataSourceListItemSchema,
GitLabDataSourceSchema,
GitLabFindingSchema
} from "./gitlab-secret-scanning-schemas";
// Types inferred from the GitLab data source zod schemas.
export type TGitLabDataSource = z.infer<typeof GitLabDataSourceSchema>;

export type TGitLabDataSourceInput = z.infer<typeof CreateGitLabDataSourceSchema>;

export type TGitLabDataSourceListItem = z.infer<typeof GitLabDataSourceListItemSchema>;

export type TGitLabFinding = z.infer<typeof GitLabFindingSchema>;

// A data source joined with its GitLab app connection.
export type TGitLabDataSourceWithConnection = TGitLabDataSource & {
  connection: TGitLabConnection;
};

// Decrypted credentials stored for a GitLab data source (webhook token + hook ID).
export type TGitLabDataSourceCredentials = z.infer<typeof GitLabDataSourceCredentialsSchema>;
// Shape of a GitLab push webhook event payload (object_kind/event_name "push")
// as consumed by the secret scanning service. Field names mirror GitLab's
// webhook JSON (snake_case).
export type TGitLabDataSourcePushEventPayload = {
  object_kind: "push";
  event_name: "push";
  // SHA of the commit before/after the push; ref is the pushed branch/tag ref.
  before: string;
  after: string;
  ref: string;
  ref_protected: boolean;
  checkout_sha: string;
  user_id: number;
  user_name: string;
  user_username: string;
  user_email: string;
  user_avatar: string;
  project_id: number;
  project: {
    id: number;
    name: string;
    description: string;
    web_url: string;
    avatar_url: string | null;
    git_ssh_url: string;
    git_http_url: string;
    namespace: string;
    visibility_level: number;
    // "group/project" path — used to match against configured include filters.
    path_with_namespace: string;
    default_branch: string;
    homepage: string;
    url: string;
    ssh_url: string;
    http_url: string;
  };
  repository: {
    name: string;
    url: string;
    description: string;
    homepage: string;
    git_http_url: string;
    git_ssh_url: string;
    visibility_level: number;
  };
  // Commits included in the push; each is diff-scanned individually.
  commits: {
    id: string;
    message: string;
    title: string;
    timestamp: string;
    url: string;
    author: {
      name: string;
      email: string;
    };
    added: string[];
    modified: string[];
    removed: string[];
  }[];
  total_commits_count: number;
};
// Arguments for the service's push-event handler: the webhook payload, the
// targeted data source, and the webhook token to verify.
export type THandleGitLabPushEvent = {
  payload: TGitLabDataSourcePushEventPayload;
  dataSourceId: string;
  token: string;
};

// Queue job payload for a GitLab resource diff scan.
export type TQueueGitLabResourceDiffScan = {
  dataSourceType: SecretScanningDataSource.GitLab;
  payload: TGitLabDataSourcePushEventPayload;
  dataSourceId: string;
  resourceId: string;
  scanId: string;
};

View File

@@ -0,0 +1,3 @@
// Barrel file re-exporting the GitLab secret scanning module's public surface.
export * from "./gitlab-secret-scanning-constants";
export * from "./gitlab-secret-scanning-schemas";
export * from "./gitlab-secret-scanning-types";

View File

@@ -1,6 +1,7 @@
export enum SecretScanningDataSource {
GitHub = "github",
Bitbucket = "bitbucket"
Bitbucket = "bitbucket",
GitLab = "gitlab"
}
export enum SecretScanningScanStatus {

View File

@@ -1,5 +1,6 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { GitLabSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory";
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
@@ -19,5 +20,6 @@ type TSecretScanningFactoryImplementation = TSecretScanningFactory<
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation,
[SecretScanningDataSource.GitLab]: GitLabSecretScanningFactory as TSecretScanningFactoryImplementation
};

View File

@@ -13,6 +13,7 @@ import {
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/gitlab";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
@@ -23,7 +24,8 @@ import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListIte
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
[SecretScanningDataSource.GitLab]: GITLAB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};
export const listSecretScanningDataSourceOptions = () => {

View File

@@ -3,15 +3,18 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
[SecretScanningDataSource.GitHub]: "GitHub",
[SecretScanningDataSource.Bitbucket]: "Bitbucket"
[SecretScanningDataSource.Bitbucket]: "Bitbucket",
[SecretScanningDataSource.GitLab]: "GitLab"
};
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket,
[SecretScanningDataSource.GitLab]: AppConnection.GitLab
};
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" },
[SecretScanningDataSource.GitLab]: { verb: "push", noun: "projects" }
};

View File

@@ -16,6 +16,7 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
@@ -48,6 +49,7 @@ type TSecretRotationV2QueueServiceFactoryDep = {
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
projectDAL: Pick<TProjectDALFactory, "findById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
};
@@ -62,7 +64,8 @@ export const secretScanningV2QueueServiceFactory = async ({
smtpService,
kmsService,
auditLogService,
keyStore
keyStore,
appConnectionDAL
}: TSecretRotationV2QueueServiceFactoryDep) => {
const queueDataSourceFullScan = async (
dataSource: TSecretScanningDataSourceWithConnection,
@@ -71,7 +74,10 @@ export const secretScanningV2QueueServiceFactory = async ({
try {
const { type } = dataSource;
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
const factory = SECRET_SCANNING_FACTORY_MAP[type]({
kmsService,
appConnectionDAL
});
const rawResources = await factory.listRawResources(dataSource);
@@ -171,7 +177,10 @@ export const secretScanningV2QueueServiceFactory = async ({
let connection: TAppConnection | null = null;
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
kmsService,
appConnectionDAL
});
const findingsPath = join(tempFolder, "findings.json");
@@ -329,7 +338,10 @@ export const secretScanningV2QueueServiceFactory = async ({
dataSourceId,
dataSourceType
}: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]({
kmsService,
appConnectionDAL
});
const resourcePayload = factory.getDiffScanResourcePayload(payload);
@@ -391,7 +403,10 @@ export const secretScanningV2QueueServiceFactory = async ({
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]({
kmsService,
appConnectionDAL
});
const tempFolder = await createTempFolder();

View File

@@ -46,6 +46,7 @@ import {
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
@@ -53,12 +54,14 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
import { gitlabSecretScanningService } from "./gitlab/gitlab-secret-scanning-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
export type TSecretScanningV2ServiceFactoryDep = {
secretScanningV2DAL: TSecretScanningV2DALFactory;
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretScanningV2Queue: Pick<
@@ -76,6 +79,7 @@ export const secretScanningV2ServiceFactory = ({
appConnectionService,
licenseService,
secretScanningV2Queue,
appConnectionDAL,
kmsService
}: TSecretScanningV2ServiceFactoryDep) => {
const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
@@ -255,7 +259,10 @@ export const secretScanningV2ServiceFactory = ({
);
}
const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]({
appConnectionDAL,
kmsService
});
try {
const createdDataSource = await factory.initialize(
@@ -363,6 +370,31 @@ export const secretScanningV2ServiceFactory = ({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
let connection: TAppConnection | null = null;
if (dataSource.connectionId) {
// validates permission to connect and app is valid for data source
connection = await appConnectionService.connectAppConnectionById(
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[dataSource.type],
dataSource.connectionId,
actor
);
}
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type]({
appConnectionDAL,
kmsService
});
if (payload.config) {
await factory.validateConfigUpdate({
dataSource: {
...dataSource,
connection
} as TSecretScanningDataSourceWithConnection,
config: payload.config as TSecretScanningDataSourceWithConnection["config"]
});
}
try {
const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);
@@ -416,7 +448,10 @@ export const secretScanningV2ServiceFactory = ({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
const factory = SECRET_SCANNING_FACTORY_MAP[type]({
appConnectionDAL,
kmsService
});
let connection: TAppConnection | null = null;
if (dataSource.connection) {
@@ -903,6 +938,7 @@ export const secretScanningV2ServiceFactory = ({
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService),
gitlab: gitlabSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
};
};

View File

@@ -21,14 +21,25 @@ import {
TGitHubFinding,
TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
import {
TGitLabDataSource,
TGitLabDataSourceCredentials,
TGitLabDataSourceInput,
TGitLabDataSourceListItem,
TGitLabDataSourceWithConnection,
TGitLabFinding,
TQueueGitLabResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/gitlab";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource | TGitLabDataSource;
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
lastScannedAt?: Date | null;
@@ -52,15 +63,25 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
export type TSecretScanningDataSourceWithConnection =
| TGitHubDataSourceWithConnection
| TBitbucketDataSourceWithConnection;
| TBitbucketDataSourceWithConnection
| TGitLabDataSourceWithConnection;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
export type TSecretScanningDataSourceInput =
| TGitHubDataSourceInput
| TBitbucketDataSourceInput
| TGitLabDataSourceInput;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
export type TSecretScanningDataSourceListItem =
| TGitHubDataSourceListItem
| TBitbucketDataSourceListItem
| TGitLabDataSourceListItem;
export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
export type TSecretScanningDataSourceCredentials =
| TBitbucketDataSourceCredentials
| TGitLabDataSourceCredentials
| undefined;
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding | TGitLabFinding;
export type TListSecretScanningDataSourcesByProjectId = {
projectId: string;
@@ -112,7 +133,10 @@ export type TQueueSecretScanningDataSourceFullScan = {
scanId: string;
};
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
export type TQueueSecretScanningResourceDiffScan =
| TQueueGitHubResourceDiffScan
| TQueueBitbucketResourceDiffScan
| TQueueGitLabResourceDiffScan;
export type TQueueSecretScanningSendNotification = {
dataSource: TSecretScanningDataSources;
@@ -170,6 +194,11 @@ export type TSecretScanningFactoryInitialize<
callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;
export type TSecretScanningFactoryValidateConfigUpdate<
C extends TSecretScanningDataSourceInput["config"],
T extends TSecretScanningDataSourceWithConnection
> = (params: { config: C; dataSource: T }) => Promise<void>;
export type TSecretScanningFactoryPostInitialization<
P extends TSecretScanningDataSourceInput,
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
@@ -181,17 +210,23 @@ export type TSecretScanningFactoryTeardown<
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { dataSource: T; credentials: C }) => Promise<void>;
export type TSecretScanningFactoryParams = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TSecretScanningFactory<
T extends TSecretScanningDataSourceWithConnection,
P extends TQueueSecretScanningResourceDiffScan["payload"],
I extends TSecretScanningDataSourceInput,
C extends TSecretScanningDataSourceCredentials | undefined = undefined
> = () => {
> = (params: TSecretScanningFactoryParams) => {
listRawResources: TSecretScanningFactoryListRawResources<T>;
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
teardown: TSecretScanningFactoryTeardown<T, C>;
validateConfigUpdate: TSecretScanningFactoryValidateConfigUpdate<I["config"], T>;
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};

View File

@@ -2,10 +2,12 @@ import { z } from "zod";
import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
import { GitLabDataSourceSchema, GitLabFindingSchema } from "@app/ee/services/secret-scanning-v2/gitlab";
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
GitHubDataSourceSchema,
BitbucketDataSourceSchema
BitbucketDataSourceSchema,
GitLabDataSourceSchema
]);
export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
@@ -18,5 +20,10 @@ export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType"
JSON.stringify({
title: "Bitbucket"
})
),
GitLabFindingSchema.describe(
JSON.stringify({
title: "GitLab"
})
)
]);

View File

@@ -46,7 +46,11 @@ export const KeyStorePrefixes = {
IdentityAccessTokenStatusUpdate: (identityAccessTokenId: string) =>
`identity-access-token-status:${identityAccessTokenId}`,
ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`,
GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`
GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`,
ActiveSSEConnectionsSet: (projectId: string, identityId: string) =>
`sse-connections:${projectId}:${identityId}` as const,
ActiveSSEConnections: (projectId: string, identityId: string, connectionId: string) =>
`sse-connections:${projectId}:${identityId}:${connectionId}` as const
};
export const KeyStoreTtls = {

View File

@@ -664,6 +664,10 @@ export const ORGANIZATIONS = {
organizationId: "The ID of the organization to delete the membership from.",
membershipId: "The ID of the membership to delete."
},
BULK_DELETE_USER_MEMBERSHIPS: {
organizationId: "The ID of the organization to delete the memberships from.",
membershipIds: "The IDs of the memberships to delete."
},
LIST_IDENTITY_MEMBERSHIPS: {
orgId: "The ID of the organization to get identity memberships from.",
offset: "The offset to start from. If you enter 10, it will start from the 10th identity membership.",
@@ -2253,7 +2257,9 @@ export const AppConnections = {
AZURE_DEVOPS: {
code: "The OAuth code to use to connect with Azure DevOps.",
tenantId: "The Tenant ID to use to connect with Azure DevOps.",
orgName: "The Organization name to use to connect with Azure DevOps."
orgName: "The Organization name to use to connect with Azure DevOps.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
OCI: {
userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
@@ -2296,6 +2302,9 @@ export const AppConnections = {
DIGITAL_OCEAN_APP_PLATFORM: {
apiToken: "The API token used to authenticate with Digital Ocean App Platform."
},
NETLIFY: {
accessToken: "The Access token used to authenticate with Netlify."
},
OKTA: {
instanceUrl: "The URL used to access your Okta organization.",
apiToken: "The API token used to authenticate with Okta."
@@ -2400,12 +2409,18 @@ export const SecretSyncs = {
env: "The name of the GitHub environment."
},
AZURE_KEY_VAULT: {
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
AZURE_APP_CONFIGURATION: {
configurationUrl:
"The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
label: "An optional label to assign to secrets created in Azure App Configuration."
label: "An optional label to assign to secrets created in Azure App Configuration.",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets.",
clientId: "The Client ID to use to connect with Azure Client Secrets.",
clientSecret: "The Client Secret to use to connect with Azure Client Secrets."
},
AZURE_DEVOPS: {
devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",
@@ -2521,6 +2536,13 @@ export const SecretSyncs = {
workspaceSlug: "The Bitbucket Workspace slug to sync secrets to.",
repositorySlug: "The Bitbucket Repository slug to sync secrets to.",
environmentId: "The Bitbucket Deployment Environment uuid to sync secrets to."
},
NETLIFY: {
accountId: "The ID of the Netlify account to sync secrets to.",
accountName: "The name of the Netlify account to sync secrets to.",
siteName: "The name of the Netlify site to sync secrets to.",
siteId: "The ID of the Netlify site to sync secrets to.",
context: "The Netlify context to sync secrets to."
}
}
};
@@ -2702,6 +2724,14 @@ export const SecretScanningDataSources = {
GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
},
GITLAB: {
includeProjects: 'The projects to include when scanning. Defaults to all projects (["*"]).',
scope: "The GitLab scope scanning should occur at (project or group level).",
projectId: "The ID of the project to scan.",
projectName: "The name of the project to scan.",
groupId: "The ID of the group to scan projects from.",
groupName: "The name of the group to scan projects from."
},
BITBUCKET: {
workspaceSlug: "The workspace to scan.",
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'

View File

@@ -496,7 +496,7 @@ export const overwriteSchema: {
]
},
azureAppConfiguration: {
name: "Azure App Configuration",
name: "Azure App Connection: App Configuration",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
@@ -509,7 +509,7 @@ export const overwriteSchema: {
]
},
azureKeyVault: {
name: "Azure Key Vault",
name: "Azure App Connection: Key Vault",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
@@ -522,7 +522,7 @@ export const overwriteSchema: {
]
},
azureClientSecrets: {
name: "Azure Client Secrets",
name: "Azure App Connection: Client Secrets",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
@@ -535,7 +535,7 @@ export const overwriteSchema: {
]
},
azureDevOps: {
name: "Azure DevOps",
name: "Azure App Connection: DevOps",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",

View File

@@ -11,3 +11,5 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
export const GitLabProjectRegex = new RE2(/^[a-zA-Z0-9._-]+(?:\/[a-zA-Z0-9._-]+)+$/);

View File

@@ -22,6 +22,7 @@ import { crypto } from "@app/lib/crypto";
import { logger } from "@app/lib/logger";
import { QueueWorkerProfile } from "@app/lib/types";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { ExternalPlatforms } from "@app/services/external-migration/external-migration-types";
import {
TFailedIntegrationSyncEmailsPayload,
TIntegrationSyncPayload,
@@ -228,6 +229,7 @@ export type TQueueJobTypes = {
name: QueueJobs.ImportSecretsFromExternalSource;
payload: {
actorEmail: string;
importType: ExternalPlatforms;
data: {
iv: string;
tag: string;

View File

@@ -22,6 +22,7 @@ export type TAuthMode =
orgId: string;
authMethod: AuthMethod;
isMfaVerified?: boolean;
token: AuthModeJwtTokenPayload;
}
| {
authMode: AuthMode.API_KEY;
@@ -30,6 +31,7 @@ export type TAuthMode =
userId: string;
user: TUsers;
orgId: string;
token: string;
}
| {
authMode: AuthMode.SERVICE_TOKEN;
@@ -38,6 +40,7 @@ export type TAuthMode =
serviceTokenId: string;
orgId: string;
authMethod: null;
token: string;
}
| {
authMode: AuthMode.IDENTITY_ACCESS_TOKEN;
@@ -47,6 +50,7 @@ export type TAuthMode =
orgId: string;
authMethod: null;
isInstanceAdmin?: boolean;
token: TIdentityAccessTokenJwtPayload;
}
| {
authMode: AuthMode.SCIM_TOKEN;
@@ -56,7 +60,7 @@ export type TAuthMode =
authMethod: null;
};
const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
const apiKey = req.headers?.["x-api-key"];
if (apiKey) {
return { authMode: AuthMode.API_KEY, token: apiKey, actor: ActorType.USER } as const;
@@ -133,7 +137,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
actor,
orgId: orgId as string,
authMethod: token.authMethod,
isMfaVerified: token.isMfaVerified
isMfaVerified: token.isMfaVerified,
token
};
break;
}
@@ -148,7 +153,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
identityId: identity.identityId,
identityName: identity.name,
authMethod: null,
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId)
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
token
};
if (token?.identityAuth?.oidc) {
requestContext.set("identityAuthInfo", {
@@ -179,7 +185,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
serviceToken,
serviceTokenId: serviceToken.id,
actor,
authMethod: null
authMethod: null,
token
};
break;
}
@@ -191,7 +198,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
actor,
user,
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
authMethod: null
authMethod: null,
token: token as string
};
break;
}

View File

@@ -4,6 +4,8 @@ import { Probot } from "probot";
import { z } from "zod";
import { TBitbucketPushEvent } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-types";
import { TGitLabDataSourcePushEventPayload } from "@app/ee/services/secret-scanning-v2/gitlab";
import { GitLabWebHookEvent } from "@app/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-enums";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";
@@ -113,4 +115,36 @@ export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvide
return res.send("ok");
}
});
// gitlab push event webhook
server.route({
method: "POST",
url: "/gitlab",
config: {
rateLimit: writeLimit
},
handler: async (req, res) => {
const event = req.headers["x-gitlab-event"] as GitLabWebHookEvent;
const token = req.headers["x-gitlab-token"] as string;
const dataSourceId = req.headers["x-data-source-id"] as string;
if (event !== GitLabWebHookEvent.Push) {
return res.status(400).send({ message: `Event type not supported: ${event as string}` });
}
if (!token) {
return res.status(401).send({ message: "Unauthorized: Missing token" });
}
if (!dataSourceId) return res.status(400).send({ message: "Data Source ID header is required" });
await server.services.secretScanningV2.gitlab.handlePushEvent({
dataSourceId,
payload: req.body as TGitLabDataSourcePushEventPayload,
token
});
return res.send("ok");
}
});
};

View File

@@ -31,6 +31,8 @@ import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/pro
import { dynamicSecretLeaseDALFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-dal";
import { dynamicSecretLeaseQueueServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue";
import { dynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { eventBusFactory } from "@app/ee/services/event/event-bus-service";
import { sseServiceFactory } from "@app/ee/services/event/event-sse-service";
import { externalKmsDALFactory } from "@app/ee/services/external-kms/external-kms-dal";
import { externalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { gatewayDALFactory } from "@app/ee/services/gateway/gateway-dal";
@@ -495,6 +497,9 @@ export const registerRoutes = async (
const projectMicrosoftTeamsConfigDAL = projectMicrosoftTeamsConfigDALFactory(db);
const secretScanningV2DAL = secretScanningV2DALFactory(db);
const eventBusService = eventBusFactory(server.redis);
const sseService = sseServiceFactory(eventBusService, server.redis);
const permissionService = permissionServiceFactory({
permissionDAL,
orgRoleDAL,
@@ -552,7 +557,8 @@ export const registerRoutes = async (
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
auditLogStreamDAL,
eventBusService
});
const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
@@ -1933,7 +1939,8 @@ export const registerRoutes = async (
projectMembershipDAL,
smtpService,
kmsService,
keyStore
keyStore,
appConnectionDAL
});
const secretScanningV2Service = secretScanningV2ServiceFactory({
@@ -1942,7 +1949,8 @@ export const registerRoutes = async (
licenseService,
secretScanningV2DAL,
secretScanningV2Queue,
kmsService
kmsService,
appConnectionDAL
});
// setup the communication with license key server
@@ -1966,6 +1974,7 @@ export const registerRoutes = async (
await kmsService.startService();
await microsoftTeamsService.start();
await dynamicSecretQueueService.init();
await eventBusService.init();
// inject all services
server.decorate<FastifyZodProvider["services"]>("services", {
@@ -2072,7 +2081,9 @@ export const registerRoutes = async (
githubOrgSync: githubOrgSyncConfigService,
folderCommit: folderCommitService,
secretScanningV2: secretScanningV2Service,
reminder: reminderService
reminder: reminderService,
bus: eventBusService,
sse: sseService
});
const cronJobs: CronJob[] = [];
@@ -2188,5 +2199,7 @@ export const registerRoutes = async (
server.addHook("onClose", async () => {
cronJobs.forEach((job) => job.stop());
await telemetryService.flushAll();
await eventBusService.close();
sseService.close();
});
};

View File

@@ -464,6 +464,42 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "DELETE",
url: "/user-management/users",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
userIds: z.string().array()
}),
response: {
200: z.object({
users: UsersSchema.pick({
username: true,
firstName: true,
lastName: true,
email: true,
id: true
}).array()
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const users = await server.services.superAdmin.deleteUsers(req.body.userIds);
return {
users
};
}
});
server.route({
method: "PATCH",
url: "/user-management/users/:userId/admin-access",

View File

@@ -75,6 +75,10 @@ import {
import { LdapConnectionListItemSchema, SanitizedLdapConnectionSchema } from "@app/services/app-connection/ldap";
import { MsSqlConnectionListItemSchema, SanitizedMsSqlConnectionSchema } from "@app/services/app-connection/mssql";
import { MySqlConnectionListItemSchema, SanitizedMySqlConnectionSchema } from "@app/services/app-connection/mysql";
import {
NetlifyConnectionListItemSchema,
SanitizedNetlifyConnectionSchema
} from "@app/services/app-connection/netlify";
import { OktaConnectionListItemSchema, SanitizedOktaConnectionSchema } from "@app/services/app-connection/okta";
import {
PostgresConnectionListItemSchema,
@@ -145,6 +149,7 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedChecklyConnectionSchema.options,
...SanitizedSupabaseConnectionSchema.options,
...SanitizedDigitalOceanConnectionSchema.options,
...SanitizedNetlifyConnectionSchema.options,
...SanitizedOktaConnectionSchema.options
]);
@@ -184,6 +189,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
ChecklyConnectionListItemSchema,
SupabaseConnectionListItemSchema,
DigitalOceanConnectionListItemSchema,
NetlifyConnectionListItemSchema,
OktaConnectionListItemSchema
]);

View File

@@ -46,7 +46,6 @@ export const registerCloudflareConnectionRouter = async (server: FastifyZodProvi
const { connectionId } = req.params;
const projects = await server.services.appConnection.cloudflare.listPagesProjects(connectionId, req.permission);
return projects;
}
});
@@ -73,9 +72,36 @@ export const registerCloudflareConnectionRouter = async (server: FastifyZodProvi
handler: async (req) => {
const { connectionId } = req.params;
const projects = await server.services.appConnection.cloudflare.listWorkersScripts(connectionId, req.permission);
const scripts = await server.services.appConnection.cloudflare.listWorkersScripts(connectionId, req.permission);
return scripts;
}
});
return projects;
server.route({
method: "GET",
url: `/:connectionId/cloudflare-zones`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const zones = await server.services.appConnection.cloudflare.listZones(connectionId, req.permission);
return zones;
}
});
};

View File

@@ -26,6 +26,7 @@ import { registerHumanitecConnectionRouter } from "./humanitec-connection-router
import { registerLdapConnectionRouter } from "./ldap-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
import { registerNetlifyConnectionRouter } from "./netlify-connection-router";
import { registerOktaConnectionRouter } from "./okta-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerRailwayConnectionRouter } from "./railway-connection-router";
@@ -76,5 +77,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.Checkly]: registerChecklyConnectionRouter,
[AppConnection.Supabase]: registerSupabaseConnectionRouter,
[AppConnection.DigitalOcean]: registerDigitalOceanConnectionRouter,
[AppConnection.Netlify]: registerNetlifyConnectionRouter,
[AppConnection.Okta]: registerOktaConnectionRouter
};

View File

@@ -0,0 +1,87 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateNetlifyConnectionSchema,
SanitizedNetlifyConnectionSchema,
UpdateNetlifyConnectionSchema
} from "@app/services/app-connection/netlify";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerNetlifyConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Netlify,
server,
sanitizedResponseSchema: SanitizedNetlifyConnectionSchema,
createSchema: CreateNetlifyConnectionSchema,
updateSchema: UpdateNetlifyConnectionSchema
});
// The below endpoints are not exposed and for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/accounts`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
accounts: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const accounts = await server.services.appConnection.netlify.listAccounts(connectionId, req.permission);
return { accounts };
}
});
server.route({
method: "GET",
url: `/:connectionId/accounts/:accountId/sites`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid(),
accountId: z.string()
}),
response: {
200: z.object({
sites: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId, accountId } = req.params;
const sites = await server.services.appConnection.netlify.listSites(connectionId, req.permission, accountId);
return { sites };
}
});
};

View File

@@ -0,0 +1,118 @@
/* eslint-disable @typescript-eslint/no-floating-promises */
import { subject } from "@casl/ability";
import { pipeline } from "stream/promises";
import { z } from "zod";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { getServerSentEventsHeaders } from "@app/ee/services/event/event-sse-stream";
import { EventRegisterSchema } from "@app/ee/services/event/types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, RateLimitError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerEventRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/subscribe/project-events",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
projectId: z.string().trim(),
register: z.array(EventRegisterSchema).max(10)
})
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req, reply) => {
try {
const { sse, permission, identityAccessToken, authToken, license } = req.server.services;
const plan = await license.getPlan(req.auth.orgId);
if (!plan.eventSubscriptions) {
throw new BadRequestError({
message:
"Failed to use event subscriptions due to plan restriction. Upgrade plan to access enterprise event subscriptions."
});
}
const count = await sse.getActiveConnectionsCount(req.body.projectId, req.permission.id);
if (count >= 5) {
throw new RateLimitError({
message: `Too many active connections for project ${req.body.projectId}. Please close some connections before opening a new one.`
});
}
const client = await sse.subscribe({
type: ProjectType.SecretManager,
registered: req.body.register,
async getAuthInfo() {
const ability = await permission.getProjectPermission({
actor: req.auth.actor,
projectId: req.body.projectId,
actionProjectType: ActionProjectType.Any,
actorAuthMethod: req.auth.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId
});
return { permission: ability.permission, actorId: req.permission.id, projectId: req.body.projectId };
},
async onAuthRefresh(info) {
switch (req.auth.authMode) {
case AuthMode.JWT:
await authToken.fnValidateJwtIdentity(req.auth.token);
break;
case AuthMode.IDENTITY_ACCESS_TOKEN:
await identityAccessToken.fnValidateIdentityAccessToken(req.auth.token, req.realIp);
break;
default:
throw new Error("Unsupported authentication method");
}
req.body.register.forEach((r) => {
const allowed = info.permission.can(
ProjectPermissionSecretActions.Subscribe,
subject(ProjectPermissionSub.Secrets, {
environment: r.conditions?.environmentSlug ?? "",
secretPath: r.conditions?.secretPath ?? "/",
eventType: r.event
})
);
if (!allowed) {
throw new ForbiddenRequestError({
name: "PermissionDenied",
message: `You are not allowed to subscribe on secrets`,
details: {
event: r.event,
environmentSlug: r.conditions?.environmentSlug,
secretPath: r.conditions?.secretPath ?? "/"
}
});
}
});
}
});
// Switches to manual response and enable SSE streaming
reply.hijack();
reply.raw.writeHead(200, getServerSentEventsHeaders()).flushHeaders();
reply.raw.on("close", client.abort);
await pipeline(client.stream, reply.raw, { signal: client.signal });
} catch (error) {
if (error instanceof Error && error.name === "AbortError") {
// If the stream is aborted, we don't need to do anything
return;
}
throw error;
}
}
});
};

View File

@@ -13,6 +13,7 @@ import { registerCaRouter } from "./certificate-authority-router";
import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers";
import { registerCertRouter } from "./certificate-router";
import { registerCertificateTemplateRouter } from "./certificate-template-router";
import { registerEventRouter } from "./event-router";
import { registerExternalGroupOrgRoleMappingRouter } from "./external-group-org-role-mapping-router";
import { registerIdentityAccessTokenRouter } from "./identity-access-token-router";
import { registerIdentityAliCloudAuthRouter } from "./identity-alicloud-auth-router";
@@ -183,4 +184,6 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
},
{ prefix: "/reminders" }
);
await server.register(registerEventRouter, { prefix: "/events" });
};

View File

@@ -21,6 +21,7 @@ import { registerGitLabSyncRouter } from "./gitlab-sync-router";
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
import { registerHerokuSyncRouter } from "./heroku-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerNetlifySyncRouter } from "./netlify-sync-router";
import { registerRailwaySyncRouter } from "./railway-sync-router";
import { registerRenderSyncRouter } from "./render-sync-router";
import { registerSupabaseSyncRouter } from "./supabase-sync-router";
@@ -61,5 +62,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.Railway]: registerRailwaySyncRouter,
[SecretSync.Checkly]: registerChecklySyncRouter,
[SecretSync.DigitalOceanAppPlatform]: registerDigitalOceanAppPlatformSyncRouter,
[SecretSync.Netlify]: registerNetlifySyncRouter,
[SecretSync.Bitbucket]: registerBitbucketSyncRouter
};

View File

@@ -0,0 +1,17 @@
import {
CreateNetlifySyncSchema,
NetlifySyncSchema,
UpdateNetlifySyncSchema
} from "@app/services/secret-sync/netlify/netlify-sync-schemas";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerNetlifySyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Netlify,
server,
responseSchema: NetlifySyncSchema,
createSchema: CreateNetlifySyncSchema,
updateSchema: UpdateNetlifySyncSchema
});

View File

@@ -44,6 +44,7 @@ import { GitLabSyncListItemSchema, GitLabSyncSchema } from "@app/services/secret
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { NetlifySyncListItemSchema, NetlifySyncSchema } from "@app/services/secret-sync/netlify";
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
import { SupabaseSyncListItemSchema, SupabaseSyncSchema } from "@app/services/secret-sync/supabase";
@@ -82,6 +83,7 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
RailwaySyncSchema,
ChecklySyncSchema,
DigitalOceanAppPlatformSyncSchema,
NetlifySyncSchema,
BitbucketSyncSchema
]);
@@ -114,6 +116,7 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
RailwaySyncListItemSchema,
ChecklySyncListItemSchema,
SupabaseSyncListItemSchema,
NetlifySyncListItemSchema,
BitbucketSyncListItemSchema
]);

View File

@@ -264,6 +264,48 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "DELETE",
url: "/:organizationId/memberships",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.Organizations],
description: "Bulk delete organization user memberships",
security: [
{
bearerAuth: []
}
],
params: z.object({
organizationId: z.string().trim().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.organizationId)
}),
body: z.object({
membershipIds: z.string().trim().array().describe(ORGANIZATIONS.BULK_DELETE_USER_MEMBERSHIPS.membershipIds)
}),
response: {
200: z.object({
memberships: OrgMembershipsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
if (req.auth.actor !== ActorType.USER) return;
const memberships = await server.services.org.bulkDeleteOrgMemberships({
userId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
orgId: req.params.organizationId,
membershipIds: req.body.membershipIds,
actorOrgId: req.permission.orgId
});
return { memberships };
}
});
server.route({
// TODO: re-think endpoint structure in future so users only need to pass in membershipId bc organizationId is redundant
method: "GET",

View File

@@ -34,6 +34,7 @@ export enum AppConnection {
Checkly = "checkly",
Supabase = "supabase",
DigitalOcean = "digital-ocean",
Netlify = "netlify",
Okta = "okta"
}

View File

@@ -97,6 +97,7 @@ import { getLdapConnectionListItem, LdapConnectionMethod, validateLdapConnection
import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
import { getNetlifyConnectionListItem, validateNetlifyConnectionCredentials } from "./netlify";
import { getOktaConnectionListItem, OktaConnectionMethod, validateOktaConnectionCredentials } from "./okta";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
@@ -163,6 +164,7 @@ export const listAppConnectionOptions = () => {
getChecklyConnectionListItem(),
getSupabaseConnectionListItem(),
getDigitalOceanConnectionListItem(),
getNetlifyConnectionListItem(),
getOktaConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@@ -251,7 +253,8 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Checkly]: validateChecklyConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Supabase]: validateSupabaseConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.DigitalOcean]: validateDigitalOceanConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Netlify]: validateNetlifyConnectionCredentials as TAppConnectionCredentialsValidator
};
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection, gatewayService);
@@ -381,6 +384,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Checkly]: platformManagedCredentialsNotSupported,
[AppConnection.Supabase]: platformManagedCredentialsNotSupported,
[AppConnection.DigitalOcean]: platformManagedCredentialsNotSupported,
[AppConnection.Netlify]: platformManagedCredentialsNotSupported,
[AppConnection.Okta]: platformManagedCredentialsNotSupported
};

View File

@@ -36,6 +36,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Checkly]: "Checkly",
[AppConnection.Supabase]: "Supabase",
[AppConnection.DigitalOcean]: "DigitalOcean App Platform",
[AppConnection.Netlify]: "Netlify",
[AppConnection.Okta]: "Okta"
};
@@ -75,5 +76,6 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.Checkly]: AppConnectionPlanType.Regular,
[AppConnection.Supabase]: AppConnectionPlanType.Regular,
[AppConnection.DigitalOcean]: AppConnectionPlanType.Regular,
[AppConnection.Netlify]: AppConnectionPlanType.Regular,
[AppConnection.Okta]: AppConnectionPlanType.Regular
};

View File

@@ -81,6 +81,8 @@ import { humanitecConnectionService } from "./humanitec/humanitec-connection-ser
import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
import { ValidateNetlifyConnectionCredentialsSchema } from "./netlify";
import { netlifyConnectionService } from "./netlify/netlify-connection-service";
import { ValidateOktaConnectionCredentialsSchema } from "./okta";
import { oktaConnectionService } from "./okta/okta-connection-service";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
@@ -148,6 +150,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.Checkly]: ValidateChecklyConnectionCredentialsSchema,
[AppConnection.Supabase]: ValidateSupabaseConnectionCredentialsSchema,
[AppConnection.DigitalOcean]: ValidateDigitalOceanConnectionCredentialsSchema,
[AppConnection.Netlify]: ValidateNetlifyConnectionCredentialsSchema,
[AppConnection.Okta]: ValidateOktaConnectionCredentialsSchema
};
@@ -611,6 +614,7 @@ export const appConnectionServiceFactory = ({
checkly: checklyConnectionService(connectAppConnectionById),
supabase: supabaseConnectionService(connectAppConnectionById),
digitalOcean: digitalOceanAppPlatformConnectionService(connectAppConnectionById),
netlify: netlifyConnectionService(connectAppConnectionById),
okta: oktaConnectionService(connectAppConnectionById)
};
};

View File

@@ -149,6 +149,12 @@ import {
} from "./ldap";
import { TMsSqlConnection, TMsSqlConnectionInput, TValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { TMySqlConnection, TMySqlConnectionInput, TValidateMySqlConnectionCredentialsSchema } from "./mysql";
import {
TNetlifyConnection,
TNetlifyConnectionConfig,
TNetlifyConnectionInput,
TValidateNetlifyConnectionCredentialsSchema
} from "./netlify";
import {
TOktaConnection,
TOktaConnectionConfig,
@@ -245,6 +251,7 @@ export type TAppConnection = { id: string } & (
| TChecklyConnection
| TSupabaseConnection
| TDigitalOceanConnection
| TNetlifyConnection
| TOktaConnection
);
@@ -288,6 +295,7 @@ export type TAppConnectionInput = { id: string } & (
| TChecklyConnectionInput
| TSupabaseConnectionInput
| TDigitalOceanConnectionInput
| TNetlifyConnectionInput
| TOktaConnectionInput
);
@@ -339,6 +347,7 @@ export type TAppConnectionConfig =
| TChecklyConnectionConfig
| TSupabaseConnectionConfig
| TDigitalOceanConnectionConfig
| TNetlifyConnectionConfig
| TOktaConnectionConfig;
export type TValidateAppConnectionCredentialsSchema =
@@ -377,6 +386,7 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateChecklyConnectionCredentialsSchema
| TValidateSupabaseConnectionCredentialsSchema
| TValidateDigitalOceanCredentialsSchema
| TValidateNetlifyConnectionCredentialsSchema
| TValidateOktaConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = {

View File

@@ -1,3 +1,4 @@
export enum AzureAppConfigurationConnectionMethod {
OAuth = "oauth"
OAuth = "oauth",
ClientSecret = "client-secret"
}

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";
import { getConfig } from "@app/lib/config/env";
@@ -19,7 +20,10 @@ export const getAzureAppConfigurationConnectionListItem = () => {
return {
name: "Azure App Configuration" as const,
app: AppConnection.AzureAppConfiguration as const,
methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
methods: Object.values(AzureAppConfigurationConnectionMethod) as [
AzureAppConfigurationConnectionMethod.OAuth,
AzureAppConfigurationConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
};
};
@@ -35,71 +39,111 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
SITE_URL
} = getConfig();
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
switch (method) {
case AzureAppConfigurationConnectionMethod.OAuth:
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
return {
tenantId: inputCredentials.tenantId,
tenantId: oauthCredentials.tenantId,
accessToken: tokenResp.data.access_token,
refreshToken: tokenResp.data.refresh_token,
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
};
case AzureAppConfigurationConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
};
try {
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://azconfig.io/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
return {
tenantId,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000,
clientId,
clientSecret
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureAppConfigurationConnectionMethod}`
message: `Unhandled Azure App Configuration connection method: ${method as AzureAppConfigurationConnectionMethod}`
});
}
};

View File

@@ -22,6 +22,29 @@ export const AzureAppConfigurationConnectionOAuthOutputCredentialsSchema = z.obj
expiresAt: z.number()
});
export const AzureAppConfigurationConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long"),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long"),
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
});
export const AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -30,6 +53,14 @@ export const ValidateAzureAppConfigurationConnectionCredentialsSchema = z.discri
credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
)
}),
z.object({
method: z
.literal(AzureAppConfigurationConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureAppConfiguration).method),
credentials: AzureAppConfigurationConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureAppConfiguration).credentials
)
})
]);
@@ -39,9 +70,13 @@ export const CreateAzureAppConfigurationConnectionSchema = ValidateAzureAppConfi
export const UpdateAzureAppConfigurationConnectionSchema = z
.object({
credentials: AzureAppConfigurationConnectionOAuthInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials
)
credentials: z
.union([
AzureAppConfigurationConnectionOAuthInputCredentialsSchema,
AzureAppConfigurationConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureAppConfiguration).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureAppConfiguration));
@@ -55,6 +90,10 @@ export const AzureAppConfigurationConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureAppConfigurationConnectionMethod.OAuth),
credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -65,6 +104,13 @@ export const SanitizedAzureAppConfigurationConnectionSchema = z.discriminatedUni
credentials: AzureAppConfigurationConnectionOAuthOutputCredentialsSchema.pick({
tenantId: true
})
}),
BaseAzureAppConfigurationConnectionSchema.extend({
method: z.literal(AzureAppConfigurationConnectionMethod.ClientSecret),
credentials: AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true
})
})
]);

View File

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema,
AzureAppConfigurationConnectionOAuthOutputCredentialsSchema,
AzureAppConfigurationConnectionSchema,
CreateAzureAppConfigurationConnectionSchema,
@@ -39,3 +40,7 @@ export type ExchangeCodeAzureResponse = {
export type TAzureAppConfigurationConnectionCredentials = z.infer<
typeof AzureAppConfigurationConnectionOAuthOutputCredentialsSchema
>;
export type TAzureAppConfigurationConnectionClientSecretCredentials = z.infer<
typeof AzureAppConfigurationConnectionClientSecretOutputCredentialsSchema
>;

View File

@@ -1,4 +1,5 @@
export enum AzureDevOpsConnectionMethod {
OAuth = "oauth",
AccessToken = "access-token"
AccessToken = "access-token",
ClientSecret = "client-secret"
}

View File

@@ -18,6 +18,7 @@ import { AppConnection } from "../app-connection-enums";
import { AzureDevOpsConnectionMethod } from "./azure-devops-enums";
import {
ExchangeCodeAzureResponse,
TAzureDevOpsConnectionClientSecretCredentials,
TAzureDevOpsConnectionConfig,
TAzureDevOpsConnectionCredentials
} from "./azure-devops-types";
@@ -30,7 +31,8 @@ export const getAzureDevopsConnectionListItem = () => {
app: AppConnection.AzureDevOps as const,
methods: Object.values(AzureDevOpsConnectionMethod) as [
AzureDevOpsConnectionMethod.OAuth,
AzureDevOpsConnectionMethod.AccessToken
AzureDevOpsConnectionMethod.AccessToken,
AzureDevOpsConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
};
@@ -53,11 +55,7 @@ export const getAzureDevopsConnection = async (
});
}
const credentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionCredentials;
const currentTime = Date.now();
// Handle different connection methods
switch (appConnection.method) {
@@ -69,12 +67,17 @@ export const getAzureDevopsConnection = async (
});
}
if (!("refreshToken" in credentials)) {
const oauthCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionCredentials;
if (!("refreshToken" in oauthCredentials)) {
throw new BadRequestError({ message: "Invalid OAuth credentials" });
}
const { refreshToken, tenantId } = credentials;
const currentTime = Date.now();
const { refreshToken, tenantId } = oauthCredentials;
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
@@ -87,29 +90,75 @@ export const getAzureDevopsConnection = async (
})
);
const updatedCredentials = {
...credentials,
const updatedOAuthCredentials = {
...oauthCredentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
credentials: updatedOAuthCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials });
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });
return data.access_token;
case AzureDevOpsConnectionMethod.AccessToken:
if (!("accessToken" in credentials)) {
const accessTokenCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as { accessToken: string };
if (!("accessToken" in accessTokenCredentials)) {
throw new BadRequestError({ message: "Invalid API token credentials" });
}
// For access token, return the basic auth token directly
return credentials.accessToken;
return accessTokenCredentials.accessToken;
case AzureDevOpsConnectionMethod.ClientSecret:
const clientSecretCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureDevOpsConnectionClientSecretCredentials;
const { accessToken, expiresAt, clientId, clientSecret, tenantId: clientTenantId } = clientSecretCredentials;
// Check if token is still valid (with 5 minute buffer)
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
return accessToken;
}
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", clientTenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
const updatedClientCredentials = {
...clientSecretCredentials,
accessToken: clientData.access_token,
expiresAt: currentTime + clientData.expires_in * 1000
};
const encryptedClientCredentials = await encryptAppConnectionCredentials({
credentials: updatedClientCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
return clientData.access_token;
default:
throw new BadRequestError({ message: `Unsupported connection method` });
@@ -138,7 +187,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
let tokenError: AxiosError | null = null;
try {
const oauthCredentials = inputCredentials as { code: string; tenantId: string };
const oauthCredentials = inputCredentials as { code: string; tenantId: string; orgName: string };
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
@@ -262,9 +311,67 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
});
}
case AzureDevOpsConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret, orgName } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
orgName: string;
};
try {
// First, get the access token using client credentials flow
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
// Validate access to the specific organization
const response = await request.get(
`${IntegrationUrls.AZURE_DEVOPS_API_URL}/${encodeURIComponent(orgName)}/_apis/projects?api-version=7.2-preview.2&$top=1`,
{
headers: {
Authorization: `Bearer ${clientData.access_token}`
}
}
);
if (response.status !== 200) {
throw new BadRequestError({
message: `Failed to validate connection to organization '${orgName}': ${response.status}`
});
}
return {
tenantId,
clientId,
clientSecret,
orgName,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to authenticate with Azure DevOps using client credentials: ${
(e?.response?.data as { error_description?: string })?.error_description || e.message
}`
});
} else {
throw new InternalServerError({
message: "Failed to validate Azure DevOps client credentials"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureDevOpsConnectionMethod}`
message: `Unhandled Azure DevOps connection method: ${method as AzureDevOpsConnectionMethod}`
});
}
};

View File

@@ -38,6 +38,42 @@ export const AzureDevOpsConnectionAccessTokenOutputCredentialsSchema = z.object(
orgName: z.string()
});
export const AzureDevOpsConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientId),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.clientSecret),
tenantId: z
.string()
.uuid()
.trim()
.min(1, "Tenant ID required")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.tenantId),
orgName: z
.string()
.trim()
.min(1, "Organization name required")
.describe(AppConnections.CREDENTIALS.AZURE_DEVOPS.orgName)
});
export const AzureDevOpsConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
orgName: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -54,6 +90,14 @@ export const ValidateAzureDevOpsConnectionCredentialsSchema = z.discriminatedUni
credentials: AzureDevOpsConnectionAccessTokenInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
)
}),
z.object({
method: z
.literal(AzureDevOpsConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureDevOps).method),
credentials: AzureDevOpsConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureDevOps).credentials
)
})
]);
@@ -64,7 +108,11 @@ export const CreateAzureDevOpsConnectionSchema = ValidateAzureDevOpsConnectionCr
export const UpdateAzureDevOpsConnectionSchema = z
.object({
credentials: z
.union([AzureDevOpsConnectionOAuthInputCredentialsSchema, AzureDevOpsConnectionAccessTokenInputCredentialsSchema])
.union([
AzureDevOpsConnectionOAuthInputCredentialsSchema,
AzureDevOpsConnectionAccessTokenInputCredentialsSchema,
AzureDevOpsConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureDevOps).credentials)
})
@@ -84,6 +132,10 @@ export const AzureDevOpsConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureDevOpsConnectionMethod.AccessToken),
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -101,6 +153,14 @@ export const SanitizedAzureDevOpsConnectionSchema = z.discriminatedUnion("method
credentials: AzureDevOpsConnectionAccessTokenOutputCredentialsSchema.pick({
orgName: true
})
}),
BaseAzureDevOpsConnectionSchema.extend({
method: z.literal(AzureDevOpsConnectionMethod.ClientSecret),
credentials: AzureDevOpsConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true,
orgName: true
})
})
]);

View File

@@ -52,6 +52,11 @@ const getAuthHeaders = (appConnection: TAzureDevOpsConnection, accessToken: stri
Authorization: `Basic ${basicAuthToken}`,
Accept: "application/json"
};
case AzureDevOpsConnectionMethod.ClientSecret:
return {
Authorization: `Bearer ${accessToken}`,
Accept: "application/json"
};
default:
throw new BadRequestError({ message: "Unsupported connection method" });
}

View File

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureDevOpsConnectionClientSecretOutputCredentialsSchema,
AzureDevOpsConnectionOAuthOutputCredentialsSchema,
AzureDevOpsConnectionSchema,
CreateAzureDevOpsConnectionSchema,
@@ -27,6 +28,10 @@ export type TAzureDevOpsConnectionConfig = DiscriminativePick<
export type TAzureDevOpsConnectionCredentials = z.infer<typeof AzureDevOpsConnectionOAuthOutputCredentialsSchema>;
export type TAzureDevOpsConnectionClientSecretCredentials = z.infer<
typeof AzureDevOpsConnectionClientSecretOutputCredentialsSchema
>;
export interface ExchangeCodeAzureResponse {
token_type: string;
scope: string;

View File

@@ -1,3 +1,4 @@
export enum AzureKeyVaultConnectionMethod {
OAuth = "oauth"
OAuth = "oauth",
ClientSecret = "client-secret"
}

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-case-declarations */
import { AxiosError, AxiosResponse } from "axios";
import { getConfig } from "@app/lib/config/env";
@@ -16,25 +17,16 @@ import { AppConnection } from "../app-connection-enums";
import { AzureKeyVaultConnectionMethod } from "./azure-key-vault-connection-enums";
import {
ExchangeCodeAzureResponse,
TAzureKeyVaultConnectionClientSecretCredentials,
TAzureKeyVaultConnectionConfig,
TAzureKeyVaultConnectionCredentials
} from "./azure-key-vault-connection-types";
export const getAzureConnectionAccessToken = async (
connectionId: string,
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">,
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appCfg = getConfig();
if (
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
}
const appConnection = await appConnectionDAL.findById(connectionId);
if (!appConnection) {
@@ -49,49 +41,101 @@ export const getAzureConnectionAccessToken = async (
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not a valid Azure connection` });
}
const credentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureKeyVaultConnectionCredentials;
const currentTime = Date.now();
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", credentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
refresh_token: credentials.refreshToken
})
);
switch (appConnection.method) {
case AzureKeyVaultConnectionMethod.OAuth:
const appCfg = getConfig();
if (
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
}
const accessExpiresAt = new Date();
accessExpiresAt.setSeconds(accessExpiresAt.getSeconds() + data.expires_in);
const oauthCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureKeyVaultConnectionCredentials;
const updatedCredentials = {
...credentials,
accessToken: data.access_token,
expiresAt: accessExpiresAt.getTime(),
refreshToken: data.refresh_token
};
const { data } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
refresh_token: oauthCredentials.refreshToken
})
);
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId: appConnection.orgId,
kmsService
});
const updatedOAuthCredentials = {
...oauthCredentials,
accessToken: data.access_token,
expiresAt: currentTime + data.expires_in * 1000,
refreshToken: data.refresh_token
};
await appConnectionDAL.update(
{ id: connectionId },
{
encryptedCredentials
}
);
const encryptedOAuthCredentials = await encryptAppConnectionCredentials({
credentials: updatedOAuthCredentials,
orgId: appConnection.orgId,
kmsService
});
return {
accessToken: data.access_token
};
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedOAuthCredentials });
return {
accessToken: data.access_token
};
case AzureKeyVaultConnectionMethod.ClientSecret:
const clientSecretCredentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as TAzureKeyVaultConnectionClientSecretCredentials;
const { accessToken, expiresAt, clientId, clientSecret, tenantId } = clientSecretCredentials;
// Check if token is still valid (with 5 minute buffer)
if (accessToken && expiresAt && expiresAt > currentTime + 300000) {
return { accessToken };
}
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://vault.azure.net/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
const updatedClientCredentials = {
...clientSecretCredentials,
accessToken: clientData.access_token,
expiresAt: currentTime + clientData.expires_in * 1000
};
const encryptedClientCredentials = await encryptAppConnectionCredentials({
credentials: updatedClientCredentials,
orgId: appConnection.orgId,
kmsService
});
await appConnectionDAL.updateById(appConnection.id, { encryptedCredentials: encryptedClientCredentials });
return { accessToken: clientData.access_token };
default:
throw new InternalServerError({
message: `Unhandled Azure Key Vault connection method: ${appConnection.method as AzureKeyVaultConnectionMethod}`
});
}
};
export const getAzureKeyVaultConnectionListItem = () => {
@@ -100,7 +144,10 @@ export const getAzureKeyVaultConnectionListItem = () => {
return {
name: "Azure Key Vault" as const,
app: AppConnection.AzureKeyVault as const,
methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
methods: Object.values(AzureKeyVaultConnectionMethod) as [
AzureKeyVaultConnectionMethod.OAuth,
AzureKeyVaultConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
};
};
@@ -111,68 +158,108 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
getConfig();
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", inputCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
switch (method) {
case AzureKeyVaultConnectionMethod.OAuth:
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
}
let tokenResp: AxiosResponse<ExchangeCodeAzureResponse> | null = null;
let tokenError: AxiosError | null = null;
const oauthCredentials = inputCredentials as { code: string; tenantId?: string };
try {
tokenResp = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", oauthCredentials.tenantId || "common"),
new URLSearchParams({
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);
} catch (e: unknown) {
if (e instanceof AxiosError) {
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
}
if (tokenError) {
if (tokenError instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(tokenError?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
if (!tokenResp) {
throw new InternalServerError({
message: `Failed to get access token: Token was empty with no error`
});
}
return {
tenantId: inputCredentials.tenantId,
tenantId: oauthCredentials.tenantId,
accessToken: tokenResp.data.access_token,
refreshToken: tokenResp.data.refresh_token,
expiresAt: Date.now() + tokenResp.data.expires_in * 1000
};
case AzureKeyVaultConnectionMethod.ClientSecret:
const { tenantId, clientId, clientSecret } = inputCredentials as {
tenantId: string;
clientId: string;
clientSecret: string;
};
try {
const { data: clientData } = await request.post<ExchangeCodeAzureResponse>(
IntegrationUrls.AZURE_TOKEN_URL.replace("common", tenantId || "common"),
new URLSearchParams({
grant_type: "client_credentials",
scope: `https://vault.azure.net/.default`,
client_id: clientId,
client_secret: clientSecret
})
);
return {
tenantId,
accessToken: clientData.access_token,
expiresAt: Date.now() + clientData.expires_in * 1000,
clientId,
clientSecret
};
} catch (e: unknown) {
if (e instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to get access token: ${
(e?.response?.data as { error_description?: string })?.error_description || "Unknown error"
}`
});
} else {
throw new InternalServerError({
message: "Failed to get access token"
});
}
}
default:
throw new InternalServerError({
message: `Unhandled Azure connection method: ${method as AzureKeyVaultConnectionMethod}`
message: `Unhandled Azure Key Vault connection method: ${method as AzureKeyVaultConnectionMethod}`
});
}
};

View File

@@ -22,6 +22,29 @@ export const AzureKeyVaultConnectionOAuthOutputCredentialsSchema = z.object({
expiresAt: z.number()
});
export const AzureKeyVaultConnectionClientSecretInputCredentialsSchema = z.object({
clientId: z
.string()
.uuid()
.trim()
.min(1, "Client ID required")
.max(50, "Client ID must be at most 50 characters long"),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.max(50, "Client Secret must be at most 50 characters long"),
tenantId: z.string().uuid().trim().min(1, "Tenant ID required")
});
export const AzureKeyVaultConnectionClientSecretOutputCredentialsSchema = z.object({
clientId: z.string(),
clientSecret: z.string(),
tenantId: z.string(),
accessToken: z.string(),
expiresAt: z.number()
});
export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
@@ -30,6 +53,14 @@ export const ValidateAzureKeyVaultConnectionCredentialsSchema = z.discriminatedU
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
)
}),
z.object({
method: z
.literal(AzureKeyVaultConnectionMethod.ClientSecret)
.describe(AppConnections.CREATE(AppConnection.AzureKeyVault).method),
credentials: AzureKeyVaultConnectionClientSecretInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureKeyVault).credentials
)
})
]);
@@ -39,9 +70,13 @@ export const CreateAzureKeyVaultConnectionSchema = ValidateAzureKeyVaultConnecti
export const UpdateAzureKeyVaultConnectionSchema = z
.object({
credentials: AzureKeyVaultConnectionOAuthInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials
)
credentials: z
.union([
AzureKeyVaultConnectionOAuthInputCredentialsSchema,
AzureKeyVaultConnectionClientSecretInputCredentialsSchema
])
.optional()
.describe(AppConnections.UPDATE(AppConnection.AzureKeyVault).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureKeyVault));
@@ -55,6 +90,10 @@ export const AzureKeyVaultConnectionSchema = z.intersection(
z.object({
method: z.literal(AzureKeyVaultConnectionMethod.OAuth),
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema
}),
z.object({
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
})
])
);
@@ -65,6 +104,13 @@ export const SanitizedAzureKeyVaultConnectionSchema = z.discriminatedUnion("meth
credentials: AzureKeyVaultConnectionOAuthOutputCredentialsSchema.pick({
tenantId: true
})
}),
BaseAzureKeyVaultConnectionSchema.extend({
method: z.literal(AzureKeyVaultConnectionMethod.ClientSecret),
credentials: AzureKeyVaultConnectionClientSecretOutputCredentialsSchema.pick({
clientId: true,
tenantId: true
})
})
]);

View File

@@ -4,6 +4,7 @@ import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureKeyVaultConnectionClientSecretOutputCredentialsSchema,
AzureKeyVaultConnectionOAuthOutputCredentialsSchema,
AzureKeyVaultConnectionSchema,
CreateAzureKeyVaultConnectionSchema,
@@ -36,3 +37,7 @@ export type ExchangeCodeAzureResponse = {
};
export type TAzureKeyVaultConnectionCredentials = z.infer<typeof AzureKeyVaultConnectionOAuthOutputCredentialsSchema>;
export type TAzureKeyVaultConnectionClientSecretCredentials = z.infer<
typeof AzureKeyVaultConnectionClientSecretOutputCredentialsSchema
>;

View File

@@ -10,7 +10,8 @@ import {
TCloudflareConnection,
TCloudflareConnectionConfig,
TCloudflarePagesProject,
TCloudflareWorkersScript
TCloudflareWorkersScript,
TCloudflareZone
} from "./cloudflare-connection-types";
export const getCloudflareConnectionListItem = () => {
@@ -66,6 +67,27 @@ export const listCloudflareWorkersScripts = async (
}));
};
export const listCloudflareZones = async (appConnection: TCloudflareConnection): Promise<TCloudflareZone[]> => {
const {
credentials: { apiToken }
} = appConnection;
const { data } = await request.get<{ result: { name: string; id: string }[] }>(
`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones`,
{
headers: {
Authorization: `Bearer ${apiToken}`,
Accept: "application/json"
}
}
);
return data.result.map((a) => ({
name: a.name,
id: a.id
}));
};
export const validateCloudflareConnectionCredentials = async (config: TCloudflareConnectionConfig) => {
const { apiToken, accountId } = config.credentials;

View File

@@ -2,7 +2,11 @@ import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listCloudflarePagesProjects, listCloudflareWorkersScripts } from "./cloudflare-connection-fns";
import {
listCloudflarePagesProjects,
listCloudflareWorkersScripts,
listCloudflareZones
} from "./cloudflare-connection-fns";
import { TCloudflareConnection } from "./cloudflare-connection-types";
type TGetAppConnectionFunc = (
@@ -16,7 +20,6 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
try {
const projects = await listCloudflarePagesProjects(appConnection);
return projects;
} catch (error) {
logger.error(
@@ -30,9 +33,8 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
const listWorkersScripts = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
try {
const projects = await listCloudflareWorkersScripts(appConnection);
return projects;
const scripts = await listCloudflareWorkersScripts(appConnection);
return scripts;
} catch (error) {
logger.error(
error,
@@ -42,8 +44,20 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
}
};
const listZones = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
try {
const zones = await listCloudflareZones(appConnection);
return zones;
} catch (error) {
logger.error(error, `Failed to list Cloudflare Zones for Cloudflare connection [connectionId=${connectionId}]`);
return [];
}
};
return {
listPagesProjects,
listWorkersScripts
listWorkersScripts,
listZones
};
};

View File

@@ -32,3 +32,8 @@ export type TCloudflarePagesProject = {
export type TCloudflareWorkersScript = {
id: string;
};
export type TCloudflareZone = {
id: string;
name: string;
};

View File

@@ -222,6 +222,37 @@ export const validateGitLabConnectionCredentials = async (config: TGitLabConnect
return inputCredentials;
};
/**
 * Builds an authenticated GitLab client for the given connection.
 *
 * For OAuth connections whose access token has already expired, the token is
 * refreshed first (persisting the new credentials via appConnectionDAL/kmsService)
 * so the returned client always carries a usable token.
 */
export const getGitLabConnectionClient = async (
  appConnection: TGitLabConnection,
  appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
  let { accessToken } = appConnection.credentials;

  // The compound condition is kept intact so TypeScript narrows the credentials
  // to the OAuth variant (refreshToken/expiresAt only exist on that shape).
  if (
    appConnection.method === GitLabConnectionMethod.OAuth &&
    appConnection.credentials.refreshToken &&
    new Date(appConnection.credentials.expiresAt) < new Date()
  ) {
    accessToken = await refreshGitLabToken(
      appConnection.credentials.refreshToken,
      appConnection.id,
      appConnection.orgId,
      appConnectionDAL,
      kmsService,
      appConnection.credentials.instanceUrl
    );
  }

  return getGitLabClient(
    accessToken,
    appConnection.credentials.instanceUrl,
    appConnection.method === GitLabConnectionMethod.OAuth
  );
};
export const listGitLabProjects = async ({
appConnection,
appConnectionDAL,

View File

@@ -0,0 +1,4 @@
export * from "./netlify-connection-constants";
export * from "./netlify-connection-fns";
export * from "./netlify-connection-schemas";
export * from "./netlify-connection-types";

View File

@@ -0,0 +1,3 @@
// Authentication methods supported by Netlify app connections.
export enum NetlifyConnectionMethod {
  // Personal access token, sent as a Bearer token on every API request.
  AccessToken = "access-token"
}

View File

@@ -0,0 +1,35 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
import { NetlifyPublicAPI } from "./netlify-connection-public-client";
import { TNetlifyConnectionConfig } from "./netlify-connection-types";
/**
 * Returns the static catalog entry describing the Netlify app connection:
 * display name, app identifier, and the list of supported auth methods.
 */
export const getNetlifyConnectionListItem = () => {
  return {
    name: "Netlify" as const,
    app: AppConnection.Netlify as const,
    methods: Object.values(NetlifyConnectionMethod)
  };
};
/**
 * Validates Netlify credentials by performing a healthcheck call against the API.
 * Any failure is surfaced as a BadRequestError (with the axios message when
 * available); on success the credentials are returned unchanged.
 */
export const validateNetlifyConnectionCredentials = async (config: TNetlifyConnectionConfig) => {
  try {
    await NetlifyPublicAPI.healthcheck(config);
  } catch (error: unknown) {
    const message =
      error instanceof AxiosError
        ? `Failed to validate credentials: ${error.message || "Unknown error"}`
        : "Unable to validate connection - verify credentials";
    throw new BadRequestError({ message });
  }

  return config.credentials;
};

View File

@@ -0,0 +1,261 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable no-await-in-loop */
/* eslint-disable class-methods-use-this */
import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxiosError } from "axios";
import { createRequestClient } from "@app/lib/config/request";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
import { TNetlifyAccount, TNetlifyConnectionConfig, TNetlifySite, TNetlifyVariable } from "./netlify-connection-types";
/**
 * Produces the HTTP auth headers for a Netlify connection.
 * @throws Error for any connection method other than AccessToken.
 */
export function getNetlifyAuthHeaders(connection: TNetlifyConnectionConfig): Record<string, string> {
  if (connection.method === NetlifyConnectionMethod.AccessToken) {
    return { Authorization: `Bearer ${connection.credentials.accessToken}` };
  }
  throw new Error(`Unsupported Netlify connection method`);
}
/**
 * Inspects a Netlify API response for rate limiting.
 *
 * @returns isRatelimited — true when the response was HTTP 429;
 *          wait — resolves after the computed backoff (in seconds);
 *          maxAttempts — retry budget for callers.
 */
export function getNetlifyRatelimiter(response: AxiosResponse): {
  maxAttempts: number;
  isRatelimited: boolean;
  wait: () => Promise<void>;
} {
  const wait = (seconds: number = 60) => {
    return new Promise<void>((res) => {
      setTimeout(res, seconds * 1000);
    });
  };

  // FIX: axios normalizes response header names to lowercase, so the previous
  // lookup of "X-RateLimit-Remaining" always returned undefined (NaN after
  // parseInt) and every 429 fell back to the 60s default wait.
  let remaining = parseInt(response.headers["x-ratelimit-remaining"] as string, 10);
  const isRatelimited = response.status === HttpStatusCode.TooManyRequests;

  if (isRatelimited) {
    if (Number.isFinite(remaining) && Math.round(remaining) > 0) {
      remaining += 1; // Jitter to ensure we wait at least 1 second
    } else {
      // Header missing or unparsable — fall back to a conservative 60s backoff.
      remaining = 60;
    }
  }

  return {
    isRatelimited,
    wait: () => wait(remaining),
    maxAttempts: 3
  };
}
type NetlifyParams = {
account_id: string;
context_name?: string;
site_id?: string;
};
/**
 * Thin client for the Netlify public REST API (v1).
 *
 * All requests are authenticated per-call via getNetlifyAuthHeaders and are
 * automatically retried (up to maxAttempts) when Netlify responds 429,
 * honoring the backoff computed by getNetlifyRatelimiter.
 */
class NetlifyPublicClient {
  private client: AxiosInstance;

  constructor() {
    this.client = createRequestClient({
      baseURL: `${IntegrationUrls.NETLIFY_API_URL}/api/v1`,
      headers: {
        "Content-Type": "application/json"
      }
    });
  }

  /**
   * Sends a request with auth headers attached; transparently retries on 429.
   * Non-2xx statuses other than 429 reject via axios' validateStatus.
   */
  async send<T>(connection: TNetlifyConnectionConfig, config: AxiosRequestConfig, retryAttempt = 0): Promise<T> {
    const response = await this.client.request<T>({
      ...config,
      timeout: 1000 * 60, // 60 seconds timeout
      validateStatus: (status) => (status >= 200 && status < 300) || status === HttpStatusCode.TooManyRequests,
      headers: getNetlifyAuthHeaders(connection)
    });

    const limiter = getNetlifyRatelimiter(response);

    if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
      await limiter.wait();
      return this.send(connection, config, retryAttempt + 1);
    }

    return response.data;
  }

  /** Validates credentials by hitting a cheap authenticated endpoint. */
  healthcheck(connection: TNetlifyConnectionConfig) {
    switch (connection.method) {
      case NetlifyConnectionMethod.AccessToken:
        return this.getNetlifyAccounts(connection);
      default:
        throw new Error(`Unsupported Netlify connection method`);
    }
  }

  /** Lists env vars for an account (optionally scoped by context/site via params). */
  async getVariables(
    connection: TNetlifyConnectionConfig,
    { account_id, ...params }: NetlifyParams,
    limit: number = 50,
    page: number = 1
  ) {
    const res = await this.send<TNetlifyVariable[]>(connection, {
      method: "GET",
      url: `/accounts/${account_id}/env`,
      params: {
        ...params,
        limit,
        page
      }
    });

    return res;
  }

  /** Creates one or more env vars in a single POST (Netlify accepts an array body). */
  async createVariable(
    connection: TNetlifyConnectionConfig,
    { account_id, ...params }: NetlifyParams,
    ...variables: TNetlifyVariable[]
  ) {
    const res = await this.send<TNetlifyVariable>(connection, {
      method: "POST",
      url: `/accounts/${account_id}/env`,
      data: variables,
      params
    });

    return res;
  }

  /** PATCHes a single value of an existing env var. */
  async updateVariableValue(
    connection: TNetlifyConnectionConfig,
    { account_id, ...params }: NetlifyParams,
    variable: TNetlifyVariable
  ) {
    const res = await this.send<TNetlifyVariable>(connection, {
      method: "PATCH",
      url: `/accounts/${account_id}/env/${variable.key}`,
      data: variable,
      params
    });

    return res;
  }

  /** PUTs (replaces) an existing env var in full. */
  async updateVariable(
    connection: TNetlifyConnectionConfig,
    { account_id, ...params }: NetlifyParams,
    variable: TNetlifyVariable
  ) {
    const res = await this.send<TNetlifyVariable>(connection, {
      method: "PUT",
      url: `/accounts/${account_id}/env/${variable.key}`,
      data: variable,
      params
    });

    return res;
  }

  /** Fetches a single env var by key; resolves to null when it does not exist. */
  async getVariable(
    connection: TNetlifyConnectionConfig,
    { account_id, ...params }: NetlifyParams,
    variable: Pick<TNetlifyVariable, "key">
  ) {
    try {
      const res = await this.send<TNetlifyVariable>(connection, {
        method: "GET",
        url: `/accounts/${account_id}/env/${variable.key}`,
        params
      });

      return res;
    } catch (error) {
      if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
        return null;
      }

      throw error;
    }
  }

  /**
   * Creates the variable if absent, otherwise updates it. Secret variables
   * cannot be updated in place, so they are deleted and re-created.
   */
  async upsertVariable(connection: TNetlifyConnectionConfig, params: NetlifyParams, variable: TNetlifyVariable) {
    const res = await this.getVariable(connection, params, variable);

    if (!res) {
      return this.createVariable(connection, params, variable);
    }

    if (res.is_secret) {
      await this.deleteVariable(connection, params, variable);
      return this.createVariable(connection, params, variable);
    }

    return this.updateVariable(connection, params, variable);
  }

  /** Deletes an env var by key; resolves to null when it was already gone. */
  async deleteVariable(
    connection: TNetlifyConnectionConfig,
    { account_id, ...params }: NetlifyParams,
    variable: Pick<TNetlifyVariable, "key">
  ) {
    try {
      const res = await this.send<TNetlifyVariable>(connection, {
        method: "DELETE",
        url: `/accounts/${account_id}/env/${variable.key}`,
        params
      });

      return res;
    } catch (error) {
      if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
        return null;
      }

      throw error;
    }
  }

  /** Deletes a single value of an env var; resolves to null when not found. */
  async deleteVariableValue(
    connection: TNetlifyConnectionConfig,
    { account_id, value_id, ...params }: NetlifyParams & { value_id: string },
    variable: Pick<TNetlifyVariable, "key" | "id">
  ) {
    try {
      const res = await this.send<TNetlifyVariable>(connection, {
        method: "DELETE",
        // FIX: the path was missing the "env" segment
        // (`/accounts/{account_id}/{key}/value/{value_id}`), which is not a valid
        // Netlify endpoint — it must match the `/accounts/{account_id}/env/{key}`
        // pattern used by every other env-var route here.
        url: `/accounts/${account_id}/env/${variable.key}/value/${value_id}`,
        params
      });

      return res;
    } catch (error) {
      if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
        return null;
      }

      throw error;
    }
  }

  /** Lists sites belonging to the given account slug/id. */
  async getSites(connection: TNetlifyConnectionConfig, accountId: string) {
    const res = await this.send<TNetlifySite[]>(connection, {
      method: "GET",
      url: `/${accountId}/sites`
    });

    return res;
  }

  /** Lists the accounts visible to the access token (also used as healthcheck). */
  async getNetlifyAccounts(connection: TNetlifyConnectionConfig) {
    const res = await this.send<TNetlifyAccount[]>(connection, {
      method: "GET",
      url: `/accounts`
    });

    return res;
  }
}
export const NetlifyPublicAPI = new NetlifyPublicClient();

View File

@@ -0,0 +1,67 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
// Zod schema for the connection method discriminator.
export const NetlifyConnectionMethodSchema = z
  .nativeEnum(NetlifyConnectionMethod)
  .describe(AppConnections.CREATE(AppConnection.Netlify).method);

// Credentials accepted for the access-token method.
export const NetlifyConnectionAccessTokenCredentialsSchema = z.object({
  accessToken: z
    .string()
    .trim()
    .min(1, "Access Token required")
    .max(255)
    .describe(AppConnections.CREDENTIALS.NETLIFY.accessToken)
});

// Shared base fields plus the app discriminator literal.
const BaseNetlifyConnectionSchema = BaseAppConnectionSchema.extend({
  app: z.literal(AppConnection.Netlify)
});

// Full (internal) connection shape, including secret credentials.
export const NetlifyConnectionSchema = BaseNetlifyConnectionSchema.extend({
  method: NetlifyConnectionMethodSchema,
  credentials: NetlifyConnectionAccessTokenCredentialsSchema
});

// API-safe shape: .pick({}) strips all credential fields so the access token
// is never serialized back to clients.
export const SanitizedNetlifyConnectionSchema = z.discriminatedUnion("method", [
  BaseNetlifyConnectionSchema.extend({
    method: NetlifyConnectionMethodSchema,
    credentials: NetlifyConnectionAccessTokenCredentialsSchema.pick({})
  })
]);

// Input shape used when validating submitted credentials.
export const ValidateNetlifyConnectionCredentialsSchema = z.discriminatedUnion("method", [
  z.object({
    method: NetlifyConnectionMethodSchema,
    credentials: NetlifyConnectionAccessTokenCredentialsSchema.describe(
      AppConnections.CREATE(AppConnection.Netlify).credentials
    )
  })
]);

// Create payload = validated credentials + generic create fields.
export const CreateNetlifyConnectionSchema = ValidateNetlifyConnectionCredentialsSchema.and(
  GenericCreateAppConnectionFieldsSchema(AppConnection.Netlify)
);

// Update payload: credentials are optional (omit to keep the existing token).
export const UpdateNetlifyConnectionSchema = z
  .object({
    credentials: NetlifyConnectionAccessTokenCredentialsSchema.optional().describe(
      AppConnections.UPDATE(AppConnection.Netlify).credentials
    )
  })
  .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Netlify));

// Catalog list-item shape returned by getNetlifyConnectionListItem.
export const NetlifyConnectionListItemSchema = z.object({
  name: z.literal("Netlify"),
  app: z.literal(AppConnection.Netlify),
  methods: z.nativeEnum(NetlifyConnectionMethod).array()
});

View File

@@ -0,0 +1,42 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { NetlifyPublicAPI } from "./netlify-connection-public-client";
import { TNetlifyConnection } from "./netlify-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TNetlifyConnection>;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export const netlifyConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listAccounts = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Netlify, connectionId, actor);
try {
const accounts = await NetlifyPublicAPI.getNetlifyAccounts(appConnection);
return accounts;
} catch (error) {
logger.error(error, "Failed to list accounts on Netlify");
return [];
}
};
const listSites = async (connectionId: string, actor: OrgServiceActor, accountId: string) => {
const appConnection = await getAppConnection(AppConnection.Netlify, connectionId, actor);
try {
const sites = await NetlifyPublicAPI.getSites(appConnection, accountId);
return sites;
} catch (error) {
logger.error(error, "Failed to list sites on Netlify");
return [];
}
};
return {
listAccounts,
listSites
};
};

View File

@@ -0,0 +1,51 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateNetlifyConnectionSchema,
NetlifyConnectionSchema,
ValidateNetlifyConnectionCredentialsSchema
} from "./netlify-connection-schemas";
// Full connection record (including credentials), derived from the zod schema.
export type TNetlifyConnection = z.infer<typeof NetlifyConnectionSchema>;

// Create-connection input, pinned to the Netlify app discriminator.
export type TNetlifyConnectionInput = z.infer<typeof CreateNetlifyConnectionSchema> & {
  app: AppConnection.Netlify;
};

// Schema type handle used by the generic credential-validation machinery.
export type TValidateNetlifyConnectionCredentialsSchema = typeof ValidateNetlifyConnectionCredentialsSchema;

// Minimal config needed to authenticate API calls (method + credentials + org).
export type TNetlifyConnectionConfig = DiscriminativePick<TNetlifyConnection, "method" | "app" | "credentials"> & {
  orgId: string;
};

// Env-var record as exchanged with the Netlify API.
export type TNetlifyVariable = {
  key: string;
  id?: string; // ID of the variable (present in responses)
  created_at?: string;
  updated_at?: string;
  is_secret?: boolean;
  scopes?: ("builds" | "functions" | "runtime" | "post_processing")[];
  values: TNetlifyVariableValue[];
};

// One contextual value of an env var (a variable may differ per deploy context/site).
export type TNetlifyVariableValue = {
  id?: string;
  context?: string; // "all", "dev", "branch-deploy", etc.
  value?: string; // Omitted in response if `is_secret` is true
  site_id?: string; // Optional: overrides at site-level
  created_at?: string;
  updated_at?: string;
};

// Netlify account (team) summary.
export type TNetlifyAccount = {
  id: string;
  name: string;
};

// Netlify site summary.
export type TNetlifySite = {
  id: string;
  name: string;
};

View File

@@ -1,3 +1,4 @@
export enum AcmeDnsProvider {
Route53 = "route53"
Route53 = "route53",
Cloudflare = "cloudflare"
}

View File

@@ -1,19 +1,17 @@
import { ChangeResourceRecordSetsCommand, Route53Client } from "@aws-sdk/client-route-53";
import * as x509 from "@peculiar/x509";
import acme from "acme-client";
import { TableName } from "@app/db/schemas";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, CryptographyError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
import { TAwsConnection, TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types";
import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types";
import { TCloudflareConnection } from "@app/services/app-connection/cloudflare/cloudflare-connection-types";
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal";
@@ -39,6 +37,8 @@ import {
TCreateAcmeCertificateAuthorityDTO,
TUpdateAcmeCertificateAuthorityDTO
} from "./acme-certificate-authority-types";
import { cloudflareDeleteTxtRecord, cloudflareInsertTxtRecord } from "./dns-providers/cloudflare";
import { route53DeleteTxtRecord, route53InsertTxtRecord } from "./dns-providers/route54";
type TAcmeCertificateAuthorityFnsDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">;
@@ -95,74 +95,6 @@ export const castDbEntryToAcmeCertificateAuthority = (
};
};
export const route53InsertTxtRecord = async (
connection: TAwsConnectionConfig,
hostedZoneId: string,
domain: string,
value: string
) => {
const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
const route53Client = new Route53Client({
sha256: CustomAWSHasher,
useFipsEndpoint: crypto.isFipsModeEnabled(),
credentials: config.credentials!,
region: config.region
});
const command = new ChangeResourceRecordSetsCommand({
HostedZoneId: hostedZoneId,
ChangeBatch: {
Comment: "Set ACME challenge TXT record",
Changes: [
{
Action: "UPSERT",
ResourceRecordSet: {
Name: domain,
Type: "TXT",
TTL: 30,
ResourceRecords: [{ Value: value }]
}
}
]
}
});
await route53Client.send(command);
};
export const route53DeleteTxtRecord = async (
connection: TAwsConnectionConfig,
hostedZoneId: string,
domain: string,
value: string
) => {
const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
const route53Client = new Route53Client({
credentials: config.credentials!,
region: config.region
});
const command = new ChangeResourceRecordSetsCommand({
HostedZoneId: hostedZoneId,
ChangeBatch: {
Comment: "Delete ACME challenge TXT record",
Changes: [
{
Action: "DELETE",
ResourceRecordSet: {
Name: domain,
Type: "TXT",
TTL: 30,
ResourceRecords: [{ Value: value }]
}
}
]
}
});
await route53Client.send(command);
};
export const AcmeCertificateAuthorityFns = ({
appConnectionDAL,
appConnectionService,
@@ -209,6 +141,12 @@ export const AcmeCertificateAuthorityFns = ({
});
}
if (dnsProviderConfig.provider === AcmeDnsProvider.Cloudflare && appConnection.app !== AppConnection.Cloudflare) {
throw new BadRequestError({
message: `App connection with ID '${dnsAppConnectionId}' is not a Cloudflare connection`
});
}
// validates permission to connect
await appConnectionService.connectAppConnectionById(appConnection.app as AppConnection, dnsAppConnectionId, actor);
@@ -289,6 +227,15 @@ export const AcmeCertificateAuthorityFns = ({
});
}
if (
dnsProviderConfig.provider === AcmeDnsProvider.Cloudflare &&
appConnection.app !== AppConnection.Cloudflare
) {
throw new BadRequestError({
message: `App connection with ID '${dnsAppConnectionId}' is not a Cloudflare connection`
});
}
// validates permission to connect
await appConnectionService.connectAppConnectionById(
appConnection.app as AppConnection,
@@ -443,26 +390,56 @@ export const AcmeCertificateAuthorityFns = ({
const recordName = `_acme-challenge.${authz.identifier.value}`; // e.g., "_acme-challenge.example.com"
const recordValue = `"${keyAuthorization}"`; // must be double quoted
if (acmeCa.configuration.dnsProviderConfig.provider === AcmeDnsProvider.Route53) {
await route53InsertTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
switch (acmeCa.configuration.dnsProviderConfig.provider) {
case AcmeDnsProvider.Route53: {
await route53InsertTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
case AcmeDnsProvider.Cloudflare: {
await cloudflareInsertTxtRecord(
connection as TCloudflareConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
default: {
throw new Error(`Unsupported DNS provider: ${acmeCa.configuration.dnsProviderConfig.provider as string}`);
}
}
},
challengeRemoveFn: async (authz, challenge, keyAuthorization) => {
const recordName = `_acme-challenge.${authz.identifier.value}`; // e.g., "_acme-challenge.example.com"
const recordValue = `"${keyAuthorization}"`; // must be double quoted
if (acmeCa.configuration.dnsProviderConfig.provider === AcmeDnsProvider.Route53) {
await route53DeleteTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
switch (acmeCa.configuration.dnsProviderConfig.provider) {
case AcmeDnsProvider.Route53: {
await route53DeleteTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
case AcmeDnsProvider.Cloudflare: {
await cloudflareDeleteTxtRecord(
connection as TCloudflareConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
default: {
throw new Error(`Unsupported DNS provider: ${acmeCa.configuration.dnsProviderConfig.provider as string}`);
}
}
}
});

View File

@@ -0,0 +1,109 @@
import axios from "axios";
import { request } from "@app/lib/config/request";
import { TCloudflareConnectionConfig } from "@app/services/app-connection/cloudflare/cloudflare-connection-types";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
/**
 * Creates a TXT record in the given Cloudflare zone (used for ACME DNS-01
 * challenges). Cloudflare API error messages, when present, are re-thrown as
 * plain Errors so callers see the provider's explanation.
 */
export const cloudflareInsertTxtRecord = async (
  connection: TCloudflareConnectionConfig,
  hostedZoneId: string,
  domain: string,
  value: string
) => {
  const { apiToken } = connection.credentials;

  const endpoint = `${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records`;
  const requestHeaders = {
    Authorization: `Bearer ${apiToken}`,
    "Content-Type": "application/json",
    Accept: "application/json"
  };
  // Short TTL so the challenge record propagates quickly; never proxied.
  const record = {
    type: "TXT",
    name: domain,
    content: value,
    ttl: 60,
    proxied: false
  };

  try {
    await request.post(endpoint, record, { headers: requestHeaders });
  } catch (error) {
    if (axios.isAxiosError(error)) {
      const cfErrors = (error.response?.data as { errors?: { message: string }[] })?.errors;
      const firstErrorMessage = cfErrors?.[0]?.message;
      if (firstErrorMessage) {
        throw new Error(firstErrorMessage);
      }
    }
    throw error;
  }
};
/**
 * Removes an ACME challenge TXT record from a Cloudflare zone.
 *
 * The record ID is not known to the caller, so this first lists TXT records
 * filtered by name/content, locates the exact match, and deletes it by ID.
 * A record that is already gone is treated as success (no-op).
 */
export const cloudflareDeleteTxtRecord = async (
  connection: TCloudflareConnectionConfig,
  hostedZoneId: string,
  domain: string,
  value: string
) => {
  const {
    credentials: { apiToken }
  } = connection;
  try {
    // Server-side filter narrows the listing to candidate TXT records.
    const listRecordsResponse = await request.get<{
      result: { id: string; type: string; name: string; content: string }[];
    }>(`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records`, {
      headers: {
        Authorization: `Bearer ${apiToken}`,
        "Content-Type": "application/json",
        Accept: "application/json"
      },
      params: {
        type: "TXT",
        name: domain,
        content: value
      }
    });
    const dnsRecords = listRecordsResponse.data?.result;
    if (Array.isArray(dnsRecords) && dnsRecords.length > 0) {
      // Re-check all three fields client-side in case the API filter was loose.
      const recordToDelete = dnsRecords.find(
        (record) => record.type === "TXT" && record.name === domain && record.content === value
      );
      if (recordToDelete) {
        await request.delete(
          `${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records/${recordToDelete.id}`,
          {
            headers: {
              Authorization: `Bearer ${apiToken}`,
              "Content-Type": "application/json",
              Accept: "application/json"
            }
          }
        );
      }
    }
  } catch (error) {
    // Surface Cloudflare's own error message when the API provides one.
    if (axios.isAxiosError(error)) {
      const firstErrorMessage = (
        error.response?.data as {
          errors?: { message: string }[];
        }
      )?.errors?.[0]?.message;
      if (firstErrorMessage) {
        throw new Error(firstErrorMessage);
      }
    }
    throw error;
  }
};

View File

@@ -0,0 +1,75 @@
import { ChangeResourceRecordSetsCommand, Route53Client } from "@aws-sdk/client-route-53";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { crypto } from "@app/lib/crypto/cryptography";
import { AWSRegion } from "@app/services/app-connection/app-connection-enums";
import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
import { TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types";
/**
 * UPSERTs an ACME challenge TXT record into the given Route53 hosted zone.
 * `value` must already be double-quoted per Route53 TXT record requirements.
 */
export const route53InsertTxtRecord = async (
  connection: TAwsConnectionConfig,
  hostedZoneId: string,
  domain: string,
  value: string
) => {
  const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
  const route53Client = new Route53Client({
    // FIPS-compatible hasher/endpoint so the call works with FIPS mode enabled.
    sha256: CustomAWSHasher,
    useFipsEndpoint: crypto.isFipsModeEnabled(),
    credentials: config.credentials!,
    region: config.region
  });

  const command = new ChangeResourceRecordSetsCommand({
    HostedZoneId: hostedZoneId,
    ChangeBatch: {
      Comment: "Set ACME challenge TXT record",
      Changes: [
        {
          Action: "UPSERT",
          ResourceRecordSet: {
            Name: domain,
            Type: "TXT",
            TTL: 30, // short TTL so the challenge record propagates quickly
            ResourceRecords: [{ Value: value }]
          }
        }
      ]
    }
  });

  await route53Client.send(command);
};
export const route53DeleteTxtRecord = async (
connection: TAwsConnectionConfig,
hostedZoneId: string,
domain: string,
value: string
) => {
const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
const route53Client = new Route53Client({
credentials: config.credentials!,
region: config.region
});
const command = new ChangeResourceRecordSetsCommand({
HostedZoneId: hostedZoneId,
ChangeBatch: {
Comment: "Delete ACME challenge TXT record",
Changes: [
{
Action: "DELETE",
ResourceRecordSet: {
Name: domain,
Type: "TXT",
TTL: 30,
ResourceRecords: [{ Value: value }]
}
}
]
}
});
await route53Client.send(command);
};

View File

@@ -17,25 +17,16 @@ type VaultData = {
const vaultFactory = () => {
const getMounts = async (request: AxiosInstance) => {
const response = await request
.get<
Record<
string,
{
accessor: string;
options: {
version?: string;
} | null;
type: string;
}
>
>("/v1/sys/mounts")
.get<{
data: Record<string, { accessor: string; options: { version?: string } | null; type: string }>;
}>("/v1/sys/mounts")
.catch((err) => {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault mounts");
}
throw err;
});
return response.data;
return response.data.data;
};
const getPaths = async (

View File

@@ -19,7 +19,7 @@ import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-d
import { TSecretVersionV2TagDALFactory } from "../secret-v2-bridge/secret-version-tag-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { importDataIntoInfisicalFn } from "./external-migration-fns";
import { ExternalPlatforms, ImportType, TImportInfisicalDataCreate } from "./external-migration-types";
import { ExternalPlatforms, TImportInfisicalDataCreate } from "./external-migration-types";
export type TExternalMigrationQueueFactoryDep = {
smtpService: TSmtpService;
@@ -66,8 +66,8 @@ export const externalMigrationQueueFactory = ({
}: TExternalMigrationQueueFactoryDep) => {
const startImport = async (dto: {
actorEmail: string;
importType: ExternalPlatforms;
data: {
importType: ImportType;
iv: string;
tag: string;
ciphertext: string;
@@ -87,14 +87,14 @@ export const externalMigrationQueueFactory = ({
};
queueService.start(QueueName.ImportSecretsFromExternalSource, async (job) => {
try {
const { data, actorEmail } = job.data;
const { data, actorEmail, importType } = job.data;
try {
await smtpService.sendMail({
recipients: [actorEmail],
subjectLine: "Infisical import started",
substitutions: {
provider: ExternalPlatforms.EnvKey
provider: importType
},
template: SmtpTemplates.ExternalImportStarted
});
@@ -141,7 +141,7 @@ export const externalMigrationQueueFactory = ({
recipients: [actorEmail],
subjectLine: "Infisical import successful",
substitutions: {
provider: ExternalPlatforms.EnvKey
provider: importType
},
template: SmtpTemplates.ExternalImportSuccessful
});
@@ -150,7 +150,7 @@ export const externalMigrationQueueFactory = ({
recipients: [job.data.actorEmail],
subjectLine: "Infisical import failed",
substitutions: {
provider: ExternalPlatforms.EnvKey,
provider: importType,
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment
error: (err as any)?.message || "Unknown error"
},

View File

@@ -6,7 +6,7 @@ import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
import { TUserDALFactory } from "../user/user-dal";
import { decryptEnvKeyDataFn, importVaultDataFn, parseEnvKeyDataFn } from "./external-migration-fns";
import { TExternalMigrationQueueFactory } from "./external-migration-queue";
import { ImportType, TImportEnvKeyDataDTO, TImportVaultDataDTO } from "./external-migration-types";
import { ExternalPlatforms, TImportEnvKeyDataDTO, TImportVaultDataDTO } from "./external-migration-types";
type TExternalMigrationServiceFactoryDep = {
permissionService: TPermissionServiceFactory;
@@ -60,8 +60,8 @@ export const externalMigrationServiceFactory = ({
await externalMigrationQueue.startImport({
actorEmail: user.email!,
importType: ExternalPlatforms.EnvKey,
data: {
importType: ImportType.EnvKey,
...encrypted
}
});
@@ -110,8 +110,8 @@ export const externalMigrationServiceFactory = ({
await externalMigrationQueue.startImport({
actorEmail: user.email!,
importType: ExternalPlatforms.Vault,
data: {
importType: ImportType.Vault,
...encrypted
}
});

View File

@@ -2,11 +2,6 @@ import { TOrgPermission } from "@app/lib/types";
import { ActorAuthMethod, ActorType } from "../auth/auth-type";
export enum ImportType {
EnvKey = "envkey",
Vault = "vault"
}
export enum VaultMappingType {
Namespace = "namespace",
KeyVault = "key-vault"
@@ -112,5 +107,6 @@ export type TEnvKeyExportJSON = {
};
export enum ExternalPlatforms {
EnvKey = "EnvKey"
EnvKey = "EnvKey",
Vault = "Vault"
}

View File

@@ -513,6 +513,21 @@ export const orgDALFactory = (db: TDbClient) => {
}
};
// Bulk-deletes org memberships by ID, scoped to a single org; returns the
// deleted rows. Runs on the supplied transaction when one is provided.
const deleteMembershipsById = async (ids: string[], orgId: string, tx?: Knex) => {
  try {
    const qb = tx || db;
    return await qb(TableName.OrgMembership)
      .where({ orgId })
      .whereIn("id", ids)
      .delete()
      .returning("*");
  } catch (error) {
    throw new DatabaseError({ error, name: "Delete org memberships" });
  }
};
const findMembership = async (
filter: TFindFilter<TOrgMemberships>,
{ offset, limit, sort, tx }: TFindOpt<TOrgMemberships> = {}
@@ -634,6 +649,7 @@ export const orgDALFactory = (db: TDbClient) => {
createMembership,
updateMembershipById,
deleteMembershipById,
deleteMembershipsById,
updateMembership
});
};

View File

@@ -1,5 +1,6 @@
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
import { BadRequestError } from "@app/lib/errors";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -14,6 +15,19 @@ type TDeleteOrgMembership = {
userAliasDAL: Pick<TUserAliasDALFactory, "delete">;
licenseService: Pick<TLicenseServiceFactory, "updateSubscriptionOrgMemberCount">;
projectUserAdditionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
userId?: string;
};
// Input bundle for deleteOrgMembershipsFn — the bulk counterpart of
// TDeleteOrgMembership (same DAL dependencies, but a list of membership ids).
// `userId` is the acting user's id; the function throws if that user is
// among the memberships being removed (self-removal guard).
type TDeleteOrgMemberships = {
orgMembershipIds: string[];
orgId: string;
orgDAL: Pick<TOrgDALFactory, "findMembership" | "deleteMembershipsById" | "transaction">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "delete" | "findProjectMembershipsByUserIds">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete">;
userAliasDAL: Pick<TUserAliasDALFactory, "delete">;
licenseService: Pick<TLicenseServiceFactory, "updateSubscriptionOrgMemberCount">;
projectUserAdditionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
userId?: string;
};
export const deleteOrgMembershipFn = async ({
@@ -24,11 +38,17 @@ export const deleteOrgMembershipFn = async ({
projectUserAdditionalPrivilegeDAL,
projectKeyDAL,
userAliasDAL,
licenseService
licenseService,
userId
}: TDeleteOrgMembership) => {
const deletedMembership = await orgDAL.transaction(async (tx) => {
const orgMembership = await orgDAL.deleteMembershipById(orgMembershipId, orgId, tx);
if (userId && orgMembership.userId === userId) {
// scott: this is temporary, we will add a leave org endpoint with proper handling to ensure org isn't abandoned/broken
throw new BadRequestError({ message: "You cannot remove yourself from an organization" });
}
if (!orgMembership.userId) {
await licenseService.updateSubscriptionOrgMemberCount(orgId);
return orgMembership;
@@ -86,3 +106,88 @@ export const deleteOrgMembershipFn = async ({
return deletedMembership;
};
/**
 * Removes a batch of org memberships and cascades cleanup of the removed
 * users' per-project state (user aliases, additional privileges, project
 * memberships and project keys), all inside a single transaction.
 * Throws BadRequestError when the acting user tries to remove themselves.
 * Returns the deleted org membership rows.
 */
export const deleteOrgMembershipsFn = async ({
  orgMembershipIds,
  orgId,
  orgDAL,
  projectMembershipDAL,
  projectUserAdditionalPrivilegeDAL,
  projectKeyDAL,
  userAliasDAL,
  licenseService,
  userId
}: TDeleteOrgMemberships) => {
  return orgDAL.transaction(async (tx) => {
    const orgMemberships = await orgDAL.deleteMembershipsById(orgMembershipIds, orgId, tx);

    // Collect the user ids behind the deleted memberships; invites that were
    // never accepted carry no userId and need no further cleanup.
    const removedUserIds = orgMemberships.flatMap((member) => (member.userId ? [member.userId] : []));

    // scott: temporary guard until a dedicated leave-org endpoint exists;
    // throwing here rolls the whole transaction back.
    if (userId && removedUserIds.includes(userId)) {
      throw new BadRequestError({ message: "You cannot remove yourself from an organization" });
    }

    if (removedUserIds.length === 0) {
      await licenseService.updateSubscriptionOrgMemberCount(orgId);
      return orgMemberships;
    }

    await userAliasDAL.delete(
      {
        $in: { userId: removedUserIds },
        orgId
      },
      tx
    );

    await projectUserAdditionalPrivilegeDAL.delete(
      {
        $in: { userId: removedUserIds }
      },
      tx
    );

    // Drop every project membership the removed users held within this org.
    const projectMemberships = await projectMembershipDAL.findProjectMembershipsByUserIds(orgId, removedUserIds);
    await projectMembershipDAL.delete(
      {
        $in: { id: projectMemberships.map((membership) => membership.id) }
      },
      tx
    );

    // Drop the project keys shared with the removed users in those projects.
    // NOTE(review): this lookup runs outside `tx` (mirroring the single-delete
    // path) — confirm that is intentional.
    const projectKeys = await projectKeyDAL.find({
      $in: {
        projectId: projectMemberships.map((membership) => membership.projectId),
        receiverId: removedUserIds
      }
    });
    await projectKeyDAL.delete(
      {
        $in: { id: projectKeys.map((key) => key.id) }
      },
      tx
    );

    await licenseService.updateSubscriptionOrgMemberCount(orgId);
    return orgMemberships;
  });
};

View File

@@ -75,10 +75,11 @@ import { TUserDALFactory } from "../user/user-dal";
import { TIncidentContactsDALFactory } from "./incident-contacts-dal";
import { TOrgBotDALFactory } from "./org-bot-dal";
import { TOrgDALFactory } from "./org-dal";
import { deleteOrgMembershipFn } from "./org-fns";
import { deleteOrgMembershipFn, deleteOrgMembershipsFn } from "./org-fns";
import { TOrgRoleDALFactory } from "./org-role-dal";
import {
TDeleteOrgMembershipDTO,
TDeleteOrgMembershipsDTO,
TFindAllWorkspacesDTO,
TFindOrgMembersByEmailDTO,
TGetOrgGroupsDTO,
@@ -106,7 +107,13 @@ type TOrgServiceFactoryDep = {
identityMetadataDAL: Pick<TIdentityMetadataDALFactory, "delete" | "insertMany" | "transaction">;
projectMembershipDAL: Pick<
TProjectMembershipDALFactory,
"findProjectMembershipsByUserId" | "delete" | "create" | "find" | "insertMany" | "transaction"
| "findProjectMembershipsByUserId"
| "delete"
| "create"
| "find"
| "insertMany"
| "transaction"
| "findProjectMembershipsByUserIds"
>;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "insertMany" | "findLatestProjectKey" | "create">;
orgMembershipDAL: Pick<
@@ -1369,12 +1376,42 @@ export const orgServiceFactory = ({
projectUserAdditionalPrivilegeDAL,
projectKeyDAL,
userAliasDAL,
licenseService
licenseService,
userId
});
return deletedMembership;
};
/**
 * Bulk-removes organization memberships.
 *
 * Authorization: the acting user must hold the Delete action on the Member
 * subject within the organization.
 *
 * Self-removal protection is enforced inside deleteOrgMembershipsFn, which
 * resolves each membership to its userId within the delete transaction and
 * throws BadRequestError if the actor is among them. (A previous check here
 * compared `membershipIds` — org membership ids — against `userId`, a
 * different id space, so it could never match and has been removed.)
 */
const bulkDeleteOrgMemberships = async ({
  orgId,
  userId,
  membershipIds,
  actorAuthMethod,
  actorOrgId
}: TDeleteOrgMembershipsDTO) => {
  const { permission } = await permissionService.getUserOrgPermission(userId, orgId, actorAuthMethod, actorOrgId);
  ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Member);

  // Pass the actor's userId down so the transactional self-removal guard
  // in deleteOrgMembershipsFn can reject the request and roll back.
  const deletedMemberships = await deleteOrgMembershipsFn({
    orgMembershipIds: membershipIds,
    orgId,
    orgDAL,
    projectMembershipDAL,
    projectUserAdditionalPrivilegeDAL,
    projectKeyDAL,
    userAliasDAL,
    licenseService,
    userId
  });

  return deletedMemberships;
};
const listProjectMembershipsByOrgMembershipId = async ({
orgMembershipId,
orgId,
@@ -1528,6 +1565,7 @@ export const orgServiceFactory = ({
findOrgBySlug,
resendOrgMemberInvitation,
upgradePrivilegeSystem,
notifyInvitedUsers
notifyInvitedUsers,
bulkDeleteOrgMemberships
};
};

View File

@@ -25,6 +25,14 @@ export type TDeleteOrgMembershipDTO = {
actorAuthMethod: ActorAuthMethod;
};
// Request payload for bulk org-membership deletion.
// `userId` is the acting user (used for the org permission check);
// `membershipIds` are org membership ids — not user ids.
export type TDeleteOrgMembershipsDTO = {
userId: string;
orgId: string;
membershipIds: string[];
actorOrgId: string | undefined;
actorAuthMethod: ActorAuthMethod;
};
export type TInviteUserToOrgDTO = {
inviteeEmails: string[];
organizationRoleSlug: string;

View File

@@ -314,11 +314,122 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
}
};
// Returns all project memberships held by the given users within the given
// org, excluding ghost users. Each result carries basic user info, the
// project's id/name/type, and the membership's roles (including temporary
// and custom-role details) nested under `roles`.
const findProjectMembershipsByUserIds = async (orgId: string, userIds: string[]) => {
try {
// Read-only query — served from the replica node.
const docs = await db
.replicaNode()(TableName.ProjectMembership)
.join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
.whereIn(`${TableName.Users}.id`, userIds)
// Scope to the org so memberships in other orgs' projects are excluded.
.where(`${TableName.Project}.orgId`, orgId)
.join<TUserEncryptionKeys>(
TableName.UserEncryptionKey,
`${TableName.UserEncryptionKey}.userId`,
`${TableName.Users}.id`
)
// One row per membership role; collapsed back into one membership with a
// `roles` array by sqlNestRelationships below.
.join(
TableName.ProjectUserMembershipRole,
`${TableName.ProjectUserMembershipRole}.projectMembershipId`,
`${TableName.ProjectMembership}.id`
)
// Left join: a role may not reference a custom role.
.leftJoin(
TableName.ProjectRoles,
`${TableName.ProjectUserMembershipRole}.customRoleId`,
`${TableName.ProjectRoles}.id`
)
.select(
db.ref("id").withSchema(TableName.ProjectMembership),
db.ref("isGhost").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("email").withSchema(TableName.Users),
db.ref("publicKey").withSchema(TableName.UserEncryptionKey),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId"),
db.ref("role").withSchema(TableName.ProjectUserMembershipRole),
db.ref("id").withSchema(TableName.ProjectUserMembershipRole).as("membershipRoleId"),
db.ref("customRoleId").withSchema(TableName.ProjectUserMembershipRole),
db.ref("name").withSchema(TableName.ProjectRoles).as("customRoleName"),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
db.ref("temporaryMode").withSchema(TableName.ProjectUserMembershipRole),
db.ref("isTemporary").withSchema(TableName.ProjectUserMembershipRole),
db.ref("temporaryRange").withSchema(TableName.ProjectUserMembershipRole),
db.ref("temporaryAccessStartTime").withSchema(TableName.ProjectUserMembershipRole),
db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole),
db.ref("name").as("projectName").withSchema(TableName.Project),
db.ref("id").as("projectId").withSchema(TableName.Project),
db.ref("type").as("projectType").withSchema(TableName.Project)
)
// Exclude ghost (system) users from the results.
.where({ isGhost: false });
// Group the per-role rows by membership id; roles become children.
const members = sqlNestRelationships({
data: docs,
parentMapper: ({
email,
firstName,
username,
lastName,
publicKey,
isGhost,
id,
projectId,
projectName,
projectType,
userId
}) => ({
id,
userId,
projectId,
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
project: {
id: projectId,
name: projectName,
type: projectType
}
}),
key: "id",
childrenMapper: [
{
label: "roles" as const,
key: "membershipRoleId",
mapper: ({
role,
customRoleId,
customRoleName,
customRoleSlug,
membershipRoleId,
temporaryRange,
temporaryMode,
temporaryAccessEndTime,
temporaryAccessStartTime,
isTemporary
}) => ({
id: membershipRoleId,
role,
customRoleId,
customRoleName,
customRoleSlug,
temporaryRange,
temporaryMode,
temporaryAccessEndTime,
temporaryAccessStartTime,
isTemporary
})
}
]
});
return members;
} catch (error) {
throw new DatabaseError({ error, name: "Find project memberships by user ids" });
}
};
return {
...projectMemberOrm,
findAllProjectMembers,
findProjectGhostUser,
findMembershipsByUsername,
findProjectMembershipsByUserId
findProjectMembershipsByUserId,
findProjectMembershipsByUserIds
};
};

View File

@@ -13,7 +13,7 @@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
import { TAzureAppConfigurationSyncWithCredentials } from "./azure-app-configuration-sync-types";
type TAzureAppConfigurationSyncFactoryDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

View File

@@ -12,7 +12,7 @@ import { SecretSyncError } from "../secret-sync-errors";
import { GetAzureKeyVaultSecret, TAzureKeyVaultSyncWithCredentials } from "./azure-key-vault-sync-types";
type TAzureKeyVaultSyncFactoryDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

Some files were not shown because too many files have changed in this diff Show More