Compare commits

...

88 Commits

Author SHA1 Message Date
x032205
9b6a315825 Merge pull request #3593 from Infisical/ENG-2742
Fixed project roles not being editable in some cases
2025-05-13 17:10:23 -04:00
x032205
13b2f65b7e lint fix 2025-05-13 16:51:05 -04:00
x032205
6cf1e046b0 Fixed project roles not being editable in some cases 2025-05-13 16:38:26 -04:00
Scott Wilson
f6e1441dc0 Merge pull request #3570 from Infisical/policy-templates
feature(project-roles): Project Role Templates
2025-05-13 12:47:40 -07:00
Scott Wilson
9eeb72ac80 fix: correct import 2025-05-13 12:18:35 -07:00
Scott Wilson
f6e566a028 merge main 2025-05-13 12:10:49 -07:00
x032205
a34c74e958 Merge pull request #3580 from Infisical/feat/return-metadata-with-identity-create
Return metadata with identity post endpoints
2025-05-13 14:22:34 -04:00
x032205
eef7a875a1 Merge pull request #3585 from Infisical/ENG-2748
feat(docs): Self approval
2025-05-13 14:05:59 -04:00
x032205
09938a911b nit fix 2025-05-13 13:58:52 -04:00
x032205
af08c41008 Merge pull request #3567 from Infisical/ENG-2636
feat(secret-sync): OCI Vault
2025-05-13 13:25:11 -04:00
x032205
443c8854ea Merge branch 'main' into ENG-2636 2025-05-13 13:16:59 -04:00
x032205
f7a25e7601 Merge pull request #3592 from Infisical/lint-fix
lint fix
2025-05-13 13:16:06 -04:00
x032205
4c6e5c9c4c lint fix 2025-05-13 13:11:20 -04:00
Maidul Islam
98a4e6c96d Merge pull request #3591 from akhilmhdh/fix/ui-skew
feat: added new cache control for index html
2025-05-13 12:30:50 -04:00
Maidul Islam
c93ce06409 Merge pull request #3589 from Infisical/misc/updated-org-delete-flow
misc: updated org delete flow to clear session
2025-05-13 11:41:09 -04:00
akhilmhdh
672e4baec4 feat: added new cache control for index html 2025-05-13 21:03:15 +05:30
Sheen
b5ef2a6837 Merge pull request #3569 from Infisical/pki-subscriber
Infisical PKI: Subscriber Functionality
2025-05-13 16:34:05 +08:00
Sheen Capadngan
9c611daada misc: updated org delete flow to clear session 2025-05-13 16:09:26 +08:00
x032205
71edb08942 Merge pull request #3587 from Infisical/ENG-2763
Fix approval request ordering
2025-05-12 23:54:27 -04:00
x032205
89d8261a43 Fix approval request ordering 2025-05-12 23:13:57 -04:00
Scott Wilson
a2b2b07185 Merge pull request #3584 from Infisical/sso-page
Improvements(org-settings): Refactor Organization Security Settings to SSO Page
2025-05-12 18:43:35 -07:00
Scott Wilson
76864ababa fix: correct doc casing 2025-05-12 18:37:05 -07:00
x032205
52858dad79 Update docs/documentation/platform/pr-workflows.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-05-12 21:07:57 -04:00
x032205
1d7a6ea50e Update docs/documentation/platform/pr-workflows.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-05-12 21:07:34 -04:00
x032205
c031233247 feat(docs): Self approval 2025-05-12 21:04:05 -04:00
Scott Wilson
d17d40ebd9 improvements: refactor org security settings tab to sso page and update doc images 2025-05-12 17:18:40 -07:00
x032205
70fff1f2da review fixes 2025-05-12 19:38:00 -04:00
x032205
3f8eaa0679 remove schema change 2025-05-12 18:13:14 -04:00
Scott Wilson
50d0035d7b fix: correct remove oci secret if secret value is empty logic 2025-05-12 14:56:13 -07:00
Tuan Dang
9743ad02d5 Fix lint issues 2025-05-12 14:56:00 -07:00
x032205
50f5248e3e Merge branch 'main' into feat/return-metadata-with-identity-create 2025-05-12 17:50:13 -04:00
x032205
8d7b573988 final reviews 2025-05-12 17:39:29 -04:00
Tuan Dang
26d0ab1dc2 Fix lint issues 2025-05-12 14:34:14 -07:00
x032205
4acdbd24e9 remove useless schema 2025-05-12 16:50:47 -04:00
x032205
c3c907788a review fixes 2025-05-12 16:42:48 -04:00
Tuan Dang
bf833a57cd Fix merge conflicts 2025-05-12 12:59:54 -07:00
Tuan Dang
e8519f6612 Revise PR based on review 2025-05-12 12:56:56 -07:00
x032205
0b4675e7b5 Merge branch 'main' into ENG-2636 2025-05-12 14:56:01 -04:00
Daniel Hougaard
07df6803a5 Merge pull request #3581 from Infisical/daniel/unblock-dev
fix: move cli install to aws
2025-05-12 18:54:55 +04:00
Daniel Hougaard
a09d0e8948 fix: move cli install to aws 2025-05-12 18:47:02 +04:00
Daniel Hougaard
ee598560ec Merge pull request #3572 from Infisical/daniel/fix-secret-scaninng-public-keys
fix: update secret scanner to latest version
2025-05-12 11:13:51 +04:00
x032205
2793ac22aa remove duplicate field 2025-05-11 22:27:09 -04:00
x032205
31fad03af8 Return metadata with identity post endpoints 2025-05-09 23:41:11 -04:00
carlosmonastyrski
c629705c9c Merge pull request #3535 from Infisical/feat/addGroupsToSshHosts
feat(ssh-hosts): Add groups to ssh hosts allowed principals
2025-05-09 22:52:35 -03:00
Daniel Hougaard
be10f6e52a Merge pull request #3579 from Infisical/daniel/horizontal-scaling-ms-teams
fix(workflow-integrations): microsoft teams scaling issues
2025-05-10 01:11:37 +04:00
Scott Wilson
40c5ff0ad6 Merge pull request #3578 from Infisical/project-template-improvements
improvement(project-templates): Project templates UI improvements
2025-05-09 13:50:50 -07:00
Daniel Hougaard
ab6a2b7dbb fix(workflow-integrations): microsoft teams scaling issues 2025-05-10 00:47:22 +04:00
carlosmonastyrski
81bfc04e7c Trim hostname input on SSH Host permission form and fix getWorkspaceUsers key invalidation 2025-05-09 17:10:01 -03:00
x032205
a757fceaed Merge pull request #3577 from Infisical/feat/docs-support-openapi-titles
feat(docs): Support OpenAPI titles for Zod descriptions
2025-05-09 15:49:49 -04:00
Daniel Hougaard
859b643e43 Delete ssh 2025-05-09 22:49:39 +04:00
Daniel Hougaard
91f71e0ef6 feat(cli): upgrade secret scanner 2025-05-09 22:48:56 +04:00
x032205
4e9e31eeb7 added credit 2025-05-09 13:45:36 -04:00
x032205
f6bc99b964 support openapi titles for zod description 2025-05-09 13:40:15 -04:00
carlosmonastyrski
aea44088db Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-09 09:21:29 -03:00
x032205
cd71db416d cancel deletion + update on creation for scheduled for deletion secrets 2025-05-09 02:34:50 -04:00
x032205
9d682ca874 added RE2 to regex 2025-05-09 02:10:53 -04:00
x032205
9054db80ad truncation and UI tweaks 2025-05-09 02:05:30 -04:00
x032205
5bb8756c67 only list compartments which the user is authorized to 'use vaults' in 2025-05-09 01:49:34 -04:00
x032205
8b7cb4c4eb Merge branch 'main' into ENG-2636 2025-05-09 01:34:19 -04:00
Daniel Hougaard
e584c9ea95 test 2025-05-09 09:04:30 +04:00
Daniel Hougaard
1921763fa8 fix: update to upcoming version 2025-05-09 04:43:13 +04:00
Daniel Hougaard
5408859a18 fix: update gitleaks/go-diff to latest version 2025-05-09 04:40:09 +04:00
Scott Wilson
5b7627585f improvements: address feedback 2025-05-08 16:17:25 -07:00
Scott Wilson
800ea5ce78 feature: project role templates 2025-05-08 16:02:41 -07:00
Tuan Dang
a6b3be72a9 Make minor PR adjustments 2025-05-08 14:02:25 -07:00
Tuan Dang
531607dcb7 Revise pr based on greptile review 2025-05-08 10:37:33 -07:00
Tuan Dang
182de009b2 Fix lint issues 2025-05-08 10:01:44 -07:00
Tuan Dang
f1651ce171 Rename migration file 2025-05-08 09:10:49 -07:00
Tuan Dang
e1f563dbd4 Fix merge conflicts 2025-05-08 09:07:28 -07:00
Tuan Dang
107cca0b62 Complete preliminary docs for pki subscribers 2025-05-08 08:52:10 -07:00
x032205
72abc08f04 Merge branch 'main' into ENG-2636 2025-05-08 10:29:52 -04:00
x032205
d6b31cde44 greptile review fixes 2025-05-08 01:16:42 -04:00
x032205
2c94f9ec3c revert eslint memory increase 2025-05-08 00:50:31 -04:00
x032205
42ad63b58d increase max old space size for lint:fix 2025-05-08 00:44:03 -04:00
x032205
f2d5112585 Merge branch 'main' into ENG-2636 2025-05-08 00:27:28 -04:00
x032205
9c7b25de49 docs + tweaks 2025-05-08 00:25:19 -04:00
x032205
36954a9df9 secret sync + tweaks 2025-05-07 17:57:00 -04:00
x032205
581840a701 fixed app connection endpoints 2025-05-07 13:53:05 -04:00
x032205
326742c2d5 feat(app-connections): OCI 2025-05-07 10:59:27 -04:00
Daniel Hougaard
c891b8f5d3 fix routing 2025-05-07 03:00:20 +04:00
Tuan Dang
a32bb95703 Start work on PkiSubscriberDetailsByIDPage 2025-05-06 15:46:54 -07:00
Tuan Dang
0410c83cef Fix merge conflicts 2025-05-06 09:46:31 -07:00
Tuan Dang
cf4f2ea6b1 Begin developing pki subscriber 2025-05-06 09:44:57 -07:00
carlosmonastyrski
bf85df7e36 Fix SSH table UI user groups issues 2025-05-06 08:37:19 -03:00
carlosmonastyrski
b9070a8fa3 Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-05 14:51:01 -03:00
carlosmonastyrski
3ea450e94a Add groups to ssh hosts allowed principals fix delete principal row issue 2025-05-02 13:41:53 -03:00
carlosmonastyrski
7d0574087c Add groups to ssh hosts allowed principals bot improvements 2025-05-02 13:36:05 -03:00
carlosmonastyrski
36916704be Add groups to ssh hosts allowed principals 2025-05-02 11:14:43 -03:00
348 changed files with 16363 additions and 6402 deletions


@@ -28,3 +28,15 @@ frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow
docs/cli/commands/user.mdx:generic-api-key:51
frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretOverviewTableRow.tsx:generic-api-key:76
docs/integrations/app-connections/hashicorp-vault.mdx:generic-api-key:188
+cli/detect/config/gitleaks.toml:gcp-api-key:567
+cli/detect/config/gitleaks.toml:gcp-api-key:569
+cli/detect/config/gitleaks.toml:gcp-api-key:570
+cli/detect/config/gitleaks.toml:gcp-api-key:572
+cli/detect/config/gitleaks.toml:gcp-api-key:574
+cli/detect/config/gitleaks.toml:gcp-api-key:575
+cli/detect/config/gitleaks.toml:gcp-api-key:576
+cli/detect/config/gitleaks.toml:gcp-api-key:577
+cli/detect/config/gitleaks.toml:gcp-api-key:578
+cli/detect/config/gitleaks.toml:gcp-api-key:579
+cli/detect/config/gitleaks.toml:gcp-api-key:581
+cli/detect/config/gitleaks.toml:gcp-api-key:582


@@ -133,8 +133,8 @@ RUN apt-get update && apt-get install -y \
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-&& apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user


@@ -127,8 +127,8 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-&& apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /


@@ -54,8 +54,8 @@ COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
-curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-apt-get update && apt-get install -y infisical=0.8.1 git
+curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
+apt-get update && apt-get install -y infisical=0.41.2 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js


@@ -55,9 +55,9 @@ RUN mkdir -p /etc/softhsm2/tokens && \
# ? App setup
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
-apt-get install -y infisical=0.8.1
+apt-get install -y infisical=0.41.2
WORKDIR /app


@@ -64,9 +64,9 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# ? App setup
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
-apt-get install -y infisical=0.8.1
+apt-get install -y infisical=0.41.2
WORKDIR /app

backend/package-lock.json (generated, 1952 changed lines): diff suppressed because it is too large.


@@ -38,8 +38,8 @@
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
"lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
@@ -209,6 +209,7 @@
"mysql2": "^3.9.8",
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"oci-sdk": "^2.108.0",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
@@ -241,6 +242,6 @@
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
"zod-to-json-schema": "^3.24.5"
}
}


@@ -80,6 +80,7 @@ import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
+import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@@ -232,6 +233,7 @@ declare module "fastify" {
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
pkiCollection: TPkiCollectionServiceFactory;
+pkiSubscriber: TPkiSubscriberServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;


@@ -209,6 +209,9 @@ import {
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate,
+TPkiSubscribers,
+TPkiSubscribersInsert,
+TPkiSubscribersUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
@@ -564,6 +567,11 @@ declare module "knex/types/tables" {
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate
>;
+[TableName.PkiSubscriber]: KnexOriginal.CompositeTableType<
+  TPkiSubscribers,
+  TPkiSubscribersInsert,
+  TPkiSubscribersUpdate
+>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,


@@ -0,0 +1,22 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId"))) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.uuid("groupId").nullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.unique(["sshHostLoginUserId", "groupId"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId")) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.dropUnique(["sshHostLoginUserId", "groupId"]);
t.dropColumn("groupId");
});
}
}


@@ -0,0 +1,46 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.PkiSubscriber))) {
await knex.schema.createTable(TableName.PkiSubscriber, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("caId").nullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("SET NULL");
t.string("name").notNullable();
t.string("commonName").notNullable();
t.specificType("subjectAlternativeNames", "text[]").notNullable();
t.string("ttl").notNullable();
t.specificType("keyUsages", "text[]").notNullable();
t.specificType("extendedKeyUsages", "text[]").notNullable();
t.string("status").notNullable(); // active / disabled
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.PkiSubscriber);
}
const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
if (!hasSubscriberCol) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("pkiSubscriberId").nullable();
t.foreign("pkiSubscriberId").references("id").inTable(TableName.PkiSubscriber).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
if (hasSubscriberCol) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("pkiSubscriberId");
});
}
await knex.schema.dropTableIfExists(TableName.PkiSubscriber);
await dropOnUpdateTrigger(knex, TableName.PkiSubscriber);
}


@@ -24,7 +24,8 @@ export const CertificatesSchema = z.object({
caCertId: z.string().uuid(),
certificateTemplateId: z.string().uuid().nullable().optional(),
keyUsages: z.string().array().nullable().optional(),
-extendedKeyUsages: z.string().array().nullable().optional()
+extendedKeyUsages: z.string().array().nullable().optional(),
+pkiSubscriberId: z.string().uuid().nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;


@@ -69,6 +69,7 @@ export * from "./organizations";
export * from "./pki-alerts";
export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./pki-subscribers";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-gateways";


@@ -21,6 +21,7 @@ export enum TableName {
CertificateBody = "certificate_bodies",
CertificateSecret = "certificate_secrets",
CertificateTemplate = "certificate_templates",
PkiSubscriber = "pki_subscribers",
PkiAlert = "pki_alerts",
PkiCollection = "pki_collections",
PkiCollectionItem = "pki_collection_items",


@@ -0,0 +1,27 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const PkiSubscribersSchema = z.object({
+  id: z.string().uuid(),
+  createdAt: z.date(),
+  updatedAt: z.date(),
+  projectId: z.string(),
+  caId: z.string().uuid().nullable().optional(),
+  name: z.string(),
+  commonName: z.string(),
+  subjectAlternativeNames: z.string().array(),
+  ttl: z.string(),
+  keyUsages: z.string().array(),
+  extendedKeyUsages: z.string().array(),
+  status: z.string()
+});
+
+export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;
+export type TPkiSubscribersInsert = Omit<z.input<typeof PkiSubscribersSchema>, TImmutableDBKeys>;
+export type TPkiSubscribersUpdate = Partial<Omit<z.input<typeof PkiSubscribersSchema>, TImmutableDBKeys>>;


@@ -12,7 +12,8 @@ export const SshHostLoginUserMappingsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
sshHostLoginUserId: z.string().uuid(),
-userId: z.string().uuid().nullable().optional()
+userId: z.string().uuid().nullable().optional(),
+groupId: z.string().uuid().nullable().optional()
});
export type TSshHostLoginUserMappings = z.infer<typeof SshHostLoginUserMappingsSchema>;


@@ -73,7 +73,7 @@ export const registerSshHostRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
-const host = await server.services.sshHost.getSshHost({
+const host = await server.services.sshHost.getSshHostById({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,


@@ -19,7 +19,7 @@ import { TProjectPermission } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { TCreateAppConnectionDTO, TUpdateAppConnectionDTO } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
-import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
+import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
@@ -254,6 +254,13 @@ export enum EventType {
GET_PKI_COLLECTION_ITEMS = "get-pki-collection-items",
ADD_PKI_COLLECTION_ITEM = "add-pki-collection-item",
DELETE_PKI_COLLECTION_ITEM = "delete-pki-collection-item",
+CREATE_PKI_SUBSCRIBER = "create-pki-subscriber",
+UPDATE_PKI_SUBSCRIBER = "update-pki-subscriber",
+DELETE_PKI_SUBSCRIBER = "delete-pki-subscriber",
+GET_PKI_SUBSCRIBER = "get-pki-subscriber",
+ISSUE_PKI_SUBSCRIBER_CERT = "issue-pki-subscriber-cert",
+SIGN_PKI_SUBSCRIBER_CERT = "sign-pki-subscriber-cert",
+LIST_PKI_SUBSCRIBER_CERTS = "list-pki-subscriber-certs",
CREATE_KMS = "create-kms",
UPDATE_KMS = "update-kms",
DELETE_KMS = "delete-kms",
@@ -1965,6 +1972,77 @@ interface DeletePkiCollectionItem {
};
}
+interface CreatePkiSubscriber {
+  type: EventType.CREATE_PKI_SUBSCRIBER;
+  metadata: {
+    pkiSubscriberId: string;
+    caId?: string;
+    name: string;
+    commonName: string;
+    ttl: string;
+    subjectAlternativeNames: string[];
+    keyUsages: CertKeyUsage[];
+    extendedKeyUsages: CertExtendedKeyUsage[];
+  };
+}
+
+interface UpdatePkiSubscriber {
+  type: EventType.UPDATE_PKI_SUBSCRIBER;
+  metadata: {
+    pkiSubscriberId: string;
+    caId?: string;
+    name?: string;
+    commonName?: string;
+    ttl?: string;
+    subjectAlternativeNames?: string[];
+    keyUsages?: CertKeyUsage[];
+    extendedKeyUsages?: CertExtendedKeyUsage[];
+  };
+}
+
+interface DeletePkiSubscriber {
+  type: EventType.DELETE_PKI_SUBSCRIBER;
+  metadata: {
+    pkiSubscriberId: string;
+    name: string;
+  };
+}
+
+interface GetPkiSubscriber {
+  type: EventType.GET_PKI_SUBSCRIBER;
+  metadata: {
+    pkiSubscriberId: string;
+    name: string;
+  };
+}
+
+interface IssuePkiSubscriberCert {
+  type: EventType.ISSUE_PKI_SUBSCRIBER_CERT;
+  metadata: {
+    subscriberId: string;
+    name: string;
+    serialNumber: string;
+  };
+}
+
+interface SignPkiSubscriberCert {
+  type: EventType.SIGN_PKI_SUBSCRIBER_CERT;
+  metadata: {
+    subscriberId: string;
+    name: string;
+    serialNumber: string;
+  };
+}
+
+interface ListPkiSubscriberCerts {
+  type: EventType.LIST_PKI_SUBSCRIBER_CERTS;
+  metadata: {
+    subscriberId: string;
+    name: string;
+    projectId: string;
+  };
+}
interface CreateKmsEvent {
type: EventType.CREATE_KMS;
metadata: {
@@ -2928,6 +3006,13 @@ export type Event =
| GetPkiCollectionItems
| AddPkiCollectionItem
| DeletePkiCollectionItem
+| CreatePkiSubscriber
+| UpdatePkiSubscriber
+| DeletePkiSubscriber
+| GetPkiSubscriber
+| IssuePkiSubscriberCert
+| SignPkiSubscriberCert
+| ListPkiSubscriberCerts
| CreateKmsEvent
| UpdateKmsEvent
| DeleteKmsEvent


@@ -157,10 +157,23 @@ export const groupDALFactory = (db: TDbClient) => {
}
};
+  const findGroupsByProjectId = async (projectId: string, tx?: Knex) => {
+    try {
+      const docs = await (tx || db.replicaNode())(TableName.Groups)
+        .join(TableName.GroupProjectMembership, `${TableName.Groups}.id`, `${TableName.GroupProjectMembership}.groupId`)
+        .where(`${TableName.GroupProjectMembership}.projectId`, projectId)
+        .select(selectAllTableCols(TableName.Groups));
+      return docs;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find groups by project id" });
+    }
+  };
return {
findGroups,
findByOrgId,
findAllGroupPossibleMembers,
+findGroupsByProjectId,
...groupOrm
};
};


@@ -176,7 +176,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.Groups).as("groupName"),
db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"),
db.ref("firstName").withSchema(TableName.Users).as("firstName"),
db.ref("lastName").withSchema(TableName.Users).as("lastName")
db.ref("lastName").withSchema(TableName.Users).as("lastName"),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
return docs;


@@ -9,6 +9,7 @@ import {
ProjectPermissionIdentityActions,
ProjectPermissionKmipActions,
ProjectPermissionMemberActions,
+ProjectPermissionPkiSubscriberActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretSyncActions,
@@ -76,6 +77,18 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SshHosts
);
+  can(
+    [
+      ProjectPermissionPkiSubscriberActions.Edit,
+      ProjectPermissionPkiSubscriberActions.Read,
+      ProjectPermissionPkiSubscriberActions.Create,
+      ProjectPermissionPkiSubscriberActions.Delete,
+      ProjectPermissionPkiSubscriberActions.IssueCert,
+      ProjectPermissionPkiSubscriberActions.ListCerts
+    ],
+    ProjectPermissionSub.PkiSubscribers
+  );
can(
[
ProjectPermissionMemberActions.Create,
@@ -338,6 +351,7 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificateTemplates);
can([ProjectPermissionSshHostActions.Read], ProjectPermissionSub.SshHosts);
+can([ProjectPermissionPkiSubscriberActions.Read], ProjectPermissionSub.PkiSubscribers);
can(
[


@@ -132,7 +132,7 @@ export const permissionDALFactory = (db: TDbClient) => {
}
};
-const getProjectGroupPermissions = async (projectId: string) => {
+const getProjectGroupPermissions = async (projectId: string, filterGroupId?: string) => {
try {
const docs = await db
.replicaNode()(TableName.GroupProjectMembership)
@@ -148,6 +148,11 @@ export const permissionDALFactory = (db: TDbClient) => {
`groupCustomRoles.id`
)
.where(`${TableName.GroupProjectMembership}.projectId`, "=", projectId)
+.where((bd) => {
+  if (filterGroupId) {
+    void bd.where(`${TableName.GroupProjectMembership}.groupId`, "=", filterGroupId);
+  }
+})
.select(
db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
db.ref("id").withSchema(TableName.Groups).as("groupId"),


@@ -630,6 +630,34 @@ export const permissionServiceFactory = ({
return { permission };
};
+  const checkGroupProjectPermission = async ({
+    groupId,
+    projectId,
+    checkPermissions
+  }: {
+    groupId: string;
+    projectId: string;
+    checkPermissions: ProjectPermissionSet;
+  }) => {
+    const rawGroupProjectPermissions = await permissionDAL.getProjectGroupPermissions(projectId, groupId);
+    const groupPermissions = rawGroupProjectPermissions.map((groupProjectPermission) => {
+      const rolePermissions =
+        groupProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || [];
+      const rules = buildProjectPermissionRules(rolePermissions);
+      const permission = createMongoAbility<ProjectPermissionSet>(rules, {
+        conditionsMatcher
+      });
+      return {
+        permission,
+        id: groupProjectPermission.groupId,
+        name: groupProjectPermission.username,
+        membershipId: groupProjectPermission.id
+      };
+    });
+    return groupPermissions.some((groupPermission) => groupPermission.permission.can(...checkPermissions));
+  };
return {
getUserOrgPermission,
getOrgPermission,
@@ -639,6 +667,7 @@ export const permissionServiceFactory = ({
getOrgPermissionByRole,
getProjectPermissionByRole,
buildOrgPermission,
-buildProjectPermissionRules
+buildProjectPermissionRules,
+checkGroupProjectPermission
};
};


@@ -87,6 +87,15 @@ export enum ProjectPermissionSshHostActions {
IssueHostCert = "issue-host-cert"
}
+export enum ProjectPermissionPkiSubscriberActions {
+  Read = "read",
+  Create = "create",
+  Edit = "edit",
+  Delete = "delete",
+  IssueCert = "issue-cert",
+  ListCerts = "list-certs"
+}
export enum ProjectPermissionSecretSyncActions {
Read = "read",
Create = "create",
@@ -143,6 +152,7 @@ export enum ProjectPermissionSub {
SshCertificateTemplates = "ssh-certificate-templates",
SshHosts = "ssh-hosts",
SshHostGroups = "ssh-host-groups",
PkiSubscribers = "pki-subscribers",
PkiAlerts = "pki-alerts",
PkiCollections = "pki-collections",
Kms = "kms",
@@ -190,6 +200,11 @@ export type SshHostSubjectFields = {
hostname: string;
};
+export type PkiSubscriberSubjectFields = {
+  name: string;
+  // (dangtony98): consider adding [commonName] as a subject field in the future
+};
export type ProjectPermissionSet =
| [
ProjectPermissionSecretActions,
@@ -249,6 +264,13 @@ export type ProjectPermissionSet =
ProjectPermissionSshHostActions,
ProjectPermissionSub.SshHosts | (ForcedSubject<ProjectPermissionSub.SshHosts> & SshHostSubjectFields)
]
+  | [
+      ProjectPermissionPkiSubscriberActions,
+      (
+        | ProjectPermissionSub.PkiSubscribers
+        | (ForcedSubject<ProjectPermissionSub.PkiSubscribers> & PkiSubscriberSubjectFields)
+      )
+    ]
| [ProjectPermissionActions, ProjectPermissionSub.SshHostGroups]
| [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts]
| [ProjectPermissionActions, ProjectPermissionSub.PkiCollections]
@@ -399,6 +421,21 @@ const SshHostConditionSchema = z
})
.partial();
+const PkiSubscriberConditionSchema = z
+  .object({
+    name: z.union([
+      z.string(),
+      z
+        .object({
+          [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
+          [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB],
+          [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
+        })
+        .partial()
+    ])
+  })
+  .partial();
const GeneralPermissionSchema = [
z.object({
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
@@ -663,6 +700,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
+z.object({
+  subject: z.literal(ProjectPermissionSub.PkiSubscribers).describe("The entity this permission pertains to."),
+  action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionPkiSubscriberActions).describe(
+    "Describe what action an entity can take."
+  ),
+  inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
+  conditions: PkiSubscriberConditionSchema.describe(
+    "When specified, only matching conditions will be allowed to access given resource."
+  ).optional()
+}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),


@@ -334,7 +334,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecret).as("commitSecretId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecret).as("commitId"),
db.raw(
-`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+`DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
@@ -483,7 +483,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitSecretId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitId"),
db.raw(
-`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+`DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),


@@ -28,6 +28,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroup}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -35,7 +36,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.SshHostGroup),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
)
.orderBy(`${TableName.SshHostGroup}.updatedAt`, "desc");
@@ -69,7 +71,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
}
}));
return {
@@ -99,6 +102,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroup}.id`, sshHostGroupId)
.select(
db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -106,7 +110,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.SshHostGroup),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
if (rows.length === 0) return null;
@@ -121,7 +126,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
}
}));


@@ -12,6 +12,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
+import { TGroupDALFactory } from "../group/group-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { createSshLoginMappings } from "../ssh-host/ssh-host-fns";
import {
@@ -43,8 +44,12 @@ type TSshHostGroupServiceFactoryDep = {
sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction" | "delete">;
sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
userDAL: Pick<TUserDALFactory, "find">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
permissionService: Pick<
TPermissionServiceFactory,
"getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
>;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
};
export type TSshHostGroupServiceFactory = ReturnType<typeof sshHostGroupServiceFactory>;
@@ -58,7 +63,8 @@ export const sshHostGroupServiceFactory = ({
sshHostLoginUserMappingDAL,
userDAL,
permissionService,
-licenseService
+licenseService,
+groupDAL
}: TSshHostGroupServiceFactoryDep) => {
const createSshHostGroup = async ({
projectId,
@@ -127,6 +133,7 @@ export const sshHostGroupServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
+groupDAL,
userDAL,
permissionService,
projectId,
@@ -179,13 +186,42 @@ export const sshHostGroupServiceFactory = ({
});
const updatedSshHostGroup = await sshHostGroupDAL.transaction(async (tx) => {
-await sshHostGroupDAL.updateById(
-sshHostGroupId,
-{
-name
-},
-tx
-);
+if (name && name !== sshHostGroup.name) {
+  // (dangtony98): room to optimize check to ensure that
+  // the SSH host group name is unique across the whole org
+  const project = await projectDAL.findById(sshHostGroup.projectId, tx);
+  if (!project) throw new NotFoundError({ message: `Project with ID '${sshHostGroup.projectId}' not found` });
+
+  const projects = await projectDAL.find(
+    {
+      orgId: project.orgId
+    },
+    { tx }
+  );
+
+  const existingSshHostGroup = await sshHostGroupDAL.find(
+    {
+      name,
+      $in: {
+        projectId: projects.map((p) => p.id)
+      }
+    },
+    { tx }
+  );
+
+  if (existingSshHostGroup.length) {
+    throw new BadRequestError({
+      message: `SSH host group with name '${name}' already exists in the organization`
+    });
+  }
+
+  await sshHostGroupDAL.updateById(
+    sshHostGroupId,
+    {
+      name
+    },
+    tx
+  );
+}
if (loginMappings) {
await sshHostLoginUserDAL.delete({ sshHostGroupId: sshHostGroup.id }, tx);
if (loginMappings.length) {
@@ -194,6 +230,7 @@ export const sshHostGroupServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
+groupDAL,
userDAL,
permissionService,
projectId: sshHostGroup.projectId,


@@ -9,12 +9,7 @@ export type TCreateSshHostGroupDTO = {
export type TUpdateSshHostGroupDTO = {
sshHostGroupId: string;
name?: string;
-loginMappings?: {
-loginUser: string;
-allowedPrincipals: {
-usernames: string[];
-};
-}[];
+loginMappings?: TLoginMapping[];
} & Omit<TProjectPermission, "projectId">;
export type TGetSshHostGroupDTO = {


@@ -31,8 +31,18 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SshHostLoginUserMapping}.userId`)
+.leftJoin(
+  TableName.UserGroupMembership,
+  `${TableName.UserGroupMembership}.groupId`,
+  `${TableName.SshHostLoginUserMapping}.groupId`
+)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
-.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+.andWhere((bd) => {
+  void bd
+    .where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+    .orWhere(`${TableName.UserGroupMembership}.userId`, userId);
+})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -58,8 +68,17 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.join(TableName.SshHost, `${TableName.SshHostGroupMembership}.sshHostId`, `${TableName.SshHost}.id`)
+.leftJoin(
+  TableName.UserGroupMembership,
+  `${TableName.UserGroupMembership}.groupId`,
+  `${TableName.SshHostLoginUserMapping}.groupId`
+)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
-.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+.andWhere((bd) => {
+  void bd
+    .where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+    .orWhere(`${TableName.UserGroupMembership}.userId`, userId);
+})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -133,6 +152,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -144,6 +164,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug"),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
@@ -163,10 +184,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.select(
db.ref("sshHostId").withSchema(TableName.SshHostGroupMembership),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
)
.whereIn(`${TableName.SshHostGroupMembership}.sshHostId`, hostIds);
@@ -185,7 +208,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -197,7 +221,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(inheritedGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));
@@ -229,6 +254,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.id`, sshHostId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -241,7 +267,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
db.ref("hostSshCaId").withSchema(TableName.SshHost),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
if (rows.length === 0) return null;
@@ -257,7 +284,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(directGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -275,10 +303,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroupMembership}.sshHostId`, sshHostId)
.select(
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
const groupGrouped = groupBy(
@@ -289,7 +319,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(groupGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));


@@ -3,6 +3,7 @@ import { Knex } from "knex";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
+import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "../permission/project-permission";
import { TCreateSshLoginMappingsDTO } from "./ssh-host-types";
/**
@@ -15,6 +16,7 @@ export const createSshLoginMappings = async ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
+groupDAL,
userDAL,
permissionService,
projectId,
@@ -35,7 +37,7 @@ export const createSshLoginMappings = async ({
tx
);
-if (allowedPrincipals.usernames.length > 0) {
+if (allowedPrincipals.usernames && allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
@@ -74,6 +76,41 @@ export const createSshLoginMappings = async ({
tx
);
}
+if (allowedPrincipals.groups && allowedPrincipals.groups.length > 0) {
+  const projectGroups = await groupDAL.findGroupsByProjectId(projectId);
+  const groups = projectGroups.filter((g) => allowedPrincipals.groups?.includes(g.slug));
+
+  if (groups.length !== allowedPrincipals.groups?.length) {
+    throw new BadRequestError({
+      message: `Invalid group slugs: ${allowedPrincipals.groups
+        .filter((g) => !projectGroups.some((pg) => pg.slug === g))
+        .join(", ")}`
+    });
+  }
+
+  for await (const group of groups) {
+    // check that each group has access to the SSH project and have read access to hosts
+    const hasPermission = await permissionService.checkGroupProjectPermission({
+      groupId: group.id,
+      projectId,
+      checkPermissions: [ProjectPermissionSshHostActions.Read, ProjectPermissionSub.SshHosts]
+    });
+    if (!hasPermission) {
+      throw new BadRequestError({
+        message: `Group ${group.slug} does not have access to the SSH project`
+      });
+    }
+  }
+
+  await sshHostLoginUserMappingDAL.insertMany(
+    groups.map((group) => ({
+      sshHostLoginUserId: sshHostLoginUser.id,
+      groupId: group.id
+    })),
+    tx
+  );
+}
}
};


@@ -15,7 +15,24 @@ export const sanitizedSshHost = SshHostsSchema.pick({
export const loginMappingSchema = z.object({
loginUser: z.string().trim(),
-allowedPrincipals: z.object({
-usernames: z.array(z.string().trim()).transform((usernames) => Array.from(new Set(usernames)))
-})
+allowedPrincipals: z
+  .object({
+    usernames: z
+      .array(z.string().trim())
+      .transform((usernames) => Array.from(new Set(usernames)))
+      .optional(),
+    groups: z
+      .array(z.string().trim())
+      .transform((groups) => Array.from(new Set(groups)))
+      .optional()
+  })
+  .refine(
+    (data) => {
+      return (data.usernames && data.usernames.length > 0) || (data.groups && data.groups.length > 0);
+    },
+    {
+      message: "At least one username or group must be provided",
+      path: ["allowedPrincipals"]
+    }
+  )
});


@@ -1,6 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
+import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
@@ -19,6 +20,7 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
+import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
import {
convertActorToPrincipals,
createSshCert,
@@ -39,12 +41,14 @@ import {
type TSshHostServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findById" | "find">;
+groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
+userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "findGroupMembershipsByUserIdInOrg">;
sshHostDAL: Pick<
TSshHostDALFactory,
| "transaction"
@@ -58,7 +62,10 @@ type TSshHostServiceFactoryDep = {
>;
sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
permissionService: Pick<
TPermissionServiceFactory,
"getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
@@ -66,6 +73,8 @@ export type TSshHostServiceFactory = ReturnType<typeof sshHostServiceFactory>;
export const sshHostServiceFactory = ({
userDAL,
+userGroupMembershipDAL,
+groupDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -208,6 +217,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
+groupDAL,
userDAL,
permissionService,
projectId,
@@ -278,6 +288,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
+groupDAL,
userDAL,
permissionService,
projectId: host.projectId,
@@ -324,7 +335,7 @@ export const sshHostServiceFactory = ({
return host;
};
-const getSshHost = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TGetSshHostDTO) => {
+const getSshHostById = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TGetSshHostDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
@@ -387,10 +398,14 @@ export const sshHostServiceFactory = ({
userDAL
});
+const userGroups = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(actorId, actorOrgId);
+const userGroupSlugs = userGroups.map((g) => g.groupSlug);
const mapping = host.loginMappings.find(
(m) =>
m.loginUser === loginUser &&
-m.allowedPrincipals.usernames.some((allowed) => internalPrincipals.includes(allowed))
+(m.allowedPrincipals.usernames?.some((allowed) => internalPrincipals.includes(allowed)) ||
+m.allowedPrincipals.groups?.some((allowed) => userGroupSlugs.includes(allowed)))
);
if (!mapping) {
@@ -616,7 +631,7 @@ export const sshHostServiceFactory = ({
createSshHost,
updateSshHost,
deleteSshHost,
-getSshHost,
+getSshHostById,
issueSshHostUserCert,
issueSshHostHostCert,
getSshHostUserCaPk,


@@ -7,12 +7,15 @@ import { TProjectPermission } from "@app/lib/types";
import { ActorAuthMethod } from "@app/services/auth/auth-type";
import { TUserDALFactory } from "@app/services/user/user-dal";
+import { TGroupDALFactory } from "../group/group-dal";
export type TListSshHostsDTO = Omit<TProjectPermission, "projectId">;
export type TLoginMapping = {
loginUser: string;
allowedPrincipals: {
-usernames: string[];
+usernames?: string[];
+groups?: string[];
};
};
@@ -63,7 +66,8 @@ type BaseCreateSshLoginMappingsDTO = {
sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction">;
sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
userDAL: Pick<TUserDALFactory, "find">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission" | "checkGroupProjectPermission">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectId: string;
actorAuthMethod: ActorAuthMethod;
actorOrgId: string;


@@ -46,6 +46,7 @@ export enum ApiDocsTags {
PkiCertificateTemplates = "PKI Certificate Templates",
PkiCertificateCollections = "PKI Certificate Collections",
PkiAlerting = "PKI Alerting",
PkiSubscribers = "PKI Subscribers",
SshCertificates = "SSH Certificates",
SshCertificateAuthorities = "SSH Certificate Authorities",
SshCertificateTemplates = "SSH Certificate Templates",
@@ -639,6 +640,9 @@ export const PROJECTS = {
commonName: "The common name of the certificate to filter by.",
offset: "The offset to start from. If you enter 10, it will start from the 10th certificate.",
limit: "The number of certificates to return."
-}
+},
+LIST_PKI_SUBSCRIBERS: {
+  projectId: "The ID of the project to list PKI subscribers for."
+}
} as const;
@@ -1478,7 +1482,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project.",
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project.",
userSshCaId:
"The ID of the SSH CA to use for user certificates. If not specified, the default user SSH CA will be used if it exists.",
hostSshCaId:
@@ -1493,7 +1497,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project."
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project."
},
DELETE: {
sshHostId: "The ID of the SSH host to delete."
@@ -1731,6 +1735,67 @@ export const ALERTS = {
}
};
+export const PKI_SUBSCRIBERS = {
+  GET: {
+    subscriberName: "The name of the PKI subscriber to get.",
+    projectId: "The ID of the project to get the PKI subscriber for."
+  },
+  CREATE: {
+    projectId: "The ID of the project to create the PKI subscriber in.",
+    caId: "The ID of the CA that will issue certificates for the PKI subscriber.",
+    name: "The name of the PKI subscriber.",
+    commonName: "The common name (CN) to be used on certificates issued for this subscriber.",
+    status: "The status of the PKI subscriber. This can be one of active or disabled.",
+    ttl: "The time to live for the certificates issued for this subscriber such as 1m, 1h, 1d, 1y, ...",
+    subjectAlternativeNames:
+      "A list of Subject Alternative Names (SANs) to be used on certificates issued for this subscriber; these can be host names or email addresses.",
+    keyUsages: "The key usage extension to be used on certificates issued for this subscriber.",
+    extendedKeyUsages: "The extended key usage extension to be used on certificates issued for this subscriber."
+  },
+  UPDATE: {
+    projectId: "The ID of the project to update the PKI subscriber in.",
+    subscriberName: "The name of the PKI subscriber to update.",
+    caId: "The ID of the CA that will issue certificates for the PKI subscriber to update to.",
+    name: "The name of the PKI subscriber to update to.",
+    commonName: "The common name (CN) to be used on certificates issued for this subscriber to update to.",
+    status: "The status of the PKI subscriber to update to. This can be one of active or disabled.",
+    ttl: "The time to live for the certificates issued for this subscriber such as 1m, 1h, 1d, 1y, ...",
+    subjectAlternativeNames:
+      "A comma-delimited list of Subject Alternative Names (SANs) to be used on certificates issued for this subscriber; these can be host names or email addresses.",
+    keyUsages: "The key usage extension to be used on certificates issued for this subscriber to update to.",
+    extendedKeyUsages:
+      "The extended key usage extension to be used on certificates issued for this subscriber to update to."
+  },
+  DELETE: {
+    subscriberName: "The name of the PKI subscriber to delete.",
+    projectId: "The ID of the project of the PKI subscriber to delete."
+  },
+  ISSUE_CERT: {
+    subscriberName: "The name of the PKI subscriber to issue the certificate for.",
+    projectId: "The ID of the project of the PKI subscriber to issue the certificate for.",
+    certificate: "The issued certificate.",
+    issuingCaCertificate: "The certificate of the issuing CA.",
+    certificateChain: "The certificate chain of the issued certificate.",
+    privateKey: "The private key of the issued certificate.",
+    serialNumber: "The serial number of the issued certificate."
+  },
+  SIGN_CERT: {
+    subscriberName: "The name of the PKI subscriber to sign the certificate for.",
+    projectId: "The ID of the project of the PKI subscriber to sign the certificate for.",
+    csr: "The CSR to be used to sign the certificate.",
+    certificate: "The signed certificate.",
+    issuingCaCertificate: "The certificate of the issuing CA.",
+    certificateChain: "The certificate chain of the signed certificate.",
+    serialNumber: "The serial number of the signed certificate."
+  },
+  LIST_CERTS: {
+    subscriberName: "The name of the PKI subscriber to list the certificates for.",
+    projectId: "The ID of the project of the PKI subscriber to list the certificates for.",
+    offset: "The offset to start from.",
+    limit: "The number of certificates to return."
+  }
+};
export const PKI_COLLECTIONS = {
CREATE: {
projectId: "The ID of the project to create the PKI collection in.",
@@ -1974,6 +2039,13 @@ export const AppConnections = {
AZURE_CLIENT_SECRETS: {
code: "The OAuth code to use to connect with Azure Client Secrets.",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets."
},
OCI: {
userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
tenancyOcid: "The OCID (Oracle Cloud Identifier) of the tenancy in Oracle Cloud Infrastructure.",
region: "The region identifier in Oracle Cloud Infrastructure where the vault is located.",
fingerprint: "The fingerprint of the public key uploaded to the user's API keys.",
privateKey: "The private key content in PEM format used to sign API requests."
}
}
};
@@ -2121,6 +2193,11 @@ export const SecretSyncs = {
TEAMCITY: {
project: "The TeamCity project to sync secrets to.",
buildConfig: "The TeamCity build configuration to sync secrets to."
},
OCI_VAULT: {
compartmentOcid: "The OCID (Oracle Cloud Identifier) of the compartment where the vault is located.",
vaultOcid: "The OCID (Oracle Cloud Identifier) of the vault to sync secrets to.",
keyOcid: "The OCID (Oracle Cloud Identifier) of the encryption key to use when creating secrets in the vault."
}
}
};

View File

@@ -5,7 +5,7 @@
import type { FastifySchema, FastifySchemaCompiler, FastifyTypeProvider } from "fastify";
import type { FastifySerializerCompiler } from "fastify/types/schema";
import type { z, ZodAny, ZodTypeAny } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { PostProcessCallback, zodToJsonSchema } from "zod-to-json-schema";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type FreeformRecord = Record<string, any>;
@@ -28,9 +28,25 @@ interface Schema extends FastifySchema {
hide?: boolean;
}
// Credit: https://github.com/StefanTerdell/zod-to-json-schema
const jsonDescription: PostProcessCallback = (jsonSchema, def) => {
if (def.description) {
try {
return {
...jsonSchema,
description: undefined,
...JSON.parse(def.description)
};
} catch {}
}
return jsonSchema;
};
const zodToJsonSchemaOptions = {
target: "openApi3",
$refStrategy: "none"
$refStrategy: "none",
postProcess: jsonDescription
} as const;
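// A minimal sketch of what the postProcess hook enables (the `region` field and its
// values are illustrative): embedding JSON in a Zod .describe() call lets the
// jsonDescription callback merge extra OpenAPI keywords (e.g. `example`) into the
// generated schema instead of emitting the raw string as the description.
import { z as zod } from "zod";
const exampleSchema = zod.object({
  region: zod
    .string()
    .describe(JSON.stringify({ description: "OCI region", example: "us-ashburn-1" }))
});
// -> { type: "string", description: "OCI region", example: "us-ashburn-1" }
const exampleJsonSchema = zodToJsonSchema(exampleSchema, zodToJsonSchemaOptions);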
// eslint-disable-next-line @typescript-eslint/no-explicit-any

View File

@@ -57,7 +57,9 @@ export const registerServeUI = async (
reply.callNotFound();
return;
}
return reply.sendFile("index.html");
// reference: https://github.com/fastify/fastify-static?tab=readme-ov-file#managing-cache-control-headers
// to avoid UI bundle skew on new deployments
return reply.sendFile("index.html", { maxAge: 0, immutable: false });
}
});
}
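// A minimal sketch (registration options assumed, not taken from this repo) of the
// caching split this change relies on: content-hashed bundles get long-lived
// immutable caching, while index.html is always revalidated (max-age=0) so clients
// pick up new bundle URLs immediately after a deployment.
import path from "node:path";
import Fastify from "fastify";
import fastifyStatic from "@fastify/static";
const app = Fastify();
await app.register(fastifyStatic, {
  root: path.join(process.cwd(), "frontend"),
  maxAge: "365d", // hashed assets are safe to cache long-term
  immutable: true
});
// the HTML shell itself is served with Cache-Control: public, max-age=0
app.setNotFoundHandler((_req, reply) => reply.sendFile("index.html", { maxAge: 0, immutable: false }));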

View File

@@ -197,6 +197,8 @@ import { pkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-servic
import { pkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
import { pkiCollectionItemDALFactory } from "@app/services/pki-collection/pki-collection-item-dal";
import { pkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { pkiSubscriberDALFactory } from "@app/services/pki-subscriber/pki-subscriber-dal";
import { pkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
import { projectDALFactory } from "@app/services/project/project-dal";
import { projectQueueFactory } from "@app/services/project/project-queue";
import { projectServiceFactory } from "@app/services/project/project-service";
@@ -828,6 +830,7 @@ export const registerRoutes = async (
const pkiAlertDAL = pkiAlertDALFactory(db);
const pkiCollectionDAL = pkiCollectionDALFactory(db);
const pkiCollectionItemDAL = pkiCollectionItemDALFactory(db);
const pkiSubscriberDAL = pkiSubscriberDALFactory(db);
const certificateService = certificateServiceFactory({
certificateDAL,
@@ -870,6 +873,8 @@ export const registerRoutes = async (
const sshHostService = sshHostServiceFactory({
userDAL,
groupDAL,
userGroupMembershipDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -892,7 +897,8 @@ export const registerRoutes = async (
sshHostLoginUserMappingDAL,
userDAL,
permissionService,
licenseService
licenseService,
groupDAL
});
const certificateAuthorityService = certificateAuthorityServiceFactory({
@@ -959,6 +965,20 @@ export const registerRoutes = async (
projectDAL
});
const pkiSubscriberService = pkiSubscriberServiceFactory({
pkiSubscriberDAL,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
certificateAuthoritySecretDAL,
certificateAuthorityCrlDAL,
certificateDAL,
certificateBodyDAL,
certificateSecretDAL,
projectDAL,
kmsService,
permissionService
});
const projectTemplateService = projectTemplateServiceFactory({
licenseService,
permissionService,
@@ -1056,6 +1076,7 @@ export const registerRoutes = async (
projectRoleDAL,
folderDAL,
licenseService,
pkiSubscriberDAL,
certificateAuthorityDAL,
certificateDAL,
pkiAlertDAL,
@@ -1742,6 +1763,7 @@ export const registerRoutes = async (
certificateEst: certificateEstService,
pkiAlert: pkiAlertService,
pkiCollection: pkiCollectionService,
pkiSubscriber: pkiSubscriberService,
secretScanning: secretScanningService,
license: licenseService,
trustedIp: trustedIpService,
@@ -1784,6 +1806,10 @@ export const registerRoutes = async (
if (licenseSyncJob) {
cronJobs.push(licenseSyncJob);
}
const microsoftTeamsSyncJob = await microsoftTeamsService.initializeBackgroundSync();
if (microsoftTeamsSyncJob) {
cronJobs.push(microsoftTeamsSyncJob);
}
}
server.decorate<FastifyZodProvider["store"]>("store", {

View File

@@ -38,6 +38,7 @@ import {
} from "@app/services/app-connection/humanitec";
import { LdapConnectionListItemSchema, SanitizedLdapConnectionSchema } from "@app/services/app-connection/ldap";
import { MsSqlConnectionListItemSchema, SanitizedMsSqlConnectionSchema } from "@app/services/app-connection/mssql";
import { OCIConnectionListItemSchema, SanitizedOCIConnectionSchema } from "@app/services/app-connection/oci";
import {
PostgresConnectionListItemSchema,
SanitizedPostgresConnectionSchema
@@ -76,7 +77,8 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedAzureClientSecretsConnectionSchema.options,
...SanitizedWindmillConnectionSchema.options,
...SanitizedLdapConnectionSchema.options,
...SanitizedTeamCityConnectionSchema.options
...SanitizedTeamCityConnectionSchema.options,
...SanitizedOCIConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -97,7 +99,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AzureClientSecretsConnectionListItemSchema,
WindmillConnectionListItemSchema,
LdapConnectionListItemSchema,
TeamCityConnectionListItemSchema
TeamCityConnectionListItemSchema,
OCIConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

View File

@@ -13,6 +13,7 @@ import { registerHCVaultConnectionRouter } from "./hc-vault-connection-router";
import { registerHumanitecConnectionRouter } from "./humanitec-connection-router";
import { registerLdapConnectionRouter } from "./ldap-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerOCIConnectionRouter } from "./oci-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerTeamCityConnectionRouter } from "./teamcity-connection-router";
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
@@ -40,5 +41,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.Auth0]: registerAuth0ConnectionRouter,
[AppConnection.HCVault]: registerHCVaultConnectionRouter,
[AppConnection.LDAP]: registerLdapConnectionRouter,
[AppConnection.TeamCity]: registerTeamCityConnectionRouter
[AppConnection.TeamCity]: registerTeamCityConnectionRouter,
[AppConnection.OCI]: registerOCIConnectionRouter
};

View File

@@ -0,0 +1,123 @@
import z from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateOCIConnectionSchema,
SanitizedOCIConnectionSchema,
UpdateOCIConnectionSchema
} from "@app/services/app-connection/oci";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerOCIConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.OCI,
server,
sanitizedResponseSchema: SanitizedOCIConnectionSchema,
createSchema: CreateOCIConnectionSchema,
updateSchema: UpdateOCIConnectionSchema
});
// The following endpoints are for internal Infisical App use only and not part of the public API
server.route({
method: "GET",
url: `/:connectionId/compartments`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const compartments = await server.services.appConnection.oci.listCompartments(connectionId, req.permission);
return compartments;
}
});
server.route({
method: "GET",
url: `/:connectionId/vaults`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
querystring: z.object({
compartmentOcid: z.string().min(1, "Compartment OCID required")
}),
response: {
200: z
.object({
id: z.string(),
displayName: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const { compartmentOcid } = req.query;
const vaults = await server.services.appConnection.oci.listVaults(
{ connectionId, compartmentOcid },
req.permission
);
return vaults;
}
});
server.route({
method: "GET",
url: `/:connectionId/vault-keys`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
querystring: z.object({
compartmentOcid: z.string().min(1, "Compartment OCID required"),
vaultOcid: z.string().min(1, "Vault OCID required")
}),
response: {
200: z
.object({
id: z.string(),
displayName: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const { compartmentOcid, vaultOcid } = req.query;
const keys = await server.services.appConnection.oci.listVaultKeys(
{ connectionId, compartmentOcid, vaultOcid },
req.permission
);
return keys;
}
});
};
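// A hypothetical client-side sketch of the compartment -> vault -> key cascade that
// the three internal endpoints above enable; BASE_URL, the bearer token, and the
// connection ID are placeholders.
const BASE_URL = "https://app.infisical.com/api/v1/app-connections/oci";
const authHeaders = { Authorization: `Bearer ${process.env.INFISICAL_TOKEN}` };
const getJson = async <T>(path: string): Promise<T> => {
  const res = await fetch(`${BASE_URL}${path}`, { headers: authHeaders });
  return res.json() as Promise<T>;
};
const connectionId = "<connection-uuid>";
const [compartment] = await getJson<{ id: string; name: string }[]>(`/${connectionId}/compartments`);
const [vault] = await getJson<{ id: string; displayName: string }[]>(
  `/${connectionId}/vaults?compartmentOcid=${compartment.id}`
);
const vaultKeys = await getJson<{ id: string; displayName: string }[]>(
  `/${connectionId}/vault-keys?compartmentOcid=${compartment.id}&vaultOcid=${vault.id}`
);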

View File

@@ -52,7 +52,8 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
response: {
200: z.object({
identity: IdentitiesSchema.extend({
authMethods: z.array(z.string())
authMethods: z.array(z.string()),
metadata: z.object({ id: z.string(), key: z.string(), value: z.string() }).array()
})
})
}
@@ -123,7 +124,9 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
identity: IdentitiesSchema
identity: IdentitiesSchema.extend({
metadata: z.object({ id: z.string(), key: z.string(), value: z.string() }).array()
})
})
}
},
@@ -227,8 +230,8 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
identity: IdentityOrgMembershipsSchema.extend({
metadata: z
.object({
key: z.string().trim().min(1),
id: z.string().trim().min(1),
key: z.string().trim().min(1),
value: z.string().trim().min(1)
})
.array()

View File

@@ -33,6 +33,7 @@ import { registerOrgRouter } from "./organization-router";
import { registerPasswordRouter } from "./password-router";
import { registerPkiAlertRouter } from "./pki-alert-router";
import { registerPkiCollectionRouter } from "./pki-collection-router";
import { registerPkiSubscriberRouter } from "./pki-subscriber-router";
import { registerProjectEnvRouter } from "./project-env-router";
import { registerProjectKeyRouter } from "./project-key-router";
import { registerProjectMembershipRouter } from "./project-membership-router";
@@ -105,6 +106,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await pkiRouter.register(registerCertificateTemplateRouter, { prefix: "/certificate-templates" });
await pkiRouter.register(registerPkiAlertRouter, { prefix: "/alerts" });
await pkiRouter.register(registerPkiCollectionRouter, { prefix: "/collections" });
await pkiRouter.register(registerPkiSubscriberRouter, { prefix: "/subscribers" });
},
{ prefix: "/pki" }
);

View File

@@ -0,0 +1,478 @@
import { z } from "zod";
import { CertificatesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, PKI_SUBSCRIBERS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertExtendedKeyUsage, CertKeyUsage } from "@app/services/certificate/certificate-types";
import { validateAltNameField } from "@app/services/certificate-authority/certificate-authority-validators";
import { sanitizedPkiSubscriber } from "@app/services/pki-subscriber/pki-subscriber-schema";
import { PkiSubscriberStatus } from "@app/services/pki-subscriber/pki-subscriber-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:subscriberName",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Get PKI Subscriber",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.GET.subscriberName)
}),
querystring: z.object({
projectId: z.string().describe(PKI_SUBSCRIBERS.GET.projectId)
}),
response: {
200: sanitizedPkiSubscriber
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.getSubscriber({
subscriberName: req.params.subscriberName,
projectId: req.query.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.GET_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
name: subscriber.name
}
}
});
return subscriber;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Create PKI Subscriber",
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.CREATE.projectId),
caId: z
.string()
.trim()
.uuid("CA ID must be a valid UUID")
.min(1, "CA ID is required")
.describe(PKI_SUBSCRIBERS.CREATE.caId),
name: slugSchema({ min: 1, max: 64, field: "name" }).describe(PKI_SUBSCRIBERS.CREATE.name),
commonName: z.string().trim().min(1).describe(PKI_SUBSCRIBERS.CREATE.commonName),
status: z
.nativeEnum(PkiSubscriberStatus)
.default(PkiSubscriberStatus.ACTIVE)
.describe(PKI_SUBSCRIBERS.CREATE.status),
ttl: z
.string()
.trim()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(PKI_SUBSCRIBERS.CREATE.ttl),
subjectAlternativeNames: validateAltNameField
.array()
.default([])
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.subjectAlternativeNames),
keyUsages: z
.nativeEnum(CertKeyUsage)
.array()
.default([CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT])
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.default([])
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.extendedKeyUsages)
}),
response: {
200: sanitizedPkiSubscriber
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.createSubscriber({
...req.body,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.CREATE_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
caId: subscriber.caId ?? undefined,
name: subscriber.name,
commonName: subscriber.commonName,
ttl: subscriber.ttl,
subjectAlternativeNames: subscriber.subjectAlternativeNames,
keyUsages: subscriber.keyUsages as CertKeyUsage[],
extendedKeyUsages: subscriber.extendedKeyUsages as CertExtendedKeyUsage[]
}
}
});
return subscriber;
}
});
server.route({
method: "PATCH",
url: "/:subscriberName",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Update PKI Subscriber",
params: z.object({
subscriberName: z.string().trim().describe(PKI_SUBSCRIBERS.UPDATE.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.UPDATE.projectId),
caId: z
.string()
.trim()
.uuid("CA ID must be a valid UUID")
.min(1, "CA ID is required")
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.caId),
name: slugSchema({ min: 1, max: 64, field: "name" }).describe(PKI_SUBSCRIBERS.UPDATE.name).optional(),
commonName: z.string().trim().min(1).describe(PKI_SUBSCRIBERS.UPDATE.commonName).optional(),
status: z.nativeEnum(PkiSubscriberStatus).optional().describe(PKI_SUBSCRIBERS.UPDATE.status),
subjectAlternativeNames: validateAltNameField
.array()
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.subjectAlternativeNames),
ttl: z
.string()
.trim()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.ttl),
keyUsages: z
.nativeEnum(CertKeyUsage)
.array()
.transform((arr) => Array.from(new Set(arr)))
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.transform((arr) => Array.from(new Set(arr)))
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.extendedKeyUsages)
}),
response: {
200: sanitizedPkiSubscriber
}
},
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.updateSubscriber({
subscriberName: req.params.subscriberName,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.UPDATE_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
caId: subscriber.caId ?? undefined,
name: subscriber.name,
commonName: subscriber.commonName,
ttl: subscriber.ttl,
subjectAlternativeNames: subscriber.subjectAlternativeNames,
keyUsages: subscriber.keyUsages as CertKeyUsage[],
extendedKeyUsages: subscriber.extendedKeyUsages as CertExtendedKeyUsage[]
}
}
});
return subscriber;
}
});
server.route({
method: "DELETE",
url: "/:subscriberName",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Delete PKI Subscriber",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.DELETE.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.DELETE.projectId)
}),
response: {
200: sanitizedPkiSubscriber
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.deleteSubscriber({
subscriberName: req.params.subscriberName,
projectId: req.body.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.DELETE_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
name: subscriber.name
}
}
});
return subscriber;
}
});
server.route({
method: "POST",
url: "/:subscriberName/issue-certificate",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Issue certificate",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.ISSUE_CERT.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.projectId)
}),
response: {
200: z.object({
certificate: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.certificate),
issuingCaCertificate: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.issuingCaCertificate),
certificateChain: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.certificateChain),
privateKey: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.privateKey),
serialNumber: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, privateKey, serialNumber, subscriber } =
await server.services.pkiSubscriber.issueSubscriberCert({
subscriberName: req.params.subscriberName,
projectId: req.body.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.ISSUE_PKI_SUBSCRIBER_CERT,
metadata: {
subscriberId: subscriber.id,
name: subscriber.name,
serialNumber
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueCert,
distinctId: getTelemetryDistinctId(req),
properties: {
subscriberId: subscriber.id,
commonName: subscriber.commonName,
...req.auditLogInfo
}
});
return {
certificate,
certificateChain,
issuingCaCertificate,
privateKey,
serialNumber
};
}
});
server.route({
method: "POST",
url: "/:subscriberName/sign-certificate",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Sign certificate",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.SIGN_CERT.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.projectId),
csr: z.string().trim().min(1).max(3000).describe(PKI_SUBSCRIBERS.SIGN_CERT.csr)
}),
response: {
200: z.object({
certificate: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.certificate),
issuingCaCertificate: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.issuingCaCertificate),
certificateChain: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.certificateChain),
serialNumber: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, serialNumber, subscriber } =
await server.services.pkiSubscriber.signSubscriberCert({
subscriberName: req.params.subscriberName,
projectId: req.body.projectId,
csr: req.body.csr,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.SIGN_PKI_SUBSCRIBER_CERT,
metadata: {
subscriberId: subscriber.id,
name: subscriber.name,
serialNumber
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignCert,
distinctId: getTelemetryDistinctId(req),
properties: {
subscriberId: subscriber.id,
commonName: subscriber.commonName,
...req.auditLogInfo
}
});
return {
certificate,
certificateChain,
issuingCaCertificate,
serialNumber
};
}
});
server.route({
method: "GET",
url: "/:subscriberName/certificates",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "List PKI Subscriber certificates",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.GET.subscriberName)
}),
querystring: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.LIST_CERTS.projectId),
offset: z.coerce.number().min(0).max(100).default(0).describe(PKI_SUBSCRIBERS.LIST_CERTS.offset),
limit: z.coerce.number().min(1).max(100).default(25).describe(PKI_SUBSCRIBERS.LIST_CERTS.limit)
}),
response: {
200: z.object({
certificates: z.array(CertificatesSchema),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { totalCount, certificates } = await server.services.pkiSubscriber.listSubscriberCerts({
subscriberName: req.params.subscriberName,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.LIST_PKI_SUBSCRIBER_CERTS,
metadata: {
subscriberId: req.params.subscriberName,
name: req.params.subscriberName,
projectId: req.query.projectId
}
}
});
return {
certificates,
totalCount
};
}
});
};
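// A hypothetical sketch of issuing a leaf certificate through the
// POST /:subscriberName/issue-certificate route above; the host, token, subscriber
// name, and project ID are placeholders (the router is mounted under /pki/subscribers).
const issueRes = await fetch(
  "https://app.infisical.com/api/v1/pki/subscribers/web-service/issue-certificate",
  {
    method: "POST",
    headers: {
      Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({ projectId: "<project-id>" })
  }
);
const { certificate, certificateChain, privateKey, serialNumber } = await issueRes.json();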

View File

@@ -10,6 +10,7 @@ import { registerGcpSyncRouter } from "./gcp-sync-router";
import { registerGitHubSyncRouter } from "./github-sync-router";
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerOCIVaultSyncRouter } from "./oci-vault-sync-router";
import { registerTeamCitySyncRouter } from "./teamcity-sync-router";
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
import { registerVercelSyncRouter } from "./vercel-sync-router";
@@ -31,5 +32,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.Vercel]: registerVercelSyncRouter,
[SecretSync.Windmill]: registerWindmillSyncRouter,
[SecretSync.HCVault]: registerHCVaultSyncRouter,
[SecretSync.TeamCity]: registerTeamCitySyncRouter
[SecretSync.TeamCity]: registerTeamCitySyncRouter,
[SecretSync.OCIVault]: registerOCIVaultSyncRouter
};

View File

@@ -0,0 +1,17 @@
import {
CreateOCIVaultSyncSchema,
OCIVaultSyncSchema,
UpdateOCIVaultSyncSchema
} from "@app/services/secret-sync/oci-vault";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerOCIVaultSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.OCIVault,
server,
responseSchema: OCIVaultSyncSchema,
createSchema: CreateOCIVaultSyncSchema,
updateSchema: UpdateOCIVaultSyncSchema
});
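// A hypothetical sketch of creating an OCI Vault sync through the generic endpoints
// registered above; the URL and the body shape outside destinationConfig are
// assumptions, while the destinationConfig fields match the SecretSyncs.OCI_VAULT
// docs constants. All OCIDs are placeholders.
await fetch("https://app.infisical.com/api/v1/secret-syncs/oci-vault", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
    "Content-Type": "application/json"
  },
  body: JSON.stringify({
    name: "prod-oci-vault",
    projectId: "<project-id>",
    connectionId: "<oci-connection-uuid>",
    environment: "prod",
    secretPath: "/",
    destinationConfig: {
      compartmentOcid: "ocid1.compartment.oc1..<id>",
      vaultOcid: "ocid1.vault.oc1..<id>",
      keyOcid: "ocid1.key.oc1..<id>"
    }
  })
});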

View File

@@ -24,6 +24,7 @@ import { GcpSyncListItemSchema, GcpSyncSchema } from "@app/services/secret-sync/
import { GitHubSyncListItemSchema, GitHubSyncSchema } from "@app/services/secret-sync/github";
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { OCIVaultSyncListItemSchema, OCIVaultSyncSchema } from "@app/services/secret-sync/oci-vault";
import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/secret-sync/teamcity";
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
import { VercelSyncListItemSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel";
@@ -43,7 +44,8 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
VercelSyncSchema,
WindmillSyncSchema,
HCVaultSyncSchema,
TeamCitySyncSchema
TeamCitySyncSchema,
OCIVaultSyncSchema
]);
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@@ -60,7 +62,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
VercelSyncListItemSchema,
WindmillSyncListItemSchema,
HCVaultSyncListItemSchema,
TeamCitySyncListItemSchema
TeamCitySyncListItemSchema,
OCIVaultSyncListItemSchema
]);
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {

View File

@@ -24,6 +24,7 @@ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { sanitizedCertificateTemplate } from "@app/services/certificate-template/certificate-template-schema";
import { sanitizedPkiSubscriber } from "@app/services/pki-subscriber/pki-subscriber-schema";
import { ProjectFilterType } from "@app/services/project/project-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
@@ -490,6 +491,38 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/:projectId/pki-subscribers",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_PKI_SUBSCRIBERS.projectId)
}),
response: {
200: z.object({
subscribers: z.array(sanitizedPkiSubscriber)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscribers = await server.services.project.listProjectPkiSubscribers({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
projectId: req.params.projectId
});
return { subscribers };
}
});
server.route({
method: "GET",
url: "/:projectId/certificate-templates",
@@ -628,6 +661,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SshHosts],
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_SSH_HOSTS.projectId)
}),
@@ -666,6 +701,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SshHostGroups],
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_SSH_HOST_GROUPS.projectId)
}),

View File

@@ -16,7 +16,8 @@ export enum AppConnection {
Auth0 = "auth0",
HCVault = "hashicorp-vault",
LDAP = "ldap",
TeamCity = "teamcity"
TeamCity = "teamcity",
OCI = "oci"
}
export enum AWSRegion {

View File

@@ -53,6 +53,7 @@ import {
} from "./humanitec";
import { getLdapConnectionListItem, LdapConnectionMethod, validateLdapConnectionCredentials } from "./ldap";
import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { getOCIConnectionListItem, OCIConnectionMethod, validateOCIConnectionCredentials } from "./oci";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import {
getTeamCityConnectionListItem,
@@ -91,7 +92,8 @@ export const listAppConnectionOptions = () => {
getAuth0ConnectionListItem(),
getHCVaultConnectionListItem(),
getLdapConnectionListItem(),
getTeamCityConnectionListItem()
getTeamCityConnectionListItem(),
getOCIConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@@ -160,7 +162,8 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Windmill]: validateWindmillConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.HCVault]: validateHCVaultConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.LDAP]: validateLdapConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.TeamCity]: validateTeamCityConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.TeamCity]: validateTeamCityConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.OCI]: validateOCIConnectionCredentials as TAppConnectionCredentialsValidator
};
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection);
@@ -176,6 +179,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case GitHubConnectionMethod.OAuth:
return "OAuth";
case AwsConnectionMethod.AccessKey:
case OCIConnectionMethod.AccessKey:
return "Access Key";
case AwsConnectionMethod.AssumeRole:
return "Assume Role";
@@ -250,5 +254,6 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Auth0]: platformManagedCredentialsNotSupported,
[AppConnection.HCVault]: platformManagedCredentialsNotSupported,
[AppConnection.LDAP]: platformManagedCredentialsNotSupported, // we could support this in the future
[AppConnection.TeamCity]: platformManagedCredentialsNotSupported
[AppConnection.TeamCity]: platformManagedCredentialsNotSupported,
[AppConnection.OCI]: platformManagedCredentialsNotSupported
};

View File

@@ -18,5 +18,6 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Auth0]: "Auth0",
[AppConnection.HCVault]: "Hashicorp Vault",
[AppConnection.LDAP]: "LDAP",
[AppConnection.TeamCity]: "TeamCity"
[AppConnection.TeamCity]: "TeamCity",
[AppConnection.OCI]: "OCI"
};

View File

@@ -49,6 +49,8 @@ import { ValidateHumanitecConnectionCredentialsSchema } from "./humanitec";
import { humanitecConnectionService } from "./humanitec/humanitec-connection-service";
import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidateOCIConnectionCredentialsSchema } from "./oci";
import { ociConnectionService } from "./oci/oci-connection-service";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
import { ValidateTeamCityConnectionCredentialsSchema } from "./teamcity";
import { teamcityConnectionService } from "./teamcity/teamcity-connection-service";
@@ -85,7 +87,8 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.Auth0]: ValidateAuth0ConnectionCredentialsSchema,
[AppConnection.HCVault]: ValidateHCVaultConnectionCredentialsSchema,
[AppConnection.LDAP]: ValidateLdapConnectionCredentialsSchema,
[AppConnection.TeamCity]: ValidateTeamCityConnectionCredentialsSchema
[AppConnection.TeamCity]: ValidateTeamCityConnectionCredentialsSchema,
[AppConnection.OCI]: ValidateOCIConnectionCredentialsSchema
};
export const appConnectionServiceFactory = ({
@@ -464,6 +467,7 @@ export const appConnectionServiceFactory = ({
auth0: auth0ConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
hcvault: hcVaultConnectionService(connectAppConnectionById),
windmill: windmillConnectionService(connectAppConnectionById),
teamcity: teamcityConnectionService(connectAppConnectionById)
teamcity: teamcityConnectionService(connectAppConnectionById),
oci: ociConnectionService(connectAppConnectionById)
};
};

View File

@@ -76,6 +76,12 @@ import {
TValidateLdapConnectionCredentialsSchema
} from "./ldap";
import { TMsSqlConnection, TMsSqlConnectionInput, TValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import {
TOCIConnection,
TOCIConnectionConfig,
TOCIConnectionInput,
TValidateOCIConnectionCredentialsSchema
} from "./oci";
import {
TPostgresConnection,
TPostgresConnectionInput,
@@ -125,6 +131,7 @@ export type TAppConnection = { id: string } & (
| THCVaultConnection
| TLdapConnection
| TTeamCityConnection
| TOCIConnection
);
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
@@ -150,6 +157,7 @@ export type TAppConnectionInput = { id: string } & (
| THCVaultConnectionInput
| TLdapConnectionInput
| TTeamCityConnectionInput
| TOCIConnectionInput
);
export type TSqlConnectionInput = TPostgresConnectionInput | TMsSqlConnectionInput;
@@ -180,7 +188,8 @@ export type TAppConnectionConfig =
| TAuth0ConnectionConfig
| THCVaultConnectionConfig
| TLdapConnectionConfig
| TTeamCityConnectionConfig;
| TTeamCityConnectionConfig
| TOCIConnectionConfig;
export type TValidateAppConnectionCredentialsSchema =
| TValidateAwsConnectionCredentialsSchema
@@ -200,7 +209,8 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateAuth0ConnectionCredentialsSchema
| TValidateHCVaultConnectionCredentialsSchema
| TValidateLdapConnectionCredentialsSchema
| TValidateTeamCityConnectionCredentialsSchema;
| TValidateTeamCityConnectionCredentialsSchema
| TValidateOCIConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = {
connectionId: string;

View File

@@ -0,0 +1,4 @@
export * from "./oci-connection-enums";
export * from "./oci-connection-fns";
export * from "./oci-connection-schemas";
export * from "./oci-connection-types";

View File

@@ -0,0 +1,3 @@
export enum OCIConnectionMethod {
AccessKey = "access-key"
}

View File

@@ -0,0 +1,139 @@
import { common, identity, keymanagement } from "oci-sdk";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { OCIConnectionMethod } from "./oci-connection-enums";
import { TOCIConnection, TOCIConnectionConfig } from "./oci-connection-types";
export const getOCIProvider = async (config: TOCIConnectionConfig) => {
const {
credentials: { fingerprint, privateKey, region, tenancyOcid, userOcid }
} = config;
const provider = new common.SimpleAuthenticationDetailsProvider(
tenancyOcid,
userOcid,
fingerprint,
privateKey,
null,
common.Region.fromRegionId(region)
);
return provider;
};
export const getOCIConnectionListItem = () => {
return {
name: "OCI" as const,
app: AppConnection.OCI as const,
methods: Object.values(OCIConnectionMethod) as [OCIConnectionMethod.AccessKey]
};
};
export const validateOCIConnectionCredentials = async (config: TOCIConnectionConfig) => {
const provider = await getOCIProvider(config);
try {
const identityClient = new identity.IdentityClient({
authenticationDetailsProvider: provider
});
// Get user details - a lightweight call that validates all credentials
await identityClient.getUser({ userId: config.credentials.userOcid });
} catch (error: unknown) {
if (error instanceof Error) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to validate connection: verify credentials"
});
}
return config.credentials;
};
export const listOCICompartments = async (appConnection: TOCIConnection) => {
const provider = await getOCIProvider(appConnection);
const identityClient = new identity.IdentityClient({ authenticationDetailsProvider: provider });
const keyManagementClient = new keymanagement.KmsVaultClient({
authenticationDetailsProvider: provider
});
const rootCompartment = await identityClient
.getTenancy({
tenancyId: appConnection.credentials.tenancyOcid
})
.then((response) => ({
...response.tenancy,
id: appConnection.credentials.tenancyOcid,
name: response.tenancy.name ? `${response.tenancy.name} (root)` : "root"
}));
const compartments = await identityClient.listCompartments({
compartmentId: appConnection.credentials.tenancyOcid,
compartmentIdInSubtree: true,
accessLevel: identity.requests.ListCompartmentsRequest.AccessLevel.Any,
lifecycleState: identity.models.Compartment.LifecycleState.Active
});
const allCompartments = [rootCompartment, ...compartments.items];
const filteredCompartments = [];
for await (const compartment of allCompartments) {
try {
// Check if user can list vaults in this compartment
await keyManagementClient.listVaults({
compartmentId: compartment.id,
limit: 1
});
filteredCompartments.push(compartment);
} catch (error) {
// Skip compartments where the user cannot list vaults
}
}
return filteredCompartments;
};
export const listOCIVaults = async (appConnection: TOCIConnection, compartmentOcid: string) => {
const provider = await getOCIProvider(appConnection);
const keyManagementClient = new keymanagement.KmsVaultClient({
authenticationDetailsProvider: provider
});
const vaults = await keyManagementClient.listVaults({
compartmentId: compartmentOcid
});
return vaults.items.filter((v) => v.lifecycleState === keymanagement.models.Vault.LifecycleState.Active);
};
export const listOCIVaultKeys = async (appConnection: TOCIConnection, compartmentOcid: string, vaultOcid: string) => {
const provider = await getOCIProvider(appConnection);
const kmsVaultClient = new keymanagement.KmsVaultClient({
authenticationDetailsProvider: provider
});
const vault = await kmsVaultClient.getVault({
vaultId: vaultOcid
});
const keyManagementClient = new keymanagement.KmsManagementClient({
authenticationDetailsProvider: provider
});
keyManagementClient.endpoint = vault.vault.managementEndpoint;
const keys = await keyManagementClient.listKeys({
compartmentId: compartmentOcid
});
return keys.items.filter((v) => v.lifecycleState === keymanagement.models.KeySummary.LifecycleState.Enabled);
};
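// A minimal usage sketch chaining the three helpers above; all OCIDs, the
// fingerprint, and the key material are placeholders.
const exampleConnection = {
  method: OCIConnectionMethod.AccessKey,
  credentials: {
    userOcid: "ocid1.user.oc1..example",
    tenancyOcid: "ocid1.tenancy.oc1..example",
    region: "us-ashburn-1",
    fingerprint: "aa:bb:cc:dd:ee:ff",
    privateKey: "-----BEGIN PRIVATE KEY-----\n..."
  }
} as unknown as TOCIConnection;
const exampleCompartments = await listOCICompartments(exampleConnection);
const exampleVaults = await listOCIVaults(exampleConnection, exampleCompartments[0].id);
const exampleKeys = await listOCIVaultKeys(exampleConnection, exampleCompartments[0].id, exampleVaults[0].id);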

View File

@@ -0,0 +1,65 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { OCIConnectionMethod } from "./oci-connection-enums";
export const OCIConnectionAccessTokenCredentialsSchema = z.object({
userOcid: z.string().trim().min(1, "User OCID required").describe(AppConnections.CREDENTIALS.OCI.userOcid),
tenancyOcid: z.string().trim().min(1, "Tenancy OCID required").describe(AppConnections.CREDENTIALS.OCI.tenancyOcid),
region: z.string().trim().min(1, "Region required").describe(AppConnections.CREDENTIALS.OCI.region),
fingerprint: z.string().trim().min(1, "Fingerprint required").describe(AppConnections.CREDENTIALS.OCI.fingerprint),
privateKey: z.string().trim().min(1, "Private Key required").describe(AppConnections.CREDENTIALS.OCI.privateKey)
});
const BaseOCIConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.OCI) });
export const OCIConnectionSchema = BaseOCIConnectionSchema.extend({
method: z.literal(OCIConnectionMethod.AccessKey),
credentials: OCIConnectionAccessTokenCredentialsSchema
});
export const SanitizedOCIConnectionSchema = z.discriminatedUnion("method", [
BaseOCIConnectionSchema.extend({
method: z.literal(OCIConnectionMethod.AccessKey),
credentials: OCIConnectionAccessTokenCredentialsSchema.pick({
userOcid: true,
tenancyOcid: true,
region: true,
fingerprint: true
})
})
]);
export const ValidateOCIConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z.literal(OCIConnectionMethod.AccessKey).describe(AppConnections.CREATE(AppConnection.OCI).method),
credentials: OCIConnectionAccessTokenCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.OCI).credentials
)
})
]);
export const CreateOCIConnectionSchema = ValidateOCIConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.OCI)
);
export const UpdateOCIConnectionSchema = z
.object({
credentials: OCIConnectionAccessTokenCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.OCI).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.OCI));
export const OCIConnectionListItemSchema = z.object({
name: z.literal("OCI"),
app: z.literal(AppConnection.OCI),
methods: z.nativeEnum(OCIConnectionMethod).array()
});

View File

@@ -0,0 +1,70 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listOCICompartments, listOCIVaultKeys, listOCIVaults } from "./oci-connection-fns";
import { TOCIConnection } from "./oci-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TOCIConnection>;
type TListOCIVaultsDTO = {
connectionId: string;
compartmentOcid: string;
};
type TListOCIVaultKeysDTO = {
connectionId: string;
compartmentOcid: string;
vaultOcid: string;
};
export const ociConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listCompartments = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.OCI, connectionId, actor);
try {
const compartments = await listOCICompartments(appConnection);
return compartments;
} catch (error) {
logger.error(error, "Failed to establish connection with OCI");
return [];
}
};
const listVaults = async ({ connectionId, compartmentOcid }: TListOCIVaultsDTO, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.OCI, connectionId, actor);
try {
const vaults = await listOCIVaults(appConnection, compartmentOcid);
return vaults;
} catch (error) {
logger.error(error, "Failed to establish connection with OCI");
return [];
}
};
const listVaultKeys = async (
{ connectionId, compartmentOcid, vaultOcid }: TListOCIVaultKeysDTO,
actor: OrgServiceActor
) => {
const appConnection = await getAppConnection(AppConnection.OCI, connectionId, actor);
try {
const keys = await listOCIVaultKeys(appConnection, compartmentOcid, vaultOcid);
return keys;
} catch (error) {
logger.error(error, "Failed to establish connection with OCI");
return [];
}
};
return {
listCompartments,
listVaults,
listVaultKeys
};
};

View File

@@ -0,0 +1,22 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateOCIConnectionSchema,
OCIConnectionSchema,
ValidateOCIConnectionCredentialsSchema
} from "./oci-connection-schemas";
export type TOCIConnection = z.infer<typeof OCIConnectionSchema>;
export type TOCIConnectionInput = z.infer<typeof CreateOCIConnectionSchema> & {
app: AppConnection.OCI;
};
export type TValidateOCIConnectionCredentialsSchema = typeof ValidateOCIConnectionCredentialsSchema;
export type TOCIConnectionConfig = DiscriminativePick<TOCIConnectionInput, "method" | "app" | "credentials"> & {
orgId: string;
};

View File

@@ -1169,7 +1169,7 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.Certificates
);
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
@@ -1520,7 +1520,7 @@ export const certificateAuthorityServiceFactory = ({
);
}
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });

View File

@@ -10,6 +10,18 @@ const isValidDate = (dateString: string) => {
export const validateCaDateField = z.string().trim().refine(isValidDate, { message: "Invalid date format" });
export const validateAltNameField = z
.string()
.trim()
.refine(
(name) => {
return isFQDN(name) || z.string().email().safeParse(name).success || isValidIp(name);
},
{
message: "SAN must be a valid hostname, email address, or IP address"
}
);
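// Quick examples of what the SAN refinement above accepts and rejects:
validateAltNameField.parse("api.example.com"); // FQDN: passes
validateAltNameField.parse("admin@example.com"); // email address: passes
validateAltNameField.parse("10.0.12.5"); // IP address: passes
validateAltNameField.safeParse("not a valid san").success; // false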
export const validateAltNamesField = z
.string()
.trim()

View File

@@ -44,8 +44,27 @@ export const certificateDALFactory = (db: TDbClient) => {
}
};
const countCertificatesForPkiSubscriber = async (subscriberId: string) => {
try {
interface CountResult {
count: string;
}
const query = db
.replicaNode()(TableName.Certificate)
.where(`${TableName.Certificate}.pkiSubscriberId`, subscriberId);
const count = await query.count("*").first();
return parseInt((count as unknown as CountResult).count || "0", 10);
} catch (error) {
throw new DatabaseError({ error, name: "Count all subscriber certificates" });
}
};
return {
...certificateOrm,
countCertificatesInProject
countCertificatesInProject,
countCertificatesForPkiSubscriber
};
};

View File

@@ -106,18 +106,29 @@ export const identityServiceFactory = ({
},
tx
);
let insertedMetadata: Array<{
id: string;
key: string;
value: string;
}> = [];
if (metadata && metadata.length) {
await identityMetadataDAL.insertMany(
metadata.map(({ key, value }) => ({
identityId: newIdentity.id,
orgId,
key,
value
})),
tx
);
const rowsToInsert = metadata.map(({ key, value }) => ({
identityId: newIdentity.id,
orgId,
key,
value
}));
insertedMetadata = await identityMetadataDAL.insertMany(rowsToInsert, tx);
}
return { ...newIdentity, authMethods: [] };
return {
...newIdentity,
authMethods: [],
metadata: insertedMetadata
};
});
await licenseService.updateSubscriptionOrgMemberCount(orgId);
@@ -189,21 +200,31 @@ export const identityServiceFactory = ({
tx
);
}
let insertedMetadata: Array<{
id: string;
key: string;
value: string;
}> = [];
if (metadata) {
await identityMetadataDAL.delete({ orgId: identityOrgMembership.orgId, identityId: id }, tx);
if (metadata.length) {
await identityMetadataDAL.insertMany(
metadata.map(({ key, value }) => ({
identityId: newIdentity.id,
orgId: identityOrgMembership.orgId,
key,
value
})),
tx
);
const rowsToInsert = metadata.map(({ key, value }) => ({
identityId: newIdentity.id,
orgId: identityOrgMembership.orgId,
key,
value
}));
insertedMetadata = await identityMetadataDAL.insertMany(rowsToInsert, tx);
}
}
return newIdentity;
return {
...newIdentity,
metadata: insertedMetadata
};
});
return { ...identity, orgId: identityOrgMembership.orgId };
@@ -224,6 +245,7 @@ export const identityServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
return identity;
};

View File

@@ -6,6 +6,7 @@ import {
Request,
Response
} from "botbuilder";
import { CronJob } from "cron";
import { FastifyReply, FastifyRequest } from "fastify";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
@@ -86,8 +87,17 @@ export const microsoftTeamsServiceFactory = ({
}: TMicrosoftTeamsServiceFactoryDep) => {
let teamsBot: TeamsBot | null = null;
let adapter: CloudAdapter | null = null;
let lastKnownUpdatedAt = new Date();
const initializeTeamsBot = async ({ botAppId, botAppPassword }: { botAppId: string; botAppPassword: string }) => {
const initializeTeamsBot = async ({
botAppId,
botAppPassword,
lastUpdatedAt
}: {
botAppId: string;
botAppPassword: string;
lastUpdatedAt?: Date;
}) => {
logger.info("Initializing Microsoft Teams bot");
teamsBot = new TeamsBot({
botAppId,
@@ -106,6 +116,57 @@ export const microsoftTeamsServiceFactory = ({
})
)
);
if (lastUpdatedAt) {
lastKnownUpdatedAt = lastUpdatedAt;
}
};
const $syncMicrosoftTeamsIntegrationConfiguration = async () => {
try {
const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
if (!serverCfg) {
throw new BadRequestError({
message: "Failed to get server configuration."
});
}
if (lastKnownUpdatedAt.getTime() === serverCfg.updatedAt.getTime()) {
logger.info("No changes to Microsoft Teams integration configuration, skipping sync");
return;
}
lastKnownUpdatedAt = serverCfg.updatedAt;
if (
serverCfg.encryptedMicrosoftTeamsAppId &&
serverCfg.encryptedMicrosoftTeamsClientSecret &&
serverCfg.encryptedMicrosoftTeamsBotId
) {
const decryptWithRoot = kmsService.decryptWithRootKey();
const decryptedAppId = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsAppId);
const decryptedAppPassword = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsClientSecret);
await initializeTeamsBot({
botAppId: decryptedAppId.toString(),
botAppPassword: decryptedAppPassword.toString()
});
}
} catch (err) {
logger.error(err, "Error syncing Microsoft Teams integration configuration");
}
};
const initializeBackgroundSync = async () => {
logger.info("Setting up background sync process for Microsoft Teams workflow integration configuration");
// initial sync upon startup
await $syncMicrosoftTeamsIntegrationConfiguration();
// sync the Microsoft Teams integration configuration every 5 minutes
const job = new CronJob("*/5 * * * *", $syncMicrosoftTeamsIntegrationConfiguration);
job.start();
return job;
};
const start = async () => {
@@ -703,6 +764,7 @@ export const microsoftTeamsServiceFactory = ({
getTeams,
handleMessageEndpoint,
start,
initializeBackgroundSync,
sendNotification,
checkInstallationStatus,
getClientId

View File

@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TPkiSubscriberDALFactory = ReturnType<typeof pkiSubscriberDALFactory>;
export const pkiSubscriberDALFactory = (db: TDbClient) => {
const pkiSubscriberOrm = ormify(db, TableName.PkiSubscriber);
return pkiSubscriberOrm;
};

View File

@@ -0,0 +1,14 @@
import { PkiSubscribersSchema } from "@app/db/schemas";
export const sanitizedPkiSubscriber = PkiSubscribersSchema.pick({
id: true,
projectId: true,
caId: true,
name: true,
commonName: true,
status: true,
subjectAlternativeNames: true,
ttl: true,
keyUsages: true,
extendedKeyUsages: true
});

View File

@@ -0,0 +1,805 @@
/* eslint-disable no-bitwise */
import { ForbiddenError, subject } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import crypto, { KeyObject } from "crypto";
import { z } from "zod";
import { ActionProjectType } from "@app/db/schemas";
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionPkiSubscriberActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { isFQDN } from "@app/lib/validator/validate-url";
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal";
import {
CertExtendedKeyUsage,
CertExtendedKeyUsageOIDToName,
CertKeyAlgorithm,
CertKeyUsage,
CertStatus
} from "@app/services/certificate/certificate-types";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import {
createSerialNumber,
getCaCertChain,
getCaCredentials,
keyAlgorithmToAlgCfg,
parseDistinguishedName
} from "@app/services/certificate-authority/certificate-authority-fns";
import { TCertificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TPkiSubscriberDALFactory } from "@app/services/pki-subscriber/pki-subscriber-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
import {
PkiSubscriberStatus,
TCreatePkiSubscriberDTO,
TDeletePkiSubscriberDTO,
TGetPkiSubscriberDTO,
TIssuePkiSubscriberCertDTO,
TListPkiSubscriberCertsDTO,
TSignPkiSubscriberCertDTO,
TUpdatePkiSubscriberDTO
} from "./pki-subscriber-types";
type TPkiSubscriberServiceFactoryDep = {
pkiSubscriberDAL: Pick<
TPkiSubscriberDALFactory,
"create" | "findById" | "updateById" | "deleteById" | "transaction" | "find" | "findOne"
>;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "findOne">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction" | "countCertificatesForPkiSubscriber" | "find">;
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
certificateSecretDAL: Pick<TCertificateSecretDALFactory, "create">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction" | "findById" | "find">;
kmsService: Pick<TKmsServiceFactory, "generateKmsKey" | "decryptWithKmsKey" | "encryptWithKmsKey">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};
export type TPkiSubscriberServiceFactory = ReturnType<typeof pkiSubscriberServiceFactory>;
export const pkiSubscriberServiceFactory = ({
pkiSubscriberDAL,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
certificateAuthoritySecretDAL,
certificateAuthorityCrlDAL,
certificateDAL,
certificateBodyDAL,
certificateSecretDAL,
projectDAL,
kmsService,
permissionService
}: TPkiSubscriberServiceFactoryDep) => {
const createSubscriber = async ({
name,
commonName,
status,
caId,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TCreatePkiSubscriberDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Create,
subject(ProjectPermissionSub.PkiSubscribers, {
name
})
);
const newSubscriber = await pkiSubscriberDAL.create({
caId,
projectId,
name,
commonName,
status,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages
});
return newSubscriber;
};
const getSubscriber = async ({
subscriberName,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TGetPkiSubscriberDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Read,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
return subscriber;
};
const updateSubscriber = async ({
subscriberName,
projectId,
name,
commonName,
status,
caId,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdatePkiSubscriberDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Edit,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
const updatedSubscriber = await pkiSubscriberDAL.updateById(subscriber.id, {
caId,
name,
commonName,
status,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages
});
return updatedSubscriber;
};
const deleteSubscriber = async ({
subscriberName,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TDeletePkiSubscriberDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Delete,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
await pkiSubscriberDAL.deleteById(subscriber.id);
return subscriber;
};
const issueSubscriberCert = async ({
subscriberName,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TIssuePkiSubscriberCertDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
if (!subscriber.caId) throw new BadRequestError({ message: "Subscriber does not have an assigned issuing CA" });
const ca = await certificateAuthorityDAL.findById(subscriber.caId);
if (!ca) throw new NotFoundError({ message: `CA with ID '${subscriber.caId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.IssueCert,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
if (subscriber.status !== PkiSubscriberStatus.ACTIVE)
throw new BadRequestError({ message: "Subscriber is not active" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
const caCertObj = new x509.X509Certificate(decryptedCaCert);
const notBeforeDate = new Date();
const notAfterDate = new Date(new Date().getTime() + ms(subscriber.ttl));
const caCertNotBeforeDate = new Date(caCertObj.notBefore);
const caCertNotAfterDate = new Date(caCertObj.notAfter);
// check not before constraint
if (notBeforeDate < caCertNotBeforeDate) {
throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
}
// check not after constraint
if (notAfterDate > caCertNotAfterDate) {
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
}
const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
const leafKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({
name: `CN=${subscriber.commonName}`,
keys: leafKeys,
signingAlgorithm: alg,
extensions: [
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.digitalSignature | x509.KeyUsageFlags.keyEncipherment)
],
attributes: [new x509.ChallengePasswordAttribute("password")]
});
const { caPrivateKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});
const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
const appCfg = getConfig();
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
const extensions: x509.Extension[] = [
new x509.BasicConstraintsExtension(false),
new x509.CRLDistributionPointsExtension([distributionPointUrl]),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
new x509.AuthorityInfoAccessExtension({
caIssuers: new x509.GeneralName("url", caIssuerUrl)
}),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy
];
const selectedKeyUsages = subscriber.keyUsages as CertKeyUsage[];
const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0);
if (keyUsagesBitValue) {
extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true));
}
if (subscriber.extendedKeyUsages.length) {
const extendedKeyUsagesExtension = new x509.ExtendedKeyUsageExtension(
subscriber.extendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku as CertExtendedKeyUsage]),
true
);
extensions.push(extendedKeyUsagesExtension);
}
let altNamesArray: { type: "email" | "dns"; value: string }[] = [];
if (subscriber.subjectAlternativeNames?.length) {
altNamesArray = subscriber.subjectAlternativeNames.map((altName) => {
if (z.string().email().safeParse(altName).success) {
return { type: "email", value: altName };
}
if (isFQDN(altName, { allow_wildcard: true })) {
return { type: "dns", value: altName };
}
throw new BadRequestError({ message: `Invalid SAN entry: ${altName}` });
});
const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false);
extensions.push(altNamesExtension);
}
const serialNumber = createSerialNumber();
const leafCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
issuer: caCertObj.subject,
notBefore: notBeforeDate,
notAfter: notAfterDate,
signingKey: caPrivateKey,
publicKey: csrObj.publicKey,
signingAlgorithm: alg,
extensions
});
const skLeafObj = KeyObject.from(leafKeys.privateKey);
const skLeaf = skLeafObj.export({ format: "pem", type: "pkcs8" }) as string;
const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(leafCert.rawData))
});
const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({
plainText: Buffer.from(skLeaf)
});
const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caCertId: caCert.id,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});
const certificateChainPem = `${issuingCaCertificate}\n${caCertChain}`.trim();
const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.from(certificateChainPem)
});
await certificateDAL.transaction(async (tx) => {
const cert = await certificateDAL.create(
{
caId: ca.id,
caCertId: caCert.id,
pkiSubscriberId: subscriber.id,
status: CertStatus.ACTIVE,
friendlyName: subscriber.commonName,
commonName: subscriber.commonName,
altNames: subscriber.subjectAlternativeNames.join(","),
serialNumber,
notBefore: notBeforeDate,
notAfter: notAfterDate,
keyUsages: selectedKeyUsages,
extendedKeyUsages: subscriber.extendedKeyUsages as CertExtendedKeyUsage[]
},
tx
);
await certificateBodyDAL.create(
{
certId: cert.id,
encryptedCertificate,
encryptedCertificateChain
},
tx
);
await certificateSecretDAL.create(
{
certId: cert.id,
encryptedPrivateKey
},
tx
);
});
return {
certificate: leafCert.toString("pem"),
certificateChain: certificateChainPem,
issuingCaCertificate,
privateKey: skLeaf,
serialNumber,
ca,
subscriber
};
};
const signSubscriberCert = async ({
subscriberName,
projectId,
csr,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TSignPkiSubscriberCertDTO) => {
const appCfg = getConfig();
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
if (!subscriber.caId) throw new BadRequestError({ message: "Subscriber does not have an assigned issuing CA" });
const ca = await certificateAuthorityDAL.findById(subscriber.caId);
if (!ca) throw new NotFoundError({ message: `CA with ID '${subscriber.caId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.IssueCert,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
if (subscriber.status !== PkiSubscriberStatus.ACTIVE)
throw new BadRequestError({ message: "Subscriber is not active" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
const caCertObj = new x509.X509Certificate(decryptedCaCert);
const notBeforeDate = new Date();
const notAfterDate = new Date(new Date().getTime() + ms(subscriber.ttl));
const caCertNotBeforeDate = new Date(caCertObj.notBefore);
const caCertNotAfterDate = new Date(caCertObj.notAfter);
// check not before constraint
if (notBeforeDate < caCertNotBeforeDate) {
throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
}
// check not after constraint
if (notAfterDate > caCertNotAfterDate) {
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
}
const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
const csrObj = new x509.Pkcs10CertificateRequest(csr);
const dn = parseDistinguishedName(csrObj.subject);
const cn = dn.commonName;
if (cn !== subscriber.commonName) {
throw new BadRequestError({ message: "Common name (CN) in the CSR does not match the subscriber's common name" });
}
const { caPrivateKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});
const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
const extensions: x509.Extension[] = [
new x509.BasicConstraintsExtension(false),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
new x509.CRLDistributionPointsExtension([distributionPointUrl]),
new x509.AuthorityInfoAccessExtension({
caIssuers: new x509.GeneralName("url", caIssuerUrl)
}),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy
];
// handle key usages
const csrKeyUsageExtension = csrObj.getExtension("2.5.29.15") as x509.KeyUsagesExtension;
let csrKeyUsages: CertKeyUsage[] = [];
if (csrKeyUsageExtension) {
csrKeyUsages = Object.values(CertKeyUsage).filter(
(keyUsage) => (x509.KeyUsageFlags[keyUsage] & csrKeyUsageExtension.usages) !== 0
);
}
const selectedKeyUsages = subscriber.keyUsages as CertKeyUsage[];
if (csrKeyUsages.some((keyUsage) => !selectedKeyUsages.includes(keyUsage))) {
throw new BadRequestError({
message: "Invalid key usage value based on subscriber's specified key usages"
});
}
const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0);
if (keyUsagesBitValue) {
extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true));
}
// handle extended key usages
const csrExtendedKeyUsageExtension = csrObj.getExtension("2.5.29.37") as x509.ExtendedKeyUsageExtension;
let csrExtendedKeyUsages: CertExtendedKeyUsage[] = [];
if (csrExtendedKeyUsageExtension) {
csrExtendedKeyUsages = csrExtendedKeyUsageExtension.usages.map(
(ekuOid) => CertExtendedKeyUsageOIDToName[ekuOid as string]
);
}
const selectedExtendedKeyUsages = subscriber.extendedKeyUsages as CertExtendedKeyUsage[];
if (csrExtendedKeyUsages.some((eku) => !selectedExtendedKeyUsages.includes(eku))) {
throw new BadRequestError({
message: "Invalid extended key usage value based on subscriber's specified extended key usages"
});
}
if (selectedExtendedKeyUsages.length) {
extensions.push(
new x509.ExtendedKeyUsageExtension(
selectedExtendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku]),
true
)
);
}
// read subject alternative names from the CSR and validate them against the subscriber's allowed SANs
let altNamesArray: {
type: "email" | "dns";
value: string;
}[] = [];
const sanExtension = csrObj.extensions.find((ext) => ext.type === "2.5.29.17");
if (sanExtension) {
const sanNames = new x509.GeneralNames(sanExtension.value);
altNamesArray = sanNames.items
.filter((value) => value.type === "email" || value.type === "dns")
.map((name) => ({
type: name.type as "email" | "dns",
value: name.value
}));
}
if (
altNamesArray
.map((altName) => altName.value)
.some((altName) => !subscriber.subjectAlternativeNames.includes(altName))
) {
throw new BadRequestError({
message: "Invalid subject alternative name based on subscriber's specified subject alternative names"
});
}
if (altNamesArray.length) {
const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false);
extensions.push(altNamesExtension);
}
const serialNumber = createSerialNumber();
const leafCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
issuer: caCertObj.subject,
notBefore: notBeforeDate,
notAfter: notAfterDate,
signingKey: caPrivateKey,
publicKey: csrObj.publicKey,
signingAlgorithm: alg,
extensions
});
const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(leafCert.rawData))
});
const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});
const certificateChainPem = `${issuingCaCertificate}\n${caCertChain}`.trim();
const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.from(certificateChainPem)
});
await certificateDAL.transaction(async (tx) => {
const cert = await certificateDAL.create(
{
caId: ca.id,
caCertId: caCert.id,
pkiSubscriberId: subscriber.id,
status: CertStatus.ACTIVE,
friendlyName: subscriber.commonName,
commonName: subscriber.commonName,
altNames: subscriber.subjectAlternativeNames.join(","),
serialNumber,
notBefore: notBeforeDate,
notAfter: notAfterDate,
keyUsages: selectedKeyUsages,
extendedKeyUsages: selectedExtendedKeyUsages
},
tx
);
await certificateBodyDAL.create(
{
certId: cert.id,
encryptedCertificate,
encryptedCertificateChain
},
tx
);
return cert;
});
return {
certificate: leafCert.toString("pem"),
certificateChain: `${issuingCaCertificate}\n${caCertChain}`.trim(),
issuingCaCertificate,
serialNumber,
ca,
commonName: subscriber.commonName,
subscriber
};
};
const listSubscriberCerts = async ({
subscriberName,
projectId,
offset,
limit,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TListPkiSubscriberCertsDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.ListCerts,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
const certificates = await certificateDAL.find(
{
pkiSubscriberId: subscriber.id
},
{ offset, limit, sort: [["updatedAt", "desc"]] }
);
const count = await certificateDAL.countCertificatesForPkiSubscriber(subscriber.id);
return {
certificates,
totalCount: count
};
};
return {
createSubscriber,
getSubscriber,
updateSubscriber,
deleteSubscriber,
issueSubscriberCert,
signSubscriberCert,
listSubscriberCerts
};
};
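
To make the issuance flow concrete, a hedged sketch of a caller (actor values are hypothetical; the service resolves the subscriber by name, enforces the IssueCert permission, checks subscriber/CA status, and bounds validity to the CA certificate window before signing):

// Hypothetical call site (not part of the diff)
const { certificate, certificateChain, privateKey, serialNumber } =
  await pkiSubscriberService.issueSubscriberCert({
    subscriberName: "web-service",
    projectId: "proj_123", // hypothetical IDs
    actor: ActorType.USER,
    actorId: "user_123",
    actorAuthMethod, // as provided by the auth layer
    actorOrgId: "org_123"
  });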

View File

@@ -0,0 +1,54 @@
import { TProjectPermission } from "@app/lib/types";
import { CertExtendedKeyUsage, CertKeyUsage } from "../certificate/certificate-types";
export enum PkiSubscriberStatus {
ACTIVE = "active",
DISABLED = "disabled"
}
export type TCreatePkiSubscriberDTO = {
caId: string;
name: string;
commonName: string;
status: PkiSubscriberStatus;
ttl: string;
subjectAlternativeNames: string[];
keyUsages: CertKeyUsage[];
extendedKeyUsages: CertExtendedKeyUsage[];
} & TProjectPermission;
export type TGetPkiSubscriberDTO = {
subscriberName: string;
} & TProjectPermission;
export type TUpdatePkiSubscriberDTO = {
subscriberName: string;
caId?: string;
name?: string;
commonName?: string;
status?: PkiSubscriberStatus;
ttl?: string;
subjectAlternativeNames?: string[];
keyUsages?: CertKeyUsage[];
extendedKeyUsages?: CertExtendedKeyUsage[];
} & TProjectPermission;
export type TDeletePkiSubscriberDTO = {
subscriberName: string;
} & TProjectPermission;
export type TIssuePkiSubscriberCertDTO = {
subscriberName: string;
} & TProjectPermission;
export type TSignPkiSubscriberCertDTO = {
subscriberName: string;
csr: string;
} & TProjectPermission;
export type TListPkiSubscriberCertsDTO = {
subscriberName: string;
offset: number;
limit: number;
} & TProjectPermission;
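
A sample TCreatePkiSubscriberDTO payload with hedged example values (the actor fields come from the intersected TProjectPermission; the key-usage enum member name is an assumption):

const dto: TCreatePkiSubscriberDTO = {
  caId: "ca_123", // hypothetical IDs throughout
  name: "web-service",
  commonName: "service.acme.com",
  status: PkiSubscriberStatus.ACTIVE,
  ttl: "30d", // parsed with ms() at issuance time
  subjectAlternativeNames: ["service.acme.com"],
  keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE], // assumed member name
  extendedKeyUsages: [],
  // TProjectPermission fields
  projectId: "proj_123",
  actor: ActorType.USER,
  actorId: "user_123",
  actorAuthMethod,
  actorOrgId: "org_123"
};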

View File

@@ -15,6 +15,7 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
import {
ProjectPermissionActions,
ProjectPermissionCertificateActions,
ProjectPermissionPkiSubscriberActions,
ProjectPermissionSecretActions,
ProjectPermissionSshHostActions,
ProjectPermissionSub
@@ -35,6 +36,7 @@ import { groupBy } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TProjectPermission } from "@app/lib/types";
import { TQueueServiceFactory } from "@app/queue";
import { TPkiSubscriberDALFactory } from "@app/services/pki-subscriber/pki-subscriber-dal";
import { ActorType } from "../auth/auth-type";
import { TCertificateDALFactory } from "../certificate/certificate-dal";
@@ -86,6 +88,7 @@ import {
TListProjectCasDTO,
TListProjectCertificateTemplatesDTO,
TListProjectCertsDTO,
TListProjectPkiSubscribersDTO,
TListProjectsDTO,
TListProjectSshCasDTO,
TListProjectSshCertificatesDTO,
@@ -145,6 +148,7 @@ type TProjectServiceFactoryDep = {
"findById" | "findByIdWithWorkflowIntegrationDetails"
>;
projectUserMembershipRoleDAL: Pick<TProjectUserMembershipRoleDALFactory, "create">;
pkiSubscriberDAL: Pick<TPkiSubscriberDALFactory, "find">;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "find">;
certificateDAL: Pick<TCertificateDALFactory, "find" | "countCertificatesInProject">;
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getCertTemplatesByProjectId">;
@@ -207,6 +211,7 @@ export const projectServiceFactory = ({
certificateTemplateDAL,
pkiCollectionDAL,
pkiAlertDAL,
pkiSubscriberDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
@@ -1057,6 +1062,45 @@ export const projectServiceFactory = ({
};
};
/**
* Return list of PKI subscribers for project
*/
const listProjectPkiSubscribers = async ({
actorId,
actorOrgId,
actorAuthMethod,
actor,
projectId
}: TListProjectPkiSubscribersDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
const allowedSubscribers = [];
// (dangtony98): room to optimize
const subscribers = await pkiSubscriberDAL.find({ projectId });
for (const subscriber of subscribers) {
const canRead = permission.can(
ProjectPermissionPkiSubscriberActions.Read,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
if (canRead) {
allowedSubscribers.push(subscriber);
}
}
return allowedSubscribers;
};
/**
* Return list of certificate templates for project
*/
@@ -1156,17 +1200,15 @@ export const projectServiceFactory = ({
const hosts = await sshHostDAL.findSshHostsWithLoginMappings(projectId);
for (const host of hosts) {
-      try {
-        ForbiddenError.from(permission).throwUnlessCan(
-          ProjectPermissionSshHostActions.Read,
-          subject(ProjectPermissionSub.SshHosts, {
-            hostname: host.hostname
-          })
-        );
+      const canRead = permission.can(
+        ProjectPermissionSshHostActions.Read,
+        subject(ProjectPermissionSub.SshHosts, {
+          hostname: host.hostname
+        })
+      );
+      if (canRead) {
allowedHosts.push(host);
-      } catch {
-        // intentionally ignore projects where user lacks access
}
}
@@ -1930,6 +1972,7 @@ export const projectServiceFactory = ({
listProjectSshCas,
listProjectSshHosts,
listProjectSshHostGroups,
listProjectPkiSubscribers,
listProjectSshCertificates,
listProjectSshCertificateTemplates,
updateVersionLimit,
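
Worth noting: the SSH host listing above was reworked from a try/catch around throwUnlessCan into a plain permission.can boolean, mirroring the filter pattern used by the new listProjectPkiSubscribers. A minimal sketch of the distinction (CASL ability assumed):

// enforce access on a single resource: throws ForbiddenError on deny
ForbiddenError.from(permission).throwUnlessCan(action, resource);

// filter a collection without using exceptions for control flow
const visible = items.filter((item) => permission.can(action, toSubject(item))); // toSubject is a hypothetical helper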

View File

@@ -155,6 +155,7 @@ export type TListProjectCertificateTemplatesDTO = TProjectPermission;
export type TListProjectSshCasDTO = TProjectPermission;
export type TListProjectSshHostsDTO = TProjectPermission;
export type TListProjectSshCertificateTemplatesDTO = TProjectPermission;
export type TListProjectPkiSubscribersDTO = TProjectPermission;
export type TListProjectSshCertificatesDTO = {
offset: number;
limit: number;

View File

@@ -0,0 +1,4 @@
export * from "./oci-vault-sync-constants";
export * from "./oci-vault-sync-fns";
export * from "./oci-vault-sync-schemas";
export * from "./oci-vault-sync-types";

View File

@@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";
export const OCI_VAULT_SYNC_LIST_OPTION: TSecretSyncListItem = {
name: "OCI Vault",
destination: SecretSync.OCIVault,
connection: AppConnection.OCI,
canImportSecrets: true
};

View File

@@ -0,0 +1,292 @@
import { secrets, vault } from "oci-sdk";
import { delay } from "@app/lib/delay";
import { getOCIProvider } from "@app/services/app-connection/oci";
import {
TCreateOCIVaultVariable,
TDeleteOCIVaultVariable,
TOCIVaultListVariables,
TOCIVaultSyncWithCredentials,
TUnmarkOCIVaultVariableFromDeletion,
TUpdateOCIVaultVariable
} from "@app/services/secret-sync/oci-vault/oci-vault-sync-types";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { TSecretMap } from "@app/services/secret-sync/secret-sync-types";
const listOCIVaultVariables = async ({ provider, compartmentId, vaultId, onlyActive }: TOCIVaultListVariables) => {
const vaultsClient = new vault.VaultsClient({ authenticationDetailsProvider: provider });
const secretsClient = new secrets.SecretsClient({ authenticationDetailsProvider: provider });
const secretsRes = await vaultsClient.listSecrets({
compartmentId,
vaultId,
lifecycleState: onlyActive ? vault.models.SecretSummary.LifecycleState.Active : undefined
});
const result: Record<string, vault.models.SecretSummary & { name: string; value: string }> = {};
for await (const s of secretsRes.items) {
let secretValue = "";
if (s.lifecycleState === vault.models.SecretSummary.LifecycleState.Active) {
const secretBundle = await secretsClient.getSecretBundle({
secretId: s.id
});
secretValue = Buffer.from(secretBundle.secretBundle.secretBundleContent?.content || "", "base64").toString(
"utf-8"
);
}
result[s.secretName] = {
...s,
name: s.secretName,
value: secretValue
};
}
return result;
};
const createOCIVaultVariable = async ({
provider,
compartmentId,
vaultId,
keyId,
name,
value
}: TCreateOCIVaultVariable) => {
if (!value) return;
const vaultsClient = new vault.VaultsClient({ authenticationDetailsProvider: provider });
return vaultsClient.createSecret({
createSecretDetails: {
compartmentId,
vaultId,
keyId,
secretName: name,
enableAutoGeneration: false,
secretContent: {
content: Buffer.from(value).toString("base64"),
contentType: "BASE64"
}
}
});
};
const updateOCIVaultVariable = async ({ provider, secretId, value }: TUpdateOCIVaultVariable) => {
if (!value) return;
const vaultsClient = new vault.VaultsClient({ authenticationDetailsProvider: provider });
return vaultsClient.updateSecret({
secretId,
updateSecretDetails: {
enableAutoGeneration: false,
secretContent: {
content: Buffer.from(value).toString("base64"),
contentType: "BASE64"
}
}
});
};
const deleteOCIVaultVariable = async ({ provider, secretId }: TDeleteOCIVaultVariable) => {
const vaultsClient = new vault.VaultsClient({ authenticationDetailsProvider: provider });
// Schedule a secret deletion 7 days from now. OCI Vault requires a MINIMUM buffer period of 7 days
return vaultsClient.scheduleSecretDeletion({
secretId,
scheduleSecretDeletionDetails: {
timeOfDeletion: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000)
}
});
};
const unmarkOCIVaultVariableFromDeletion = async ({ provider, secretId }: TUnmarkOCIVaultVariableFromDeletion) => {
const vaultsClient = new vault.VaultsClient({ authenticationDetailsProvider: provider });
return vaultsClient.cancelSecretDeletion({
secretId
});
};
export const OCIVaultSyncFns = {
syncSecrets: async (secretSync: TOCIVaultSyncWithCredentials, secretMap: TSecretMap) => {
const {
connection,
destinationConfig: { compartmentOcid, vaultOcid, keyOcid }
} = secretSync;
const provider = await getOCIProvider(connection);
const variables = await listOCIVaultVariables({ provider, compartmentId: compartmentOcid, vaultId: vaultOcid });
// Throw an error if any secrets are in a transitional lifecycle state in OCI Vault to prevent skipped updates
if (
Object.entries(variables).some(
([, secret]) =>
secret.lifecycleState === vault.models.SecretSummary.LifecycleState.Updating ||
secret.lifecycleState === vault.models.SecretSummary.LifecycleState.CancellingDeletion ||
secret.lifecycleState === vault.models.SecretSummary.LifecycleState.Creating ||
secret.lifecycleState === vault.models.SecretSummary.LifecycleState.Deleting ||
secret.lifecycleState === vault.models.SecretSummary.LifecycleState.SchedulingDeletion
)
) {
throw new SecretSyncError({
error: "Cannot sync while keys are updating in OCI Vault."
});
}
// Create secrets
for await (const entry of Object.entries(secretMap)) {
const [key, { value }] = entry;
// skip secrets that don't have a value set
if (!value) {
// eslint-disable-next-line no-continue
continue;
}
const existingVariable = Object.values(variables).find((v) => v.secretName === key);
if (!existingVariable) {
try {
await createOCIVaultVariable({
compartmentId: compartmentOcid,
vaultId: vaultOcid,
provider,
keyId: keyOcid,
name: key,
value
});
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
} else if (existingVariable.lifecycleState === vault.models.SecretSummary.LifecycleState.PendingDeletion) {
// If a secret exists but is pending deletion, cancel the deletion and update the secret
await unmarkOCIVaultVariableFromDeletion({
provider,
compartmentId: compartmentOcid,
vaultId: vaultOcid,
secretId: existingVariable.id
});
const vaultsClient = new vault.VaultsClient({ authenticationDetailsProvider: provider });
const MAX_RETRIES = 10;
for (let i = 0; i < MAX_RETRIES; i += 1) {
// eslint-disable-next-line no-await-in-loop
await delay(5000);
// eslint-disable-next-line no-await-in-loop
const secret = await vaultsClient.getSecret({
secretId: existingVariable.id
});
if (secret.secret.lifecycleState === vault.models.SecretSummary.LifecycleState.Active) {
// eslint-disable-next-line no-await-in-loop
await updateOCIVaultVariable({
provider,
compartmentId: compartmentOcid,
vaultId: vaultOcid,
secretId: existingVariable.id,
value
});
break;
}
if (i === MAX_RETRIES - 1) {
throw new SecretSyncError({
error: "Failed to update secret after cancelling deletion.",
secretKey: key
});
}
}
}
}
// Update and delete secrets
for await (const [key, variable] of Object.entries(variables)) {
// Only update / delete active secrets
if (variable.lifecycleState === vault.models.SecretSummary.LifecycleState.Active) {
if (key in secretMap && secretMap[key].value.length > 0) {
if (variable.value !== secretMap[key].value) {
try {
await updateOCIVaultVariable({
compartmentId: compartmentOcid,
vaultId: vaultOcid,
provider,
secretId: variable.id,
value: secretMap[key].value
});
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
}
} else if (!secretSync.syncOptions.disableSecretDeletion) {
try {
await deleteOCIVaultVariable({
compartmentId: compartmentOcid,
vaultId: vaultOcid,
provider,
secretId: variable.id
});
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
}
}
}
},
removeSecrets: async (secretSync: TOCIVaultSyncWithCredentials, secretMap: TSecretMap) => {
const {
connection,
destinationConfig: { compartmentOcid, vaultOcid }
} = secretSync;
const provider = await getOCIProvider(connection);
const variables = await listOCIVaultVariables({
provider,
compartmentId: compartmentOcid,
vaultId: vaultOcid,
onlyActive: true
});
for await (const [key, variable] of Object.entries(variables)) {
if (key in secretMap) {
try {
await deleteOCIVaultVariable({
compartmentId: compartmentOcid,
vaultId: vaultOcid,
provider,
secretId: variable.id
});
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
}
}
},
getSecrets: async (secretSync: TOCIVaultSyncWithCredentials) => {
const {
connection,
destinationConfig: { compartmentOcid, vaultOcid }
} = secretSync;
const provider = await getOCIProvider(connection);
return listOCIVaultVariables({ provider, compartmentId: compartmentOcid, vaultId: vaultOcid, onlyActive: true });
}
};
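
A hedged sketch of invoking the sync entry point directly (real calls go through the secret-sync queue; the sync and secret-map objects here are hypothetical):

// secretSync must satisfy TOCIVaultSyncWithCredentials (OCI connection + destinationConfig)
await OCIVaultSyncFns.syncSecrets(secretSync, {
  DB_PASSWORD: { value: "s3cret" } // hypothetical entry; map shape is key -> { value, ... }
} as TSecretMap);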

View File

@@ -0,0 +1,70 @@
import RE2 from "re2";
import { z } from "zod";
import { SecretSyncs } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
BaseSecretSyncSchema,
GenericCreateSecretSyncFieldsSchema,
GenericUpdateSecretSyncFieldsSchema
} from "@app/services/secret-sync/secret-sync-schemas";
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";
const OCIVaultSyncDestinationConfigSchema = z.object({
compartmentOcid: z
.string()
.trim()
.min(1, "Compartment OCID required")
.refine(
(val) => new RE2("^ocid1\\.(tenancy|compartment)\\.oc1\\..+$").test(val),
"Invalid Compartment OCID format. Must start with ocid1.tenancy.oc1. or ocid1.compartment.oc1."
)
.describe(SecretSyncs.DESTINATION_CONFIG.OCI_VAULT.compartmentOcid),
vaultOcid: z
.string()
.trim()
.min(1, "Vault OCID required")
.refine(
(val) => new RE2("^ocid1\\.vault\\.oc1\\..+$").test(val),
"Invalid Vault OCID format. Must start with ocid1.vault.oc1."
)
.describe(SecretSyncs.DESTINATION_CONFIG.OCI_VAULT.vaultOcid),
keyOcid: z
.string()
.trim()
.min(1, "Key OCID required")
.refine(
(val) => new RE2("^ocid1\\.key\\.oc1\\..+$").test(val),
"Invalid Key OCID format. Must start with ocid1.key.oc1."
)
.describe(SecretSyncs.DESTINATION_CONFIG.OCI_VAULT.keyOcid)
});
const OCIVaultSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
export const OCIVaultSyncSchema = BaseSecretSyncSchema(SecretSync.OCIVault, OCIVaultSyncOptionsConfig).extend({
destination: z.literal(SecretSync.OCIVault),
destinationConfig: OCIVaultSyncDestinationConfigSchema
});
export const CreateOCIVaultSyncSchema = GenericCreateSecretSyncFieldsSchema(
SecretSync.OCIVault,
OCIVaultSyncOptionsConfig
).extend({
destinationConfig: OCIVaultSyncDestinationConfigSchema
});
export const UpdateOCIVaultSyncSchema = GenericUpdateSecretSyncFieldsSchema(
SecretSync.OCIVault,
OCIVaultSyncOptionsConfig
).extend({
destinationConfig: OCIVaultSyncDestinationConfigSchema.optional()
});
export const OCIVaultSyncListItemSchema = z.object({
name: z.literal("OCI Vault"),
connection: z.literal(AppConnection.OCI),
destination: z.literal(SecretSync.OCIVault),
canImportSecrets: z.literal(true)
});
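
The OCID refinements can be exercised with safeParse; a quick hedged example (OCIDs are hypothetical, and the destination-config schema is module-private, so this is illustrative only):

// passes: each OCID carries the expected ocid1.<type>.oc1. prefix
OCIVaultSyncDestinationConfigSchema.safeParse({
  compartmentOcid: "ocid1.compartment.oc1..example",
  vaultOcid: "ocid1.vault.oc1..example",
  keyOcid: "ocid1.key.oc1..example"
}); // => { success: true, ... }

// fails: wrong prefix on the vault OCID
OCIVaultSyncDestinationConfigSchema.safeParse({
  compartmentOcid: "ocid1.compartment.oc1..example",
  vaultOcid: "ocid1.bucket.oc1..example",
  keyOcid: "ocid1.key.oc1..example"
}); // => { success: false, ... }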

View File

@@ -0,0 +1,48 @@
import { SimpleAuthenticationDetailsProvider } from "oci-sdk";
import { z } from "zod";
import { TOCIConnection } from "@app/services/app-connection/oci";
import { CreateOCIVaultSyncSchema, OCIVaultSyncListItemSchema, OCIVaultSyncSchema } from "./oci-vault-sync-schemas";
export type TOCIVaultSync = z.infer<typeof OCIVaultSyncSchema>;
export type TOCIVaultSyncInput = z.infer<typeof CreateOCIVaultSyncSchema>;
export type TOCIVaultSyncListItem = z.infer<typeof OCIVaultSyncListItemSchema>;
export type TOCIVaultSyncWithCredentials = TOCIVaultSync & {
connection: TOCIConnection;
};
export type TOCIVaultVariable = {
id: string;
name: string;
value: string;
};
export type TOCIVaultListVariables = {
provider: SimpleAuthenticationDetailsProvider;
compartmentId: string;
vaultId: string;
onlyActive?: boolean; // Whether to filter for only active secrets. Removes deleted / scheduled for deletion secrets
};
export type TCreateOCIVaultVariable = TOCIVaultListVariables & {
keyId: string;
name: string;
value: string;
};
export type TUpdateOCIVaultVariable = TOCIVaultListVariables & {
secretId: string;
value: string;
};
export type TDeleteOCIVaultVariable = TOCIVaultListVariables & {
secretId: string;
};
export type TUnmarkOCIVaultVariableFromDeletion = TOCIVaultListVariables & {
secretId: string;
};
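
For completeness, a hedged example of the list-variables input (the provider comes from getOCIProvider on an OCI app connection; OCIDs are hypothetical):

const input: TOCIVaultListVariables = {
  provider, // SimpleAuthenticationDetailsProvider from oci-sdk
  compartmentId: "ocid1.compartment.oc1..example",
  vaultId: "ocid1.vault.oc1..example",
  onlyActive: true // drop deleted / pending-deletion secrets
};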

View File

@@ -12,7 +12,8 @@ export enum SecretSync {
Vercel = "vercel",
Windmill = "windmill",
HCVault = "hashicorp-vault",
TeamCity = "teamcity"
TeamCity = "teamcity",
OCIVault = "oci-vault"
}
export enum SecretSyncInitialSyncBehavior {

View File

@@ -28,6 +28,7 @@ import { GcpSyncFns } from "./gcp/gcp-sync-fns";
import { HC_VAULT_SYNC_LIST_OPTION, HCVaultSyncFns } from "./hc-vault";
import { HUMANITEC_SYNC_LIST_OPTION } from "./humanitec";
import { HumanitecSyncFns } from "./humanitec/humanitec-sync-fns";
import { OCI_VAULT_SYNC_LIST_OPTION, OCIVaultSyncFns } from "./oci-vault";
import { TEAMCITY_SYNC_LIST_OPTION, TeamCitySyncFns } from "./teamcity";
import { TERRAFORM_CLOUD_SYNC_LIST_OPTION, TerraformCloudSyncFns } from "./terraform-cloud";
import { VERCEL_SYNC_LIST_OPTION, VercelSyncFns } from "./vercel";
@@ -47,7 +48,8 @@ const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
[SecretSync.Vercel]: VERCEL_SYNC_LIST_OPTION,
[SecretSync.Windmill]: WINDMILL_SYNC_LIST_OPTION,
[SecretSync.HCVault]: HC_VAULT_SYNC_LIST_OPTION,
-  [SecretSync.TeamCity]: TEAMCITY_SYNC_LIST_OPTION
+  [SecretSync.TeamCity]: TEAMCITY_SYNC_LIST_OPTION,
+  [SecretSync.OCIVault]: OCI_VAULT_SYNC_LIST_OPTION
};
export const listSecretSyncOptions = () => {
@@ -148,6 +150,8 @@ export const SecretSyncFns = {
return HCVaultSyncFns.syncSecrets(secretSync, secretMap);
case SecretSync.TeamCity:
return TeamCitySyncFns.syncSecrets(secretSync, secretMap);
case SecretSync.OCIVault:
return OCIVaultSyncFns.syncSecrets(secretSync, secretMap);
default:
throw new Error(
`Unhandled sync destination for sync secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
@@ -213,6 +217,9 @@ export const SecretSyncFns = {
case SecretSync.TeamCity:
secretMap = await TeamCitySyncFns.getSecrets(secretSync);
break;
case SecretSync.OCIVault:
secretMap = await OCIVaultSyncFns.getSecrets(secretSync);
break;
default:
throw new Error(
`Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
@@ -270,6 +277,8 @@ export const SecretSyncFns = {
return HCVaultSyncFns.removeSecrets(secretSync, secretMap);
case SecretSync.TeamCity:
return TeamCitySyncFns.removeSecrets(secretSync, secretMap);
case SecretSync.OCIVault:
return OCIVaultSyncFns.removeSecrets(secretSync, secretMap);
default:
throw new Error(
`Unhandled sync destination for remove secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`

View File

@@ -15,7 +15,8 @@ export const SECRET_SYNC_NAME_MAP: Record<SecretSync, string> = {
[SecretSync.Vercel]: "Vercel",
[SecretSync.Windmill]: "Windmill",
[SecretSync.HCVault]: "Hashicorp Vault",
[SecretSync.TeamCity]: "TeamCity"
[SecretSync.TeamCity]: "TeamCity",
[SecretSync.OCIVault]: "OCI Vault"
};
export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
@@ -32,5 +33,6 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
[SecretSync.Vercel]: AppConnection.Vercel,
[SecretSync.Windmill]: AppConnection.Windmill,
[SecretSync.HCVault]: AppConnection.HCVault,
-  [SecretSync.TeamCity]: AppConnection.TeamCity
+  [SecretSync.TeamCity]: AppConnection.TeamCity,
+  [SecretSync.OCIVault]: AppConnection.OCI
};

View File

@@ -67,6 +67,7 @@ import {
THumanitecSyncListItem,
THumanitecSyncWithCredentials
} from "./humanitec";
import { TOCIVaultSync, TOCIVaultSyncInput, TOCIVaultSyncListItem, TOCIVaultSyncWithCredentials } from "./oci-vault";
import {
TTeamCitySync,
TTeamCitySyncInput,
@@ -95,7 +96,8 @@ export type TSecretSync =
| TVercelSync
| TWindmillSync
| THCVaultSync
-  | TTeamCitySync;
+  | TTeamCitySync
+  | TOCIVaultSync;
export type TSecretSyncWithCredentials =
| TAwsParameterStoreSyncWithCredentials
@@ -111,7 +113,8 @@ export type TSecretSyncWithCredentials =
| TVercelSyncWithCredentials
| TWindmillSyncWithCredentials
| THCVaultSyncWithCredentials
-  | TTeamCitySyncWithCredentials;
+  | TTeamCitySyncWithCredentials
+  | TOCIVaultSyncWithCredentials;
export type TSecretSyncInput =
| TAwsParameterStoreSyncInput
@@ -127,7 +130,8 @@ export type TSecretSyncInput =
| TVercelSyncInput
| TWindmillSyncInput
| THCVaultSyncInput
-  | TTeamCitySyncInput;
+  | TTeamCitySyncInput
+  | TOCIVaultSyncInput;
export type TSecretSyncListItem =
| TAwsParameterStoreSyncListItem
@@ -143,7 +147,8 @@ export type TSecretSyncListItem =
| TVercelSyncListItem
| TWindmillSyncListItem
| THCVaultSyncListItem
-  | TTeamCitySyncListItem;
+  | TTeamCitySyncListItem
+  | TOCIVaultSyncListItem;
export type TSyncOptionsConfig = {
canImportSecrets: boolean;

View File

@@ -246,7 +246,8 @@ export const superAdminServiceFactory = ({
await microsoftTeamsService.initializeTeamsBot({
botAppId: decryptedAppId.toString(),
-      botAppPassword: decryptedAppPassword.toString()
+      botAppPassword: decryptedAppPassword.toString(),
+      lastUpdatedAt: updatedServerCfg.updatedAt
});
}

View File

@@ -189,6 +189,7 @@ export type TSignCertificateEvent = {
properties: {
caId?: string;
certificateTemplateId?: string;
subscriberId?: string;
commonName: string;
userAgent?: string;
};
@@ -199,6 +200,7 @@ export type TIssueCertificateEvent = {
properties: {
caId?: string;
certificateTemplateId?: string;
subscriberId?: string;
commonName: string;
userAgent?: string;
};

View File

@@ -1,115 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
)
func TestCommitAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
commit string
commitAllowed bool
}{
{
allowlist: Allowlist{
Commits: []string{"commitA"},
},
commit: "commitA",
commitAllowed: true,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "commitA",
commitAllowed: false,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "",
commitAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.commitAllowed, tt.allowlist.CommitAllowed(tt.commit))
}
}
func TestRegexAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
secret string
regexAllowed bool
}{
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret: matchthis, done",
regexAllowed: true,
},
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret",
regexAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.regexAllowed, tt.allowlist.RegexAllowed(tt.secret))
}
}
func TestPathAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
path string
pathAllowed bool
}{
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a path",
pathAllowed: true,
},
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a ???",
pathAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.pathAllowed, tt.allowlist.PathAllowed(tt.path))
}
}

View File

@@ -1,279 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"fmt"
"regexp"
"strings"
"github.com/rs/zerolog/log"
"github.com/spf13/viper"
)
//go:embed infisical-scan.toml
var DefaultConfig string
// use to keep track of how many configs we can extend
// yea I know, globals bad
var extendDepth int
const maxExtendDepth = 2
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Description string
Extend Extend
Rules []struct {
ID string
Description string
Entropy float64
SecretGroup int
Regex string
Keywords []string
Path string
Tags []string
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Extend Extend
Path string
Description string
Rules map[string]Rule
Allowlist Allowlist
Keywords []string
// used to keep sarif results consistent
orderedRules []string
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords []string
orderedRules []string
)
rulesMap := make(map[string]Rule)
for _, r := range vc.Rules {
var allowlistRegexes []*regexp.Regexp
for _, a := range r.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range r.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
if r.Keywords == nil {
r.Keywords = []string{}
} else {
for _, k := range r.Keywords {
keywords = append(keywords, strings.ToLower(k))
}
}
if r.Tags == nil {
r.Tags = []string{}
}
var configRegex *regexp.Regexp
var configPathRegex *regexp.Regexp
if r.Regex == "" {
configRegex = nil
} else {
configRegex = regexp.MustCompile(r.Regex)
}
if r.Path == "" {
configPathRegex = nil
} else {
configPathRegex = regexp.MustCompile(r.Path)
}
r := Rule{
Description: r.Description,
RuleID: r.ID,
Regex: configRegex,
Path: configPathRegex,
SecretGroup: r.SecretGroup,
Entropy: r.Entropy,
Tags: r.Tags,
Keywords: r.Keywords,
Allowlist: Allowlist{
RegexTarget: r.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: r.Allowlist.Commits,
StopWords: r.Allowlist.StopWords,
},
}
orderedRules = append(orderedRules, r.RuleID)
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return Config{}, fmt.Errorf("%s invalid regex secret group %d, max regex secret group %d", r.Description, r.SecretGroup, r.Regex.NumSubexp())
}
rulesMap[r.RuleID] = r
}
var allowlistRegexes []*regexp.Regexp
for _, a := range vc.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range vc.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
c := Config{
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Allowlist: Allowlist{
RegexTarget: vc.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: vc.Allowlist.Commits,
StopWords: vc.Allowlist.StopWords,
},
Keywords: keywords,
orderedRules: orderedRules,
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
log.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
c.extendDefault()
} else if c.Extend.Path != "" {
c.extendPath()
}
}
return c, nil
}
func (c *Config) OrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.orderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msg("extending config with default config")
c.extend(cfg)
}
func (c *Config) extendPath() {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
for ruleID, rule := range extensionConfig.Rules {
if _, ok := c.Rules[ruleID]; !ok {
log.Trace().Msgf("adding %s to base config", ruleID)
c.Rules[ruleID] = rule
c.Keywords = append(c.Keywords, rule.Keywords...)
}
}
// append allowlists, not attempting to merge
c.Allowlist.Commits = append(c.Allowlist.Commits,
extensionConfig.Allowlist.Commits...)
c.Allowlist.Paths = append(c.Allowlist.Paths,
extensionConfig.Allowlist.Paths...)
c.Allowlist.Regexes = append(c.Allowlist.Regexes,
extensionConfig.Allowlist.Regexes...)
}

View File

@@ -1,170 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"regexp"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
)
const configPath = "../testdata/config/"
func TestTranslate(t *testing.T) {
tests := []struct {
cfgName string
cfg Config
wantError error
}{
{
cfgName: "allow_aws_re",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Regexes: []*regexp.Regexp{
regexp.MustCompile("AKIALALEMEL33243OLIA"),
},
},
},
},
},
},
{
cfgName: "allow_commit",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Commits: []string{"allowthiscommit"},
},
},
},
},
},
{
cfgName: "allow_path",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Paths: []*regexp.Regexp{
regexp.MustCompile(".go"),
},
},
},
},
},
},
{
cfgName: "entropy_group",
cfg: Config{
Rules: map[string]Rule{"discord-api-key": {
Description: "Discord API key",
Regex: regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
RuleID: "discord-api-key",
Allowlist: Allowlist{},
Entropy: 3.5,
SecretGroup: 3,
Tags: []string{},
Keywords: []string{},
},
},
},
},
{
cfgName: "bad_entropy_group",
cfg: Config{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "base",
cfg: Config{
Rules: map[string]Rule{
"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
},
"aws-secret-key": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key",
},
"aws-secret-key-again": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key-again",
},
},
},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
assert.Equal(t, cfg.Rules, tt.cfg.Rules)
}
}
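
For orientation: each case above loads a TOML fixture by name from ../testdata/config/. A minimal sketch of the same Viper-to-Config translation path, assuming a hypothetical in-memory fixture shaped like allow_commit.toml (the testdata files themselves are not shown in this diff):

// Hypothetical sketch: translate an inline TOML fixture instead of a
// file from ../testdata/config/ (assumes the usual viper and strings imports).
const inlineFixture = `
[[rules]]
id = "aws-access-key"
description = "AWS Access Key"
regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}'''
tags = ["key", "AWS"]

[rules.allowlist]
commits = ["allowthiscommit"]
`

func translateInline() (Config, error) {
	viper.Reset()
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(inlineFixture)); err != nil {
		return Config{}, err
	}
	var vc ViperConfig
	if err := viper.Unmarshal(&vc); err != nil {
		return Config{}, err
	}
	return vc.Translate() // one rule keyed by "aws-access-key", with the commit allowlist attached
}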


@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: example-auth-secret
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path


@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: changeThisOnProduction
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

File diff suppressed because it is too large.


@@ -1,43 +0,0 @@
package config
import (
"regexp"
)
// Rule contains information defining how to detect secrets
type Rule struct {
// Description is the description of the rule.
Description string
// RuleID is a unique identifier for this rule
RuleID string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
Allowlist Allowlist
}


@@ -1,24 +0,0 @@
package config
import (
"regexp"
)
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}
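
A short usage sketch of these helpers (the patterns and paths are hypothetical):

paths := []*regexp.Regexp{
	regexp.MustCompile(`(^|/)vendor/`),
	regexp.MustCompile(`\.lock$`),
}
fmt.Println(anyRegexMatch("vendor/modules.txt", paths)) // true: first pattern matches
fmt.Println(anyRegexMatch("main.go", paths))            // false: nothing matches
fmt.Println(regexMatched("main.go", nil))               // false: a nil regex never matches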


@@ -25,35 +25,31 @@ package detect
import (
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"github.com/rs/zerolog/log"
"github.com/Infisical/infisical-merge/report"
"github.com/Infisical/infisical-merge/detect/report"
)
func IsNew(finding report.Finding, baseline []report.Finding) bool {
func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
// the code requires maintanance if/when the Finding struct changes
// the code requires maintenance if/when the Finding struct changes
for _, b := range baseline {
if finding.Author == b.Author &&
finding.Commit == b.Commit &&
finding.Date == b.Date &&
if finding.RuleID == b.RuleID &&
finding.Description == b.Description &&
finding.Email == b.Email &&
finding.EndColumn == b.EndColumn &&
finding.StartLine == b.StartLine &&
finding.EndLine == b.EndLine &&
finding.Entropy == b.Entropy &&
finding.File == b.File &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Match == b.Match &&
finding.Message == b.Message &&
finding.RuleID == b.RuleID &&
finding.Secret == b.Secret &&
finding.StartColumn == b.StartColumn &&
finding.StartLine == b.StartLine {
finding.EndColumn == b.EndColumn &&
(redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
finding.File == b.File &&
finding.Commit == b.Commit &&
finding.Author == b.Author &&
finding.Email == b.Email &&
finding.Date == b.Date &&
finding.Message == b.Message &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Entropy == b.Entropy {
return false
}
}
@@ -61,23 +57,12 @@ func IsNew(finding report.Finding, baseline []report.Finding) bool {
}
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
var previousFindings []report.Finding
jsonFile, err := os.Open(baselinePath)
bytes, err := os.ReadFile(baselinePath)
if err != nil {
return nil, fmt.Errorf("could not open %s", baselinePath)
}
defer func() {
if cerr := jsonFile.Close(); cerr != nil {
log.Warn().Err(cerr).Msg("problem closing jsonFile handle")
}
}()
bytes, err := io.ReadAll(jsonFile)
if err != nil {
return nil, fmt.Errorf("could not read data from the file %s", baselinePath)
}
var previousFindings []report.Finding
err = json.Unmarshal(bytes, &previousFindings)
if err != nil {
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
@@ -85,3 +70,34 @@ func LoadBaseline(baselinePath string) ([]report.Finding, error) {
return previousFindings, nil
}
func (d *Detector) AddBaseline(baselinePath string, source string) error {
if baselinePath != "" {
absoluteSource, err := filepath.Abs(source)
if err != nil {
return err
}
absoluteBaseline, err := filepath.Abs(baselinePath)
if err != nil {
return err
}
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
if err != nil {
return err
}
baseline, err := LoadBaseline(baselinePath)
if err != nil {
return err
}
d.baseline = baseline
baselinePath = relativeBaseline
}
d.baselinePath = baselinePath
return nil
}
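
Putting LoadBaseline and IsNew together, a minimal sketch of filtering a scan against a stored baseline (the path is hypothetical, and findings is assumed to come from an earlier detector run):

func filterAgainstBaseline(findings []report.Finding) ([]report.Finding, error) {
	baseline, err := LoadBaseline(".baseline.json") // hypothetical baseline path
	if err != nil {
		return nil, err
	}
	fresh := make([]report.Finding, 0, len(findings))
	for _, f := range findings {
		// redact == 0, so Match and Secret still participate in the comparison.
		if IsNew(f, 0, baseline) {
			fresh = append(fresh, f)
		}
	}
	return fresh, nil
}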


@@ -1,160 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"errors"
"testing"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/report"
)
func TestIsNew(t *testing.T) {
tests := []struct {
findings report.Finding
baseline []report.Finding
expect bool
}{
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
},
},
expect: false,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0002",
},
},
expect: true,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
Tags: []string{"a", "b"},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
Tags: []string{"a", "c"},
},
},
expect: false, // Updated tags don't make it a new finding
},
}
for _, test := range tests {
assert.Equal(t, test.expect, IsNew(test.findings, test.baseline))
}
}
func TestFileLoadBaseline(t *testing.T) {
tests := []struct {
Filename string
ExpectedError error
}{
{
Filename: "../testdata/baseline/baseline.csv",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.csv is not supported"),
},
{
Filename: "../testdata/baseline/baseline.sarif",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.sarif is not supported"),
},
{
Filename: "../testdata/baseline/notfound.json",
ExpectedError: errors.New("could not open ../testdata/baseline/notfound.json"),
},
}
for _, test := range tests {
_, err := LoadBaseline(test.Filename)
assert.Equal(t, test.ExpectedError.Error(), err.Error())
}
}
func TestIgnoreIssuesInBaseline(t *testing.T) {
tests := []struct {
findings []report.Finding
baseline []report.Finding
expectCount int
}{
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
expectCount: 0,
},
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "a",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "b",
},
},
expectCount: 0,
},
}
for _, test := range tests {
d, _ := NewDetectorDefaultConfig()
d.baseline = test.baseline
for _, finding := range test.findings {
d.addFinding(finding)
}
assert.Equal(t, test.expectCount, len(d.findings))
}
}

cli/detect/cmd/scm/scm.go (new file, 66 lines)

@@ -0,0 +1,66 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package scm
import (
"fmt"
"strings"
)
type Platform int
const (
UnknownPlatform Platform = iota
NoPlatform // Explicitly disable the feature
GitHubPlatform
GitLabPlatform
AzureDevOpsPlatform
// TODO: Add others.
)
func (p Platform) String() string {
return [...]string{
"unknown",
"none",
"github",
"gitlab",
"azuredevops",
}[p]
}
func PlatformFromString(s string) (Platform, error) {
switch strings.ToLower(s) {
case "", "unknown":
return UnknownPlatform, nil
case "none":
return NoPlatform, nil
case "github":
return GitHubPlatform, nil
case "gitlab":
return GitLabPlatform, nil
case "azuredevops":
return AzureDevOpsPlatform, nil
default:
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
}
}
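
A quick round-trip sketch of PlatformFromString and the String method:

p, err := PlatformFromString("GitHub") // matching is case-insensitive
if err != nil {
	panic(err)
}
fmt.Println(p == GitHubPlatform) // true
fmt.Println(p)                   // "github"

if _, err := PlatformFromString("bitbucket"); err != nil {
	fmt.Println(err) // invalid scm platform value: bitbucket
}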


@@ -23,63 +23,137 @@
package config
import (
"regexp"
"fmt"
"strings"
"golang.org/x/exp/maps"
"github.com/Infisical/infisical-merge/detect/regexp"
)
type AllowlistMatchCondition int
const (
AllowlistMatchOr AllowlistMatchCondition = iota
AllowlistMatchAnd
)
func (a AllowlistMatchCondition) String() string {
return [...]string{
"OR",
"AND",
}[a]
}
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
// Short human readable description of the allowlist.
Description string
// Regexes is a slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// MatchCondition determines whether all criteria must match. Defaults to "OR".
MatchCondition AllowlistMatchCondition
// RegexTarget
RegexTarget string
// Commits is a slice of commit SHAs that are allowed to be ignored.
Commits []string
// Paths is a slice of path regular expressions that are allowed to be ignored.
Paths []*regexp.Regexp
// Commits is a slice of commit SHAs that are allowed to be ignored.
Commits []string
// Can be `match` or `line`.
//
// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
//
// If `line` the _Regexes_ will be tested against the entire line.
//
// If RegexTarget is empty, it will be tested against the found secret.
RegexTarget string
// Regexes is a slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// StopWords is a slice of stop words that are allowed to be ignored.
// This targets the _secret_, not the content of the regex match like the
// Regexes slice.
StopWords []string
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
func (a *Allowlist) Validate() error {
if a.validated {
return nil
}
// Disallow empty allowlists.
if len(a.Commits) == 0 &&
len(a.Paths) == 0 &&
len(a.Regexes) == 0 &&
len(a.StopWords) == 0 {
return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
}
// Deduplicate commits and stopwords.
if len(a.Commits) > 0 {
uniqueCommits := make(map[string]struct{})
for _, commit := range a.Commits {
uniqueCommits[commit] = struct{}{}
}
a.Commits = maps.Keys(uniqueCommits)
}
if len(a.StopWords) > 0 {
uniqueStopwords := make(map[string]struct{})
for _, stopWord := range a.StopWords {
uniqueStopwords[stopWord] = struct{}{}
}
a.StopWords = maps.Keys(uniqueStopwords)
}
a.validated = true
return nil
}
// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) bool {
if c == "" {
return false
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
if a == nil || c == "" {
return false, ""
}
for _, commit := range a.Commits {
if commit == c {
return true
return true, c
}
}
return false
return false, ""
}
// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
if a == nil || path == "" {
return false
}
return anyRegexMatch(path, a.Paths)
}
// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(s string) bool {
return anyRegexMatch(s, a.Regexes)
func (a *Allowlist) RegexAllowed(secret string) bool {
if a == nil || secret == "" {
return false
}
return anyRegexMatch(secret, a.Regexes)
}
func (a *Allowlist) ContainsStopWord(s string) bool {
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
if a == nil || s == "" {
return false, ""
}
s = strings.ToLower(s)
for _, stopWord := range a.StopWords {
if strings.Contains(s, strings.ToLower(stopWord)) {
return true
return true, stopWord
}
}
return false
return false, ""
}
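
To illustrate the API above, a hedged sketch of building, validating, and querying an allowlist (the patterns, SHA, and stop word are hypothetical; regexp is the vendored detect/regexp package):

a := &Allowlist{
	Description:    "ignore test fixtures",
	MatchCondition: AllowlistMatchOr, // any single matching criterion suffices
	Paths:          []*regexp.Regexp{regexp.MustCompile(`(^|/)testdata/`)},
	Commits:        []string{"abc1234", "abc1234"}, // the duplicate is dropped by Validate
	StopWords:      []string{"example"},
}
if err := a.Validate(); err != nil {
	panic(err) // an allowlist with no criteria at all would fail here
}
fmt.Println(a.PathAllowed("cli/testdata/config/simple.toml")) // true
fmt.Println(a.CommitAllowed("abc1234"))                       // true abc1234
fmt.Println(a.ContainsStopWord("EXAMPLE_TOKEN"))              // true example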

cli/detect/config/config.go (new file, 426 lines)

@@ -0,0 +1,426 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"errors"
"fmt"
"sort"
"strings"
"github.com/spf13/viper"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
var (
//go:embed gitleaks.toml
DefaultConfig string
// used to keep track of how many configs we can extend
// yea I know, globals bad
extendDepth int
)
const maxExtendDepth = 2
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Title string
Description string
Extend Extend
Rules []struct {
ID string
Description string
Path string
Regex string
SecretGroup int
Entropy float64
Keywords []string
Tags []string
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperRuleAllowlist
Allowlists []*viperRuleAllowlist
}
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperGlobalAllowlist
Allowlists []*viperGlobalAllowlist
}
type viperRuleAllowlist struct {
Description string
Condition string
Commits []string
Paths []string
RegexTarget string
Regexes []string
StopWords []string
}
type viperGlobalAllowlist struct {
TargetRules []string
viperRuleAllowlist `mapstructure:",squash"`
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Title string
Extend Extend
Path string
Description string
Rules map[string]Rule
Keywords map[string]struct{}
// used to keep sarif results consistent
OrderedRules []string
Allowlists []*Allowlist
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
DisabledRules []string
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords = make(map[string]struct{})
orderedRules []string
rulesMap = make(map[string]Rule)
ruleAllowlists = make(map[string][]*Allowlist)
)
// Validate individual rules.
for _, vr := range vc.Rules {
var (
pathPat *regexp.Regexp
regexPat *regexp.Regexp
)
if vr.Path != "" {
pathPat = regexp.MustCompile(vr.Path)
}
if vr.Regex != "" {
regexPat = regexp.MustCompile(vr.Regex)
}
if vr.Keywords == nil {
vr.Keywords = []string{}
} else {
for i, k := range vr.Keywords {
keyword := strings.ToLower(k)
keywords[keyword] = struct{}{}
vr.Keywords[i] = keyword
}
}
if vr.Tags == nil {
vr.Tags = []string{}
}
cr := Rule{
RuleID: vr.ID,
Description: vr.Description,
Regex: regexPat,
SecretGroup: vr.SecretGroup,
Entropy: vr.Entropy,
Path: pathPat,
Keywords: vr.Keywords,
Tags: vr.Tags,
}
// Parse the rule allowlists, including the older format for backwards compatibility.
if vr.AllowList != nil {
// TODO: Remove this in v9.
if len(vr.Allowlists) > 0 {
return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
}
vr.Allowlists = append(vr.Allowlists, vr.AllowList)
}
for _, a := range vr.Allowlists {
allowlist, err := parseAllowlist(a)
if err != nil {
return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
}
cr.Allowlists = append(cr.Allowlists, allowlist)
}
orderedRules = append(orderedRules, cr.RuleID)
rulesMap[cr.RuleID] = cr
}
// Assemble the config.
c := Config{
Title: vc.Title,
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Keywords: keywords,
OrderedRules: orderedRules,
}
// Parse the config allowlists, including the older format for backwards compatibility.
if vc.AllowList != nil {
// TODO: Remove this in v9.
if len(vc.Allowlists) > 0 {
return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
}
vc.Allowlists = append(vc.Allowlists, vc.AllowList)
}
for _, a := range vc.Allowlists {
allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
if err != nil {
return Config{}, fmt.Errorf("[[allowlists]] %w", err)
}
// Allowlists with |targetRules| aren't added to the global list.
if len(a.TargetRules) > 0 {
for _, ruleID := range a.TargetRules {
// It's not possible to validate |ruleID| until after extend.
ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
}
} else {
c.Allowlists = append(c.Allowlists, allowlist)
}
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
if err := c.extendDefault(); err != nil {
return Config{}, err
}
} else if c.Extend.Path != "" {
if err := c.extendPath(); err != nil {
return Config{}, err
}
}
}
// Validate the rules after everything has been assembled (including extended configs).
if extendDepth == 0 {
for _, rule := range c.Rules {
if err := rule.Validate(); err != nil {
return Config{}, err
}
}
// Populate targeted configs.
for ruleID, allowlists := range ruleAllowlists {
rule, ok := c.Rules[ruleID]
if !ok {
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
}
rule.Allowlists = append(rule.Allowlists, allowlists...)
c.Rules[ruleID] = rule
}
}
return c, nil
}
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
var matchCondition AllowlistMatchCondition
switch strings.ToUpper(a.Condition) {
case "AND", "&&":
matchCondition = AllowlistMatchAnd
case "", "OR", "||":
matchCondition = AllowlistMatchOr
default:
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
}
// Validate the target.
regexTarget := a.RegexTarget
if regexTarget != "" {
switch regexTarget {
case "secret":
regexTarget = ""
case "match", "line":
// do nothing
default:
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
}
}
var allowlistRegexes []*regexp.Regexp
for _, a := range a.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range a.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
allowlist := &Allowlist{
Description: a.Description,
MatchCondition: matchCondition,
Commits: a.Commits,
Paths: allowlistPaths,
RegexTarget: regexTarget,
Regexes: allowlistRegexes,
StopWords: a.StopWords,
}
if err := allowlist.Validate(); err != nil {
return nil, err
}
return allowlist, nil
}
func (c *Config) GetOrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.OrderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() error {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
logging.Debug().Msg("extending config with default config")
c.extend(cfg)
return nil
}
func (c *Config) extendPath() error {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
return nil
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
// Get config name for helpful log messages.
var configName string
if c.Extend.Path != "" {
configName = c.Extend.Path
} else {
configName = "default"
}
// Convert |Config.DisabledRules| into a map for ease of access.
disabledRuleIDs := map[string]struct{}{}
for _, id := range c.Extend.DisabledRules {
if _, ok := extensionConfig.Rules[id]; !ok {
logging.Warn().
Str("rule-id", id).
Str("config", configName).
Msg("Disabled rule doesn't exist in extended config.")
}
disabledRuleIDs[id] = struct{}{}
}
for ruleID, baseRule := range extensionConfig.Rules {
// Skip the rule.
if _, ok := disabledRuleIDs[ruleID]; ok {
logging.Debug().
Str("rule-id", ruleID).
Str("config", configName).
Msg("Ignoring rule from extended config.")
continue
}
currentRule, ok := c.Rules[ruleID]
if !ok {
// Rule doesn't exist, add it to the config.
c.Rules[ruleID] = baseRule
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.OrderedRules = append(c.OrderedRules, ruleID)
} else {
// Rule exists, merge our changes into the base.
if currentRule.Description != "" {
baseRule.Description = currentRule.Description
}
if currentRule.Entropy != 0 {
baseRule.Entropy = currentRule.Entropy
}
if currentRule.SecretGroup != 0 {
baseRule.SecretGroup = currentRule.SecretGroup
}
if currentRule.Regex != nil {
baseRule.Regex = currentRule.Regex
}
if currentRule.Path != nil {
baseRule.Path = currentRule.Path
}
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
for _, a := range currentRule.Allowlists {
baseRule.Allowlists = append(baseRule.Allowlists, a)
}
// The keywords from the base rule and the extended rule must be merged into the global keywords list
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.Rules[ruleID] = baseRule
}
}
// append allowlists, not attempting to merge
for _, a := range extensionConfig.Allowlists {
c.Allowlists = append(c.Allowlists, a)
}
// sort to keep extended rules in order
sort.Strings(c.OrderedRules)
}
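
To make the targetRules path concrete, a hypothetical inline fixture and its translation (the rule ID and pattern are invented; assumes the usual viper and strings imports):

const targetedTOML = `
[[rules]]
id = "example-key"
regex = '''example_[a-z0-9]{16}'''

[[allowlists]]
targetRules = ["example-key"]
paths = ['''(^|/)testdata/''']
`

func translateTargeted() (Config, error) {
	viper.Reset()
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(targetedTOML)); err != nil {
		return Config{}, err
	}
	var vc ViperConfig
	if err := viper.Unmarshal(&vc); err != nil {
		return Config{}, err
	}
	// Because targetRules is non-empty, the allowlist is attached to the
	// "example-key" rule rather than to the global Config.Allowlists slice.
	return vc.Translate()
}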

File diff suppressed because it is too large.

cli/detect/config/rule.go (new file, 114 lines)

@@ -0,0 +1,114 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/detect/regexp"
)
// Rule contains information defining how to detect secrets
type Rule struct {
// RuleID is a unique identifier for this rule
RuleID string
// Description is the description of the rule.
Description string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
Allowlists []*Allowlist
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
if r.validated {
return nil
}
// Ensure |id| is present.
if strings.TrimSpace(r.RuleID) == "" {
// Try to provide helpful context, since |id| is empty.
var context string
if r.Regex != nil {
context = ", regex: " + r.Regex.String()
} else if r.Path != nil {
context = ", path: " + r.Path.String()
} else if r.Description != "" {
context = ", description: " + r.Description
}
return fmt.Errorf("rule |id| is missing or empty" + context)
}
// Ensure the rule actually matches something.
if r.Regex == nil && r.Path == nil {
return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
}
// Ensure |secretGroup| works.
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
}
for _, allowlist := range r.Allowlists {
// This will probably never happen.
if allowlist == nil {
continue
}
if err := allowlist.Validate(); err != nil {
return fmt.Errorf("%s: %w", r.RuleID, err)
}
}
r.validated = true
return nil
}
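
As a usage sketch of the struct and its guard (identifiers and pattern are hypothetical; regexp is the vendored detect/regexp package):

r := Rule{
	RuleID:      "example-token",
	Description: "Example service token",
	Regex:       regexp.MustCompile(`example_([a-z0-9]{32})`),
	Entropy:     3.0,
	SecretGroup: 1, // entropy is measured on capture group 1
	Keywords:    []string{"example_"},
}
if err := r.Validate(); err != nil {
	// A SecretGroup of 2 would fail here: the regex has only one group.
	panic(err)
}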


@@ -20,35 +20,27 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
package config
import (
"os"
"strings"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const (
// https://cwe.mitre.org/data/definitions/798.html
CWE = "CWE-798"
CWE_DESCRIPTION = "Use of Hard-coded Credentials"
)
func Write(findings []Finding, cfg config.Config, ext string, reportPath string) error {
file, err := os.Create(reportPath)
if err != nil {
return err
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
ext = strings.ToLower(ext)
switch ext {
case ".json", "json":
err = writeJson(findings, file)
case ".csv", "csv":
err = writeCsv(findings, file)
case ".sarif", "sarif":
err = writeSarif(cfg, findings, file)
}
return err
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}

cli/detect/decoder.go (new file, 328 lines)

@@ -0,0 +1,328 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"encoding/base64"
"fmt"
"regexp"
"unicode"
"github.com/Infisical/infisical-merge/detect/logging"
)
var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
base64.StdEncoding.DecodeString,
base64.RawURLEncoding.DecodeString,
}
func init() {
// Basically look for anything that isn't just letters
for _, c := range `0123456789+/-_` {
b64LikelyChars[c] = 1
}
}
// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
// The parent segment in a segment tree. If nil, it is a root segment
parent *EncodedSegment
// Relative start/end are the bounds of the encoded value in the current pass.
relativeStart int
relativeEnd int
// Absolute start/end refer to the bounds of the root segment in this segment
// tree
absoluteStart int
absoluteEnd int
// Decoded start/end refer to the bounds of the decoded value in the current
// pass. These can differ from relative values because decoding can shrink
// or grow the size of the segment.
decodedStart int
decodedEnd int
// This is the actual decoded content in the segment
decodedValue string
// This is the type of encoding
encoding string
}
// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}
// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
return start <= s.decodedEnd && end >= s.decodedStart
}
// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
// The match is within the bounds of the segment so we just return
// the absolute start and end of the root segment.
if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
return []int{
s.absoluteStart,
s.absoluteEnd,
}
}
// Since it overlaps one side and/or the other, we're going to have to adjust
// and climb parents until we're either at the root or we've determined
// we're fully inside one of the parent segments.
adjustedMatchIndex := make([]int, 2)
if matchIndex[0] < s.decodedStart {
// It starts before the encoded segment so adjust the start to match
// the location before it was decoded
matchStartDelta := s.decodedStart - matchIndex[0]
adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
} else {
// It starts within the encoded segment so set the bound to the
// relative start
adjustedMatchIndex[0] = s.relativeStart
}
if matchIndex[1] > s.decodedEnd {
// It ends after the encoded segment so adjust the end to match
// the location before it was decoded
matchEndDelta := matchIndex[1] - s.decodedEnd
adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
} else {
// It ends within the encoded segment so set the bound to the relative end
adjustedMatchIndex[1] = s.relativeEnd
}
// We're still not at a root segment so we'll need to keep on adjusting
if s.parent != nil {
return s.parent.adjustMatchIndex(adjustedMatchIndex)
}
return adjustedMatchIndex
}
// depth reports how many levels of decoding needed to be done (default is 1)
func (s EncodedSegment) depth() int {
depth := 1
// Climb the tree and increment the depth
for current := &s; current.parent != nil; current = current.parent {
depth++
}
return depth
}
// tags returns additional metadata tags related to the types of segments
func (s EncodedSegment) tags() []string {
return []string{
fmt.Sprintf("decoded:%s", s.encoding),
fmt.Sprintf("decode-depth:%d", s.depth()),
}
}
// Decoder decodes various types of data in place
type Decoder struct {
decodedMap map[string]string
}
// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
return &Decoder{
decodedMap: make(map[string]string),
}
}
// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
segments := d.findEncodedSegments(data, parentSegments)
if len(segments) > 0 {
result := bytes.NewBuffer(make([]byte, 0, len(data)))
relativeStart := 0
for _, segment := range segments {
result.WriteString(data[relativeStart:segment.relativeStart])
result.WriteString(segment.decodedValue)
relativeStart = segment.relativeEnd
}
result.WriteString(data[relativeStart:])
return result.String(), segments
}
return data, segments
}
// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
if len(data) == 0 {
return []EncodedSegment{}
}
matchIndices := b64Regexp.FindAllStringIndex(data, -1)
if matchIndices == nil {
return []EncodedSegment{}
}
segments := make([]EncodedSegment, 0, len(matchIndices))
// Keeps up with offsets from the text changing size as things are decoded
decodedShift := 0
for _, matchIndex := range matchIndices {
encodedValue := data[matchIndex[0]:matchIndex[1]]
if !isLikelyB64(encodedValue) {
d.decodedMap[encodedValue] = ""
continue
}
decodedValue, alreadyDecoded := d.decodedMap[encodedValue]
// We haven't decoded this yet, so go ahead and decode it
if !alreadyDecoded {
decodedValue = decodeValue(encodedValue)
d.decodedMap[encodedValue] = decodedValue
}
// Skip this segment because there was nothing to check
if len(decodedValue) == 0 {
continue
}
// Create a segment for the encoded data
segment := EncodedSegment{
relativeStart: matchIndex[0],
relativeEnd: matchIndex[1],
absoluteStart: matchIndex[0],
absoluteEnd: matchIndex[1],
decodedStart: matchIndex[0] + decodedShift,
decodedEnd: matchIndex[0] + decodedShift + len(decodedValue),
decodedValue: decodedValue,
encoding: "base64",
}
// Shift decoded start and ends based on size changes
decodedShift += len(decodedValue) - len(encodedValue)
// Adjust the absolute position of segments contained in parent segments
for _, parentSegment := range parentSegments {
if segment.isChildOf(parentSegment) {
segment.absoluteStart = parentSegment.absoluteStart
segment.absoluteEnd = parentSegment.absoluteEnd
segment.parent = &parentSegment
break
}
}
logging.Debug().Msgf("segment found: %#v", segment)
segments = append(segments, segment)
}
return segments
}
// decoders tries a list of decoders and returns the first successful one
func decodeValue(encodedValue string) string {
for _, decoder := range decoders {
decodedValue, err := decoder(encodedValue)
if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
return string(decodedValue)
}
}
return ""
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII || b[i] < '\t' {
return false
}
}
return true
}
// Skip a lot of method signatures and things at the risk of missing about
// 1% of base64
func isLikelyB64(s string) bool {
for _, c := range s {
if b64LikelyChars[c] != 0 {
return true
}
}
return false
}
// Find a segment where the decoded bounds overlaps a range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
for _, segment := range encodedSegments {
if segment.decodedOverlaps(start, end) {
return &segment
}
}
return nil
}
func (s EncodedSegment) currentLine(currentRaw string) string {
start := 0
end := len(currentRaw)
// Find the start of the range
for i := s.decodedStart; i > -1; i-- {
c := currentRaw[i]
if c == '\n' {
start = i
break
}
}
// Find the end of the range
for i := s.decodedEnd; i < end; i++ {
c := currentRaw[i]
if c == '\n' {
end = i
break
}
}
return currentRaw[start:end]
}
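
A rough illustration of a single decoding pass, assuming code running inside the detect package (decode, EncodedSegment, and their fields are unexported):

d := NewDecoder()
raw := `token = "` + base64.StdEncoding.EncodeToString([]byte("secret-value-123")) + `"`

decoded, segments := d.decode(raw, nil)
fmt.Println(decoded) // token = "secret-value-123"
for _, s := range segments {
	// tags() records the encoding and the number of passes needed;
	// adjustMatchIndex maps hits in the decoded text back to raw offsets.
	fmt.Println(s.tags()) // [decoded:base64 decode-depth:1]
}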

File diff suppressed because it is too large.


@@ -1,754 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/report"
)
const configPath = "../testdata/config/"
const repoBasePath = "../testdata/repos/"
func TestDetect(t *testing.T) {
tests := []struct {
cfgName string
baselinePath string
fragment Fragment
// NOTE: for expected findings, all line numbers will be 0
// because line deltas are added _after_ the finding is created.
// I.e., if the finding is from a --no-git file, the line number will be
// increased by 1 in DetectFromFiles(). If the finding is from git,
// the line number will be increased by the patch delta.
expectedFindings []report.Finding
wantError error
}{
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \
\"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\"
// infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OKIA",
Match: "AKIALALEMEL33243OKIA",
File: "tmp.go",
Line: `awsToken := \"AKIALALEMEL33243OKIA\"`,
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.1464393,
},
},
},
{
cfgName: "escaped_character_group",
fragment: Fragment{
Raw: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "PyPI upload token",
Secret: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Match: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Line: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
File: "tmp.go",
RuleID: "pypi-upload-token",
Tags: []string{"key", "pypi"},
StartLine: 0,
EndLine: 0,
StartColumn: 1,
EndColumn: 86,
Entropy: 1.9606875,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: `awsToken := \"AKIALALEMEL33243OLIA\"`,
File: "tmp.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.0841837,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
Secret: "cafebabe:deadbeef",
Line: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
File: "tmp.sh",
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 60,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
Secret: "cafebabe:deadbeef",
File: "tmp.sh",
Line: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 21,
EndColumn: 74,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Sensitive URL",
Match: "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
Secret: "cafeb4b3:d3adb33f",
File: "tmp.sh",
Line: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
RuleID: "sidekiq-sensitive-url",
Tags: []string{},
Entropy: 2.984234,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 58,
},
},
},
{
cfgName: "allow_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_path",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_commit",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
CommitSHA: "allowthiscommit",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "Discord API key",
Match: "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.go",
RuleID: "discord-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 7,
EndColumn: 93,
},
},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Generic API Key",
Match: "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.py",
RuleID: "generic-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 22,
EndColumn: 93,
},
},
},
{
cfgName: "path_only",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Python Files",
Match: "file detected: tmp.py",
File: "tmp.py",
RuleID: "python-files-only",
Tags: []string{},
},
},
},
{
cfgName: "bad_entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: filepath.Join(configPath, "simple.toml"),
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_global_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "path_only",
baselinePath: ".baseline.json",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: ".baseline.json",
},
expectedFindings: []report.Finding{},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
cfg.Path = filepath.Join(configPath, tt.cfgName+".toml")
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
d := NewDetector(cfg)
d.baselinePath = tt.baselinePath
findings := d.Detect(tt.fragment)
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromGit tests the FromGit function
func TestFromGit(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "small"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 19,
EndColumn: 38,
Line: "\n awsToken := \"AKIALALEMEL33243OLIA\"",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
File: "main.go",
Date: "2021-11-02T23:37:53Z",
Commit: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587",
Author: "Zachary Rice",
Email: "zricer@protonmail.com",
Message: "Accidentally add a secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587:main.go:aws-access-key:20",
},
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
File: "foo/foo.go",
Date: "2021-11-02T23:48:06Z",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
{
source: filepath.Join(repoBasePath, "small"),
logOpts: "--all foo...",
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
Match: "AKIALALEMEL33243OLIA",
Date: "2021-11-02T23:48:06Z",
File: "foo/foo.go",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
findings, err := detector.DetectGit(tt.source, tt.logOpts, DetectType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestFromGitStaged(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "staged"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 7,
EndLine: 7,
StartColumn: 18,
EndColumn: 37,
Line: "\n\taws_token2 := \"AKIALALEMEL33243OLIA\" // this one is not",
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
File: "api/api.go",
SymlinkFile: "",
Commit: "",
Entropy: 3.0841837,
Author: "",
Email: "",
Date: "0001-01-01T00:00:00Z",
Message: "",
Tags: []string{
"key",
"AWS",
},
RuleID: "aws-access-key",
Fingerprint: "api/api.go:aws-access-key:7",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
detector.AddGitleaksIgnore(filepath.Join(tt.source, ".gitleaksignore"))
findings, err := detector.DetectGit(tt.source, tt.logOpts, ProtectStagedType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromFiles tests the FromFiles function
func TestFromFiles(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "nogit"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
SymlinkFile: "",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
{
source: filepath.Join(repoBasePath, "nogit", "main.go"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestDetectWithSymlinks(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "symlinks/file_symlink"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "Asymmetric Private Key",
StartLine: 1,
EndLine: 1,
StartColumn: 1,
EndColumn: 35,
Match: "-----BEGIN OPENSSH PRIVATE KEY-----",
Secret: "-----BEGIN OPENSSH PRIVATE KEY-----",
Line: "-----BEGIN OPENSSH PRIVATE KEY-----",
File: "../testdata/repos/symlinks/source_file/id_ed25519",
SymlinkFile: "../testdata/repos/symlinks/file_symlink/symlinked_id_ed25519",
RuleID: "apkey",
Tags: []string{"key", "AsymmetricPrivateKey"},
Entropy: 3.587164,
Fingerprint: "../testdata/repos/symlinks/source_file/id_ed25519:apkey:1",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func moveDotGit(from, to string) error {
repoDirs, err := os.ReadDir("../testdata/repos")
if err != nil {
return err
}
for _, dir := range repoDirs {
if to == ".git" {
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
if os.IsNotExist(err) {
// don't want to delete the only copy of .git accidentally
continue
}
os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
}
if !dir.IsDir() {
continue
}
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
if os.IsNotExist(err) {
continue
}
err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
if err != nil {
return err
}
}
return nil
}
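
One behavioral detail worth noting from TestDetect above: the inline infisical-scan:ignore marker only suppresses a finding when it shares the secret's line, as the first and third cases show:

awsToken := "AKIALALEMEL33243OKIA" // infisical-scan:ignore  (same line: suppressed)
awsToken2 := "AKIALALEMEL33243OKIA"
// infisical-scan:ignore  (next line: the finding above is still reported)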

Some files were not shown because too many files have changed in this diff.