Compare commits


53 Commits

Author SHA1 Message Date
x032205
4c6e5c9c4c lint fix 2025-05-13 13:11:20 -04:00
Sheen
b5ef2a6837 Merge pull request #3569 from Infisical/pki-subscriber
Infisical PKI: Subscriber Functionality
2025-05-13 16:34:05 +08:00
x032205
71edb08942 Merge pull request #3587 from Infisical/ENG-2763
Fix approval request ordering
2025-05-12 23:54:27 -04:00
x032205
89d8261a43 Fix approval request ordering 2025-05-12 23:13:57 -04:00
Scott Wilson
a2b2b07185 Merge pull request #3584 from Infisical/sso-page
Improvements(org-settings): Refactor Organization Security Settings to SSO Page
2025-05-12 18:43:35 -07:00
Scott Wilson
76864ababa fix: correct doc casing 2025-05-12 18:37:05 -07:00
Scott Wilson
d17d40ebd9 improvements: refactor org security settings tab to sso page and update doc images 2025-05-12 17:18:40 -07:00
Tuan Dang
9743ad02d5 Fix lint issues 2025-05-12 14:56:00 -07:00
Tuan Dang
26d0ab1dc2 Fix lint issues 2025-05-12 14:34:14 -07:00
Tuan Dang
bf833a57cd Fix merge conflicts 2025-05-12 12:59:54 -07:00
Tuan Dang
e8519f6612 Revise PR based on review 2025-05-12 12:56:56 -07:00
Daniel Hougaard
07df6803a5 Merge pull request #3581 from Infisical/daniel/unblock-dev
fix: move cli install to aws
2025-05-12 18:54:55 +04:00
Daniel Hougaard
a09d0e8948 fix: move cli install to aws 2025-05-12 18:47:02 +04:00
Daniel Hougaard
ee598560ec Merge pull request #3572 from Infisical/daniel/fix-secret-scaninng-public-keys
fix: update secret scanner to latest version
2025-05-12 11:13:51 +04:00
carlosmonastyrski
c629705c9c Merge pull request #3535 from Infisical/feat/addGroupsToSshHosts
feat(ssh-hosts): Add groups to ssh hosts allowed principals
2025-05-09 22:52:35 -03:00
Daniel Hougaard
be10f6e52a Merge pull request #3579 from Infisical/daniel/horizontal-scaling-ms-teams
fix(workflow-integrations): microsoft teams scaling issues
2025-05-10 01:11:37 +04:00
Scott Wilson
40c5ff0ad6 Merge pull request #3578 from Infisical/project-template-improvements
improvement(project-templates): Project templates UI improvements
2025-05-09 13:50:50 -07:00
Scott Wilson
8ecb5ca7bc remove extra margin 2025-05-09 13:47:28 -07:00
Daniel Hougaard
ab6a2b7dbb fix(workflow-integrations): microsoft teams scaling issues 2025-05-10 00:47:22 +04:00
carlosmonastyrski
81bfc04e7c Trim hostname input on SSH Host permission form and fix getWorkspaceUsers key invalidation 2025-05-09 17:10:01 -03:00
x032205
a757fceaed Merge pull request #3577 from Infisical/feat/docs-support-openapi-titles
feat(docs): Support OpenAPI titles for Zod descriptions
2025-05-09 15:49:49 -04:00
Scott Wilson
ce8e18f620 improvement: address feedback 2025-05-09 12:40:07 -07:00
Scott Wilson
d09c964647 fix: use tanstack router link 2025-05-09 12:32:37 -07:00
Scott Wilson
eeddbde600 improvement: update org project templates relocation banner 2025-05-09 12:23:05 -07:00
Daniel Hougaard
859b643e43 Delete ssh 2025-05-09 22:49:39 +04:00
Daniel Hougaard
91f71e0ef6 feat(cli): upgrade secret scanner 2025-05-09 22:48:56 +04:00
x032205
4e9e31eeb7 added credit 2025-05-09 13:45:36 -04:00
x032205
f6bc99b964 support openapi titles for zod description 2025-05-09 13:40:15 -04:00
Scott Wilson
679eb9dffc fix: correct project templates empty table display if feature is disabled 2025-05-09 10:14:03 -07:00
x032205
0754ae3aaf Merge pull request #3576 from Infisical/ENG-2692
feat(api): Rate limit for all email-sending endpoints
2025-05-09 11:37:08 -04:00
x032205
519a0c1bdf Merge branch 'main' into ENG-2692 2025-05-09 11:31:05 -04:00
x032205
e9d8979cf4 add rate limit to all email-sending endpoints 2025-05-09 11:29:53 -04:00
Maidul Islam
486d975fa0 Merge pull request #3575 from akhilmhdh/fix/octokit
feat: resolved esm error in octokit
2025-05-09 10:50:25 -04:00
=
42c49949b4 feat: resolved esm error in octokit 2025-05-09 20:13:08 +05:30
carlosmonastyrski
aea44088db Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-09 09:21:29 -03:00
Daniel Hougaard
e584c9ea95 test 2025-05-09 09:04:30 +04:00
Daniel Hougaard
1921763fa8 fix: update to upcoming version 2025-05-09 04:43:13 +04:00
Daniel Hougaard
5408859a18 fix: update gitleaks/go-diff to latest version 2025-05-09 04:40:09 +04:00
Tuan Dang
a6b3be72a9 Make minor PR adjustments 2025-05-08 14:02:25 -07:00
Tuan Dang
531607dcb7 Revise pr based on greptile review 2025-05-08 10:37:33 -07:00
Tuan Dang
182de009b2 Fix lint issues 2025-05-08 10:01:44 -07:00
Tuan Dang
f1651ce171 Rename migration file 2025-05-08 09:10:49 -07:00
Tuan Dang
e1f563dbd4 Fix merge conflicts 2025-05-08 09:07:28 -07:00
Tuan Dang
107cca0b62 Complete preliminary docs for pki subscribers 2025-05-08 08:52:10 -07:00
Daniel Hougaard
c891b8f5d3 fix routing 2025-05-07 03:00:20 +04:00
Tuan Dang
a32bb95703 Start work on PkiSubscriberDetailsByIDPage 2025-05-06 15:46:54 -07:00
Tuan Dang
0410c83cef Fix merge conflicts 2025-05-06 09:46:31 -07:00
Tuan Dang
cf4f2ea6b1 Begin developing pki subscriber 2025-05-06 09:44:57 -07:00
carlosmonastyrski
bf85df7e36 Fix SSH table UI user groups issues 2025-05-06 08:37:19 -03:00
carlosmonastyrski
b9070a8fa3 Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-05 14:51:01 -03:00
carlosmonastyrski
3ea450e94a Add groups to ssh hosts allowed principals fix delete principal row issue 2025-05-02 13:41:53 -03:00
carlosmonastyrski
7d0574087c Add groups to ssh hosts allowed principals bot improvements 2025-05-02 13:36:05 -03:00
carlosmonastyrski
36916704be Add groups to ssh hosts allowed principals 2025-05-02 11:14:43 -03:00
244 changed files with 11613 additions and 6543 deletions

View File

@@ -28,3 +28,15 @@ frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow
docs/cli/commands/user.mdx:generic-api-key:51
frontend/src/pages/secret-manager/OverviewPage/components/SecretOverviewTableRow/SecretOverviewTableRow.tsx:generic-api-key:76
docs/integrations/app-connections/hashicorp-vault.mdx:generic-api-key:188
cli/detect/config/gitleaks.toml:gcp-api-key:567
cli/detect/config/gitleaks.toml:gcp-api-key:569
cli/detect/config/gitleaks.toml:gcp-api-key:570
cli/detect/config/gitleaks.toml:gcp-api-key:572
cli/detect/config/gitleaks.toml:gcp-api-key:574
cli/detect/config/gitleaks.toml:gcp-api-key:575
cli/detect/config/gitleaks.toml:gcp-api-key:576
cli/detect/config/gitleaks.toml:gcp-api-key:577
cli/detect/config/gitleaks.toml:gcp-api-key:578
cli/detect/config/gitleaks.toml:gcp-api-key:579
cli/detect/config/gitleaks.toml:gcp-api-key:581
cli/detect/config/gitleaks.toml:gcp-api-key:582

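Each entry in this file is a gitleaks fingerprint of the form path:rule-id:line. The twelve new lines appear to allowlist the example GCP API keys that ship inside the vendored cli/detect/config/gitleaks.toml ruleset itself, so the upgraded scanner (see the CLI dependency changes below) does not flag its own default config.
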
View File

@@ -133,8 +133,8 @@ RUN apt-get update && apt-get install -y \
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-&& apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

View File

@@ -127,8 +127,8 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-&& apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /

View File

@@ -54,8 +54,8 @@ COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
-curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-apt-get update && apt-get install -y infisical=0.8.1 git
+curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
+apt-get update && apt-get install -y infisical=0.41.2 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js

View File

@@ -55,9 +55,9 @@ RUN mkdir -p /etc/softhsm2/tokens && \
# ? App setup
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
-apt-get install -y infisical=0.8.1
+apt-get install -y infisical=0.41.2
WORKDIR /app

View File

@@ -64,9 +64,9 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# ? App setup
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
-apt-get install -y infisical=0.8.1
+apt-get install -y infisical=0.41.2
WORKDIR /app

View File

@@ -33,7 +33,8 @@
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
-"@octokit/plugin-paginate-graphql": "^5.2.4",
+"@octokit/core": "^5.2.1",
+"@octokit/plugin-paginate-graphql": "^4.0.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
@@ -121,7 +122,7 @@
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
-"zod-to-json-schema": "^3.22.4"
+"zod-to-json-schema": "^3.24.5"
},
"bin": {
"backend": "dist/main.js"
@@ -7805,119 +7806,38 @@
}
},
"node_modules/@octokit/core": {
-"version": "6.1.5",
-"resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.5.tgz",
-"integrity": "sha512-vvmsN0r7rguA+FySiCsbaTTobSftpIDIpPW81trAmsv9TGxg3YCujAxRYp/Uy8xmDgYCzzgulG62H7KYUFmeIg==",
+"version": "5.2.1",
+"resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
+"integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
"license": "MIT",
-"peer": true,
"dependencies": {
-"@octokit/auth-token": "^5.0.0",
-"@octokit/graphql": "^8.2.2",
-"@octokit/request": "^9.2.3",
-"@octokit/request-error": "^6.1.8",
-"@octokit/types": "^14.0.0",
-"before-after-hook": "^3.0.2",
-"universal-user-agent": "^7.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/@octokit/core/node_modules/@octokit/auth-token": {
-"version": "5.1.2",
-"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.2.tgz",
-"integrity": "sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw==",
-"license": "MIT",
-"peer": true,
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/@octokit/core/node_modules/@octokit/endpoint": {
-"version": "10.1.4",
-"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
-"integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
-"license": "MIT",
-"peer": true,
-"dependencies": {
-"@octokit/types": "^14.0.0",
-"universal-user-agent": "^7.0.2"
+"@octokit/auth-token": "^4.0.0",
+"@octokit/graphql": "^7.1.0",
+"@octokit/request": "^8.4.1",
+"@octokit/request-error": "^5.1.1",
+"@octokit/types": "^13.0.0",
+"before-after-hook": "^2.2.0",
+"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/core/node_modules/@octokit/openapi-types": {
-"version": "25.0.0",
-"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.0.0.tgz",
-"integrity": "sha512-FZvktFu7HfOIJf2BScLKIEYjDsw6RKc7rBJCdvCTfKsVnx2GEB/Nbzjr29DUdb7vQhlzS/j8qDzdditP0OC6aw==",
-"license": "MIT",
-"peer": true
-},
-"node_modules/@octokit/core/node_modules/@octokit/request": {
-"version": "9.2.3",
-"resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.3.tgz",
-"integrity": "sha512-Ma+pZU8PXLOEYzsWf0cn/gY+ME57Wq8f49WTXA8FMHp2Ps9djKw//xYJ1je8Hm0pR2lU9FUGeJRWOtxq6olt4w==",
-"license": "MIT",
-"peer": true,
-"dependencies": {
-"@octokit/endpoint": "^10.1.4",
-"@octokit/request-error": "^6.1.8",
-"@octokit/types": "^14.0.0",
-"fast-content-type-parse": "^2.0.0",
-"universal-user-agent": "^7.0.2"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/@octokit/core/node_modules/@octokit/request-error": {
-"version": "6.1.8",
-"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz",
-"integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==",
-"license": "MIT",
-"peer": true,
-"dependencies": {
-"@octokit/types": "^14.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
+"version": "24.2.0",
+"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+"license": "MIT"
},
"node_modules/@octokit/core/node_modules/@octokit/types": {
-"version": "14.0.0",
-"resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.0.0.tgz",
-"integrity": "sha512-VVmZP0lEhbo2O1pdq63gZFiGCKkm8PPp8AUOijlwPO6hojEVjspA0MWKP7E4hbvGxzFKNqKr6p0IYtOH/Wf/zA==",
+"version": "13.10.0",
+"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
-"peer": true,
"dependencies": {
-"@octokit/openapi-types": "^25.0.0"
+"@octokit/openapi-types": "^24.2.0"
}
},
-"node_modules/@octokit/core/node_modules/fast-content-type-parse": {
-"version": "2.0.1",
-"resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz",
-"integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==",
-"funding": [
-{
-"type": "github",
-"url": "https://github.com/sponsors/fastify"
-},
-{
-"type": "opencollective",
-"url": "https://opencollective.com/fastify"
-}
-],
-"license": "MIT",
-"peer": true
-},
-"node_modules/@octokit/core/node_modules/universal-user-agent": {
-"version": "7.0.2",
-"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==",
-"license": "ISC",
-"peer": true
-},
"node_modules/@octokit/endpoint": {
"version": "9.0.6",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
@@ -7947,105 +7867,34 @@
}
},
"node_modules/@octokit/graphql": {
-"version": "8.2.2",
-"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.2.2.tgz",
-"integrity": "sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==",
+"version": "7.1.1",
+"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
+"integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
"license": "MIT",
-"peer": true,
"dependencies": {
-"@octokit/request": "^9.2.3",
-"@octokit/types": "^14.0.0",
-"universal-user-agent": "^7.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/@octokit/graphql/node_modules/@octokit/endpoint": {
-"version": "10.1.4",
-"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
-"integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
-"license": "MIT",
-"peer": true,
-"dependencies": {
-"@octokit/types": "^14.0.0",
-"universal-user-agent": "^7.0.2"
+"@octokit/request": "^8.4.1",
+"@octokit/types": "^13.0.0",
+"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/graphql/node_modules/@octokit/openapi-types": {
-"version": "25.0.0",
-"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.0.0.tgz",
-"integrity": "sha512-FZvktFu7HfOIJf2BScLKIEYjDsw6RKc7rBJCdvCTfKsVnx2GEB/Nbzjr29DUdb7vQhlzS/j8qDzdditP0OC6aw==",
-"license": "MIT",
-"peer": true
-},
-"node_modules/@octokit/graphql/node_modules/@octokit/request": {
-"version": "9.2.3",
-"resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.3.tgz",
-"integrity": "sha512-Ma+pZU8PXLOEYzsWf0cn/gY+ME57Wq8f49WTXA8FMHp2Ps9djKw//xYJ1je8Hm0pR2lU9FUGeJRWOtxq6olt4w==",
-"license": "MIT",
-"peer": true,
-"dependencies": {
-"@octokit/endpoint": "^10.1.4",
-"@octokit/request-error": "^6.1.8",
-"@octokit/types": "^14.0.0",
-"fast-content-type-parse": "^2.0.0",
-"universal-user-agent": "^7.0.2"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/@octokit/graphql/node_modules/@octokit/request-error": {
-"version": "6.1.8",
-"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz",
-"integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==",
-"license": "MIT",
-"peer": true,
-"dependencies": {
-"@octokit/types": "^14.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
+"version": "24.2.0",
+"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+"license": "MIT"
},
"node_modules/@octokit/graphql/node_modules/@octokit/types": {
-"version": "14.0.0",
-"resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.0.0.tgz",
-"integrity": "sha512-VVmZP0lEhbo2O1pdq63gZFiGCKkm8PPp8AUOijlwPO6hojEVjspA0MWKP7E4hbvGxzFKNqKr6p0IYtOH/Wf/zA==",
+"version": "13.10.0",
+"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
-"peer": true,
"dependencies": {
-"@octokit/openapi-types": "^25.0.0"
+"@octokit/openapi-types": "^24.2.0"
}
},
-"node_modules/@octokit/graphql/node_modules/fast-content-type-parse": {
-"version": "2.0.1",
-"resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz",
-"integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==",
-"funding": [
-{
-"type": "github",
-"url": "https://github.com/sponsors/fastify"
-},
-{
-"type": "opencollective",
-"url": "https://opencollective.com/fastify"
-}
-],
-"license": "MIT",
-"peer": true
-},
-"node_modules/@octokit/graphql/node_modules/universal-user-agent": {
-"version": "7.0.2",
-"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==",
-"license": "ISC",
-"peer": true
-},
"node_modules/@octokit/oauth-authorization-url": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz",
@@ -8141,15 +7990,15 @@
}
},
"node_modules/@octokit/plugin-paginate-graphql": {
-"version": "5.2.4",
-"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-5.2.4.tgz",
-"integrity": "sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA==",
+"version": "4.0.1",
+"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.1.tgz",
+"integrity": "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A==",
"license": "MIT",
"engines": {
"node": ">= 18"
},
"peerDependencies": {
-"@octokit/core": ">=6"
+"@octokit/core": ">=5"
}
},
"node_modules/@octokit/plugin-paginate-rest": {
@@ -8302,59 +8151,6 @@
"node": ">= 18"
}
},
-"node_modules/@octokit/rest/node_modules/@octokit/core": {
-"version": "5.2.1",
-"resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
-"integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
-"license": "MIT",
-"dependencies": {
-"@octokit/auth-token": "^4.0.0",
-"@octokit/graphql": "^7.1.0",
-"@octokit/request": "^8.4.1",
-"@octokit/request-error": "^5.1.1",
-"@octokit/types": "^13.0.0",
-"before-after-hook": "^2.2.0",
-"universal-user-agent": "^6.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/@octokit/rest/node_modules/@octokit/graphql": {
-"version": "7.1.1",
-"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
-"integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
-"license": "MIT",
-"dependencies": {
-"@octokit/request": "^8.4.1",
-"@octokit/types": "^13.0.0",
-"universal-user-agent": "^6.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/@octokit/rest/node_modules/@octokit/openapi-types": {
-"version": "24.2.0",
-"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
-"license": "MIT"
-},
-"node_modules/@octokit/rest/node_modules/@octokit/types": {
-"version": "13.10.0",
-"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
-"license": "MIT",
-"dependencies": {
-"@octokit/openapi-types": "^24.2.0"
-}
-},
-"node_modules/@octokit/rest/node_modules/before-after-hook": {
-"version": "2.2.3",
-"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
-"license": "Apache-2.0"
-},
"node_modules/@octokit/types": {
"version": "12.4.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.4.0.tgz",
@@ -12799,11 +12595,10 @@
"integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ=="
},
"node_modules/before-after-hook": {
-"version": "3.0.2",
-"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz",
-"integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==",
-"license": "Apache-2.0",
-"peer": true
+"version": "2.2.3",
+"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
+"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
+"license": "Apache-2.0"
},
"node_modules/big-integer": {
"version": "1.6.52",
@@ -21602,62 +21397,6 @@
"node": ">=18"
}
},
-"node_modules/probot/node_modules/@octokit/core": {
-"version": "5.2.1",
-"resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
-"integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
-"license": "MIT",
-"dependencies": {
-"@octokit/auth-token": "^4.0.0",
-"@octokit/graphql": "^7.1.0",
-"@octokit/request": "^8.4.1",
-"@octokit/request-error": "^5.1.1",
-"@octokit/types": "^13.0.0",
-"before-after-hook": "^2.2.0",
-"universal-user-agent": "^6.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/probot/node_modules/@octokit/core/node_modules/@octokit/types": {
-"version": "13.10.0",
-"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
-"license": "MIT",
-"dependencies": {
-"@octokit/openapi-types": "^24.2.0"
-}
-},
-"node_modules/probot/node_modules/@octokit/graphql": {
-"version": "7.1.1",
-"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
-"integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
-"license": "MIT",
-"dependencies": {
-"@octokit/request": "^8.4.1",
-"@octokit/types": "^13.0.0",
-"universal-user-agent": "^6.0.0"
-},
-"engines": {
-"node": ">= 18"
-}
-},
-"node_modules/probot/node_modules/@octokit/graphql/node_modules/@octokit/types": {
-"version": "13.10.0",
-"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
-"license": "MIT",
-"dependencies": {
-"@octokit/openapi-types": "^24.2.0"
-}
-},
-"node_modules/probot/node_modules/@octokit/openapi-types": {
-"version": "24.2.0",
-"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
-"license": "MIT"
-},
"node_modules/probot/node_modules/@octokit/plugin-retry": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz",
@@ -21690,12 +21429,6 @@
"@octokit/core": "^5.0.0"
}
},
-"node_modules/probot/node_modules/before-after-hook": {
-"version": "2.2.3",
-"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
-"license": "Apache-2.0"
-},
"node_modules/probot/node_modules/commander": {
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz",
@@ -27709,11 +27442,12 @@
}
},
"node_modules/zod-to-json-schema": {
-"version": "3.22.4",
-"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.22.4.tgz",
-"integrity": "sha512-2Ed5dJ+n/O3cU383xSY28cuVi0BCQhF8nYqWU5paEpl7fVdqdAmiLdqLyfblbNdfOFwFfi/mqU4O1pwc60iBhQ==",
+"version": "3.24.5",
+"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz",
+"integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==",
"license": "ISC",
"peerDependencies": {
-"zod": "^3.22.4"
+"zod": "^3.24.1"
}
}
}

View File

@@ -38,8 +38,8 @@
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"type:check": "tsc --noEmit",
-"lint:fix": "eslint --fix --ext js,ts ./src",
-"lint": "eslint 'src/**/*.ts'",
+"lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
+"lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
@@ -152,7 +152,8 @@
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
-"@octokit/plugin-paginate-graphql": "^5.2.4",
+"@octokit/core": "^5.2.1",
+"@octokit/plugin-paginate-graphql": "^4.0.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
@@ -240,6 +241,6 @@
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
-"zod-to-json-schema": "^3.22.4"
+"zod-to-json-schema": "^3.24.5"
}
}

View File

@@ -80,6 +80,7 @@ import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@@ -232,6 +233,7 @@ declare module "fastify" {
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
pkiCollection: TPkiCollectionServiceFactory;
pkiSubscriber: TPkiSubscriberServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;

View File

@@ -209,6 +209,9 @@ import {
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate,
TPkiSubscribers,
TPkiSubscribersInsert,
TPkiSubscribersUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
@@ -564,6 +567,11 @@ declare module "knex/types/tables" {
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate
>;
[TableName.PkiSubscriber]: KnexOriginal.CompositeTableType<
TPkiSubscribers,
TPkiSubscribersInsert,
TPkiSubscribersUpdate
>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,

View File

@@ -0,0 +1,22 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId"))) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.uuid("groupId").nullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.unique(["sshHostLoginUserId", "groupId"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId")) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.dropUnique(["sshHostLoginUserId", "groupId"]);
t.dropColumn("groupId");
});
}
}

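Worth noting: because groupId is nullable and Postgres unique indexes treat NULLs as distinct by default, the new unique constraint on (sshHostLoginUserId, groupId) only deduplicates group mappings; pre-existing user-only rows, where groupId is NULL, are unaffected.
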
View File

@@ -0,0 +1,46 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.PkiSubscriber))) {
await knex.schema.createTable(TableName.PkiSubscriber, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("caId").nullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("SET NULL");
t.string("name").notNullable();
t.string("commonName").notNullable();
t.specificType("subjectAlternativeNames", "text[]").notNullable();
t.string("ttl").notNullable();
t.specificType("keyUsages", "text[]").notNullable();
t.specificType("extendedKeyUsages", "text[]").notNullable();
t.string("status").notNullable(); // active / disabled
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.PkiSubscriber);
}
const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
if (!hasSubscriberCol) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("pkiSubscriberId").nullable();
t.foreign("pkiSubscriberId").references("id").inTable(TableName.PkiSubscriber).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasSubscriberCol = await knex.schema.hasColumn(TableName.Certificate, "pkiSubscriberId");
if (hasSubscriberCol) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("pkiSubscriberId");
});
}
await knex.schema.dropTableIfExists(TableName.PkiSubscriber);
await dropOnUpdateTrigger(knex, TableName.PkiSubscriber);
}

View File

@@ -24,7 +24,8 @@ export const CertificatesSchema = z.object({
caCertId: z.string().uuid(),
certificateTemplateId: z.string().uuid().nullable().optional(),
keyUsages: z.string().array().nullable().optional(),
-extendedKeyUsages: z.string().array().nullable().optional()
+extendedKeyUsages: z.string().array().nullable().optional(),
+pkiSubscriberId: z.string().uuid().nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

View File

@@ -69,6 +69,7 @@ export * from "./organizations";
export * from "./pki-alerts";
export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./pki-subscribers";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-gateways";

View File

@@ -21,6 +21,7 @@ export enum TableName {
CertificateBody = "certificate_bodies",
CertificateSecret = "certificate_secrets",
CertificateTemplate = "certificate_templates",
PkiSubscriber = "pki_subscribers",
PkiAlert = "pki_alerts",
PkiCollection = "pki_collections",
PkiCollectionItem = "pki_collection_items",

View File

@@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PkiSubscribersSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
caId: z.string().uuid().nullable().optional(),
name: z.string(),
commonName: z.string(),
subjectAlternativeNames: z.string().array(),
ttl: z.string(),
keyUsages: z.string().array(),
extendedKeyUsages: z.string().array(),
status: z.string()
});
export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;
export type TPkiSubscribersInsert = Omit<z.input<typeof PkiSubscribersSchema>, TImmutableDBKeys>;
export type TPkiSubscribersUpdate = Partial<Omit<z.input<typeof PkiSubscribersSchema>, TImmutableDBKeys>>;

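Since TImmutableDBKeys strips the generated id/createdAt/updatedAt fields, an insert payload derived from this schema looks like the following sketch (all field values are illustrative):

const newSubscriber: TPkiSubscribersInsert = {
projectId: "project-id",
caId: null,
name: "web-service",
commonName: "web.example.com",
subjectAlternativeNames: ["web.example.com", "www.example.com"],
ttl: "30d",
keyUsages: ["digitalSignature", "keyEncipherment"],
extendedKeyUsages: ["serverAuth"],
status: "active" // per the migration comment: active / disabled
};
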
View File

@@ -12,7 +12,8 @@ export const SshHostLoginUserMappingsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
sshHostLoginUserId: z.string().uuid(),
-userId: z.string().uuid().nullable().optional()
+userId: z.string().uuid().nullable().optional(),
+groupId: z.string().uuid().nullable().optional()
});
export type TSshHostLoginUserMappings = z.infer<typeof SshHostLoginUserMappingsSchema>;

View File

@@ -2,6 +2,7 @@ import { z } from "zod";
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -18,6 +19,9 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
server.route({
url: "/",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
permissions: z.any().array(),

View File
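The config.rateLimit blocks added across these routers (this one, LDAP, SAML, and SCIM below) hook into per-route configuration as provided by @fastify/rate-limit. A minimal sketch of the mechanism, with illustrative limit values; the real writeLimit/readLimit objects live in @app/server/config/rateLimiter:

import Fastify from "fastify";
import rateLimit from "@fastify/rate-limit";

const writeLimit = { max: 50, timeWindow: "1 minute" }; // illustrative values

const server = Fastify();
await server.register(rateLimit, { global: false }); // limits are opt-in per route

server.route({
method: "POST",
url: "/login",
config: { rateLimit: writeLimit }, // same shape as the routes in this PR
handler: async () => ({ ok: true })
});
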

@@ -98,6 +98,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/login",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
organizationSlug: z.string().trim()

View File

@@ -166,6 +166,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/redirect/saml2/organizations/:orgSlug",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
orgSlug: z.string().trim()
@@ -192,6 +195,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/redirect/saml2/:samlConfigId",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
samlConfigId: z.string().trim()
@@ -218,6 +224,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/saml2/:samlConfigId",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
samlConfigId: z.string().trim()

View File

@@ -196,6 +196,9 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/Users",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
schemas: z.array(z.string()),

View File

@@ -73,7 +73,7 @@ export const registerSshHostRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
-const host = await server.services.sshHost.getSshHost({
+const host = await server.services.sshHost.getSshHostById({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,

View File

@@ -19,9 +19,10 @@ import { TProjectPermission } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { TCreateAppConnectionDTO, TUpdateAppConnectionDTO } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
-import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
+import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
+import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
import { SecretSync, SecretSyncImportBehavior } from "@app/services/secret-sync/secret-sync-enums";
import {
@@ -34,7 +35,6 @@ import { WorkflowIntegration } from "@app/services/workflow-integration/workflow
import { KmipPermission } from "../kmip/kmip-enum";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
-import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
export type TListProjectAuditLogDTO = {
filter: {
@@ -254,6 +254,13 @@ export enum EventType {
GET_PKI_COLLECTION_ITEMS = "get-pki-collection-items",
ADD_PKI_COLLECTION_ITEM = "add-pki-collection-item",
DELETE_PKI_COLLECTION_ITEM = "delete-pki-collection-item",
CREATE_PKI_SUBSCRIBER = "create-pki-subscriber",
UPDATE_PKI_SUBSCRIBER = "update-pki-subscriber",
DELETE_PKI_SUBSCRIBER = "delete-pki-subscriber",
GET_PKI_SUBSCRIBER = "get-pki-subscriber",
ISSUE_PKI_SUBSCRIBER_CERT = "issue-pki-subscriber-cert",
SIGN_PKI_SUBSCRIBER_CERT = "sign-pki-subscriber-cert",
LIST_PKI_SUBSCRIBER_CERTS = "list-pki-subscriber-certs",
CREATE_KMS = "create-kms",
UPDATE_KMS = "update-kms",
DELETE_KMS = "delete-kms",
@@ -1965,6 +1972,77 @@ interface DeletePkiCollectionItem {
};
}
interface CreatePkiSubscriber {
type: EventType.CREATE_PKI_SUBSCRIBER;
metadata: {
pkiSubscriberId: string;
caId?: string;
name: string;
commonName: string;
ttl: string;
subjectAlternativeNames: string[];
keyUsages: CertKeyUsage[];
extendedKeyUsages: CertExtendedKeyUsage[];
};
}
interface UpdatePkiSubscriber {
type: EventType.UPDATE_PKI_SUBSCRIBER;
metadata: {
pkiSubscriberId: string;
caId?: string;
name?: string;
commonName?: string;
ttl?: string;
subjectAlternativeNames?: string[];
keyUsages?: CertKeyUsage[];
extendedKeyUsages?: CertExtendedKeyUsage[];
};
}
interface DeletePkiSubscriber {
type: EventType.DELETE_PKI_SUBSCRIBER;
metadata: {
pkiSubscriberId: string;
name: string;
};
}
interface GetPkiSubscriber {
type: EventType.GET_PKI_SUBSCRIBER;
metadata: {
pkiSubscriberId: string;
name: string;
};
}
interface IssuePkiSubscriberCert {
type: EventType.ISSUE_PKI_SUBSCRIBER_CERT;
metadata: {
subscriberId: string;
name: string;
serialNumber: string;
};
}
interface SignPkiSubscriberCert {
type: EventType.SIGN_PKI_SUBSCRIBER_CERT;
metadata: {
subscriberId: string;
name: string;
serialNumber: string;
};
}
interface ListPkiSubscriberCerts {
type: EventType.LIST_PKI_SUBSCRIBER_CERTS;
metadata: {
subscriberId: string;
name: string;
projectId: string;
};
}
interface CreateKmsEvent {
type: EventType.CREATE_KMS;
metadata: {
@@ -2928,6 +3006,13 @@ export type Event =
| GetPkiCollectionItems
| AddPkiCollectionItem
| DeletePkiCollectionItem
| CreatePkiSubscriber
| UpdatePkiSubscriber
| DeletePkiSubscriber
| GetPkiSubscriber
| IssuePkiSubscriberCert
| SignPkiSubscriberCert
| ListPkiSubscriberCerts
| CreateKmsEvent
| UpdateKmsEvent
| DeleteKmsEvent

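A sketch of how one of the new subscriber events would be emitted; the createAuditLog call shape is assumed from the surrounding codebase, not shown in this diff:

// Assumed service call; metadata fields match IssuePkiSubscriberCert above
await auditLogService.createAuditLog({
projectId: subscriber.projectId,
event: {
type: EventType.ISSUE_PKI_SUBSCRIBER_CERT,
metadata: {
subscriberId: subscriber.id,
name: subscriber.name,
serialNumber: certificate.serialNumber
}
}
});
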
View File

@@ -1,6 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import { Octokit } from "@octokit/core";
-import { paginateGraphQL } from "@octokit/plugin-paginate-graphql";
+import { paginateGraphql } from "@octokit/plugin-paginate-graphql";
import { Octokit as OctokitRest } from "@octokit/rest";
import { OrgMembershipRole } from "@app/db/schemas";
@@ -18,7 +18,7 @@ import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGithubOrgSyncDALFactory } from "./github-org-sync-dal";
import { TCreateGithubOrgSyncDTO, TDeleteGithubOrgSyncDTO, TUpdateGithubOrgSyncDTO } from "./github-org-sync-types";
-const OctokitWithPlugin = Octokit.plugin(paginateGraphQL);
+const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
type TGithubOrgSyncServiceFactoryDep = {
githubOrgSyncDAL: TGithubOrgSyncDALFactory;

View File
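This pairs with the dependency changes above: @octokit/plugin-paginate-graphql v5 targets the ESM-only @octokit/core v6 and exports paginateGraphQL, while v4 still ships CJS and exports paginateGraphql; that is what the "resolved esm error in octokit" commit addresses. Usage sketch against the pinned versions:

import { Octokit } from "@octokit/core";
import { paginateGraphql } from "@octokit/plugin-paginate-graphql"; // v4 export name

const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
const octokit = new OctokitWithPlugin({ auth: process.env.GITHUB_TOKEN });
// octokit.graphql.paginate(...) pages through GraphQL connections as before
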

@@ -157,10 +157,23 @@ export const groupDALFactory = (db: TDbClient) => {
}
};
const findGroupsByProjectId = async (projectId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.Groups)
.join(TableName.GroupProjectMembership, `${TableName.Groups}.id`, `${TableName.GroupProjectMembership}.groupId`)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.select(selectAllTableCols(TableName.Groups));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "Find groups by project id" });
}
};
return {
findGroups,
findByOrgId,
findAllGroupPossibleMembers,
findGroupsByProjectId,
...groupOrm
};
};

View File

@@ -176,7 +176,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.Groups).as("groupName"),
db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"),
db.ref("firstName").withSchema(TableName.Users).as("firstName"),
-db.ref("lastName").withSchema(TableName.Users).as("lastName")
+db.ref("lastName").withSchema(TableName.Users).as("lastName"),
+db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
return docs;

View File

@@ -9,6 +9,7 @@ import {
ProjectPermissionIdentityActions,
ProjectPermissionKmipActions,
ProjectPermissionMemberActions,
ProjectPermissionPkiSubscriberActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretSyncActions,
@@ -76,6 +77,18 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SshHosts
);
can(
[
ProjectPermissionPkiSubscriberActions.Edit,
ProjectPermissionPkiSubscriberActions.Read,
ProjectPermissionPkiSubscriberActions.Create,
ProjectPermissionPkiSubscriberActions.Delete,
ProjectPermissionPkiSubscriberActions.IssueCert,
ProjectPermissionPkiSubscriberActions.ListCerts
],
ProjectPermissionSub.PkiSubscribers
);
can(
[
ProjectPermissionMemberActions.Create,
@@ -338,6 +351,7 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificateTemplates);
can([ProjectPermissionSshHostActions.Read], ProjectPermissionSub.SshHosts);
can([ProjectPermissionPkiSubscriberActions.Read], ProjectPermissionSub.PkiSubscribers);
can(
[

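At enforcement time these grants are checked against a CASL subject carrying the PkiSubscriberSubjectFields defined below, following the pattern the codebase already uses for ssh-hosts; a hedged sketch:

import { ForbiddenError, subject } from "@casl/ability";

ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.IssueCert,
subject(ProjectPermissionSub.PkiSubscribers, { name: pkiSubscriber.name })
);
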
View File

@@ -132,7 +132,7 @@ export const permissionDALFactory = (db: TDbClient) => {
}
};
-const getProjectGroupPermissions = async (projectId: string) => {
+const getProjectGroupPermissions = async (projectId: string, filterGroupId?: string) => {
try {
const docs = await db
.replicaNode()(TableName.GroupProjectMembership)
@@ -148,6 +148,11 @@ export const permissionDALFactory = (db: TDbClient) => {
`groupCustomRoles.id`
)
.where(`${TableName.GroupProjectMembership}.projectId`, "=", projectId)
.where((bd) => {
if (filterGroupId) {
void bd.where(`${TableName.GroupProjectMembership}.groupId`, "=", filterGroupId);
}
})
.select(
db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
db.ref("id").withSchema(TableName.Groups).as("groupId"),

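The optional parameter narrows the existing query rather than introducing a second one, so both call styles remain valid:

// all group permissions in a project (existing behavior)
const all = await permissionDAL.getProjectGroupPermissions(projectId);

// scoped to a single group (new; used by checkGroupProjectPermission below)
const one = await permissionDAL.getProjectGroupPermissions(projectId, groupId);
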
View File

@@ -630,6 +630,34 @@ export const permissionServiceFactory = ({
return { permission };
};
const checkGroupProjectPermission = async ({
groupId,
projectId,
checkPermissions
}: {
groupId: string;
projectId: string;
checkPermissions: ProjectPermissionSet;
}) => {
const rawGroupProjectPermissions = await permissionDAL.getProjectGroupPermissions(projectId, groupId);
const groupPermissions = rawGroupProjectPermissions.map((groupProjectPermission) => {
const rolePermissions =
groupProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || [];
const rules = buildProjectPermissionRules(rolePermissions);
const permission = createMongoAbility<ProjectPermissionSet>(rules, {
conditionsMatcher
});
return {
permission,
id: groupProjectPermission.groupId,
name: groupProjectPermission.username,
membershipId: groupProjectPermission.id
};
});
return groupPermissions.some((groupPermission) => groupPermission.permission.can(...checkPermissions));
};
return {
getUserOrgPermission,
getOrgPermission,
@@ -639,6 +667,7 @@ export const permissionServiceFactory = ({
getOrgPermissionByRole,
getProjectPermissionByRole,
buildOrgPermission,
-buildProjectPermissionRules
+buildProjectPermissionRules,
+checkGroupProjectPermission
};
};

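A sketch of the intended call site in the SSH login-mapping flow added by this PR; the exact permission tuple passed as checkPermissions is illustrative:

const canBeMapped = await permissionService.checkGroupProjectPermission({
groupId: group.id,
projectId,
checkPermissions: [ProjectPermissionSshHostActions.IssueHostCert, ProjectPermissionSub.SshHosts] // illustrative tuple
});
if (!canBeMapped) {
throw new BadRequestError({
message: `Group '${group.slug}' lacks the required permission in this project`
});
}
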
View File

@@ -87,6 +87,15 @@ export enum ProjectPermissionSshHostActions {
IssueHostCert = "issue-host-cert"
}
export enum ProjectPermissionPkiSubscriberActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
IssueCert = "issue-cert",
ListCerts = "list-certs"
}
export enum ProjectPermissionSecretSyncActions {
Read = "read",
Create = "create",
@@ -143,6 +152,7 @@ export enum ProjectPermissionSub {
SshCertificateTemplates = "ssh-certificate-templates",
SshHosts = "ssh-hosts",
SshHostGroups = "ssh-host-groups",
PkiSubscribers = "pki-subscribers",
PkiAlerts = "pki-alerts",
PkiCollections = "pki-collections",
Kms = "kms",
@@ -190,6 +200,11 @@ export type SshHostSubjectFields = {
hostname: string;
};
export type PkiSubscriberSubjectFields = {
name: string;
// (dangtony98): consider adding [commonName] as a subject field in the future
};
export type ProjectPermissionSet =
| [
ProjectPermissionSecretActions,
@@ -249,6 +264,13 @@ export type ProjectPermissionSet =
ProjectPermissionSshHostActions,
ProjectPermissionSub.SshHosts | (ForcedSubject<ProjectPermissionSub.SshHosts> & SshHostSubjectFields)
]
| [
ProjectPermissionPkiSubscriberActions,
(
| ProjectPermissionSub.PkiSubscribers
| (ForcedSubject<ProjectPermissionSub.PkiSubscribers> & PkiSubscriberSubjectFields)
)
]
| [ProjectPermissionActions, ProjectPermissionSub.SshHostGroups]
| [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts]
| [ProjectPermissionActions, ProjectPermissionSub.PkiCollections]
@@ -399,6 +421,21 @@ const SshHostConditionSchema = z
})
.partial();
const PkiSubscriberConditionSchema = z
.object({
name: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();
const GeneralPermissionSchema = [
z.object({
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
@@ -663,6 +700,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.PkiSubscribers).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionPkiSubscriberActions).describe(
"Describe what action an entity can take."
),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
conditions: PkiSubscriberConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),

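An example rule that should validate against the new discriminated-union member; the glob value is illustrative:

const rule = {
subject: ProjectPermissionSub.PkiSubscribers,
action: [ProjectPermissionPkiSubscriberActions.Read, ProjectPermissionPkiSubscriberActions.IssueCert],
conditions: {
// PkiSubscriberConditionSchema accepts a bare string or $eq / $glob / $in operators
name: { [PermissionConditionOperators.$GLOB]: "web-*" }
}
};
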
View File

@@ -334,7 +334,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecret).as("commitSecretId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecret).as("commitId"),
db.raw(
-`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+`DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
@@ -483,7 +483,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitSecretId"),
db.ref("id").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitId"),
db.raw(
-`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+`DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),

View File
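This is the "Fix approval request ordering" change from the commit list above: the request id is a random UUID, so ranking by id DESC picked an effectively arbitrary "latest request per project"; ranking by createdAt DESC orders by actual recency, and rank = 1 now selects the most recently created request in each project.
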

@@ -28,6 +28,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroup}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -35,7 +36,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.SshHostGroup),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
-db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
+db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
+db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
)
.orderBy(`${TableName.SshHostGroup}.updatedAt`, "desc");
@@ -69,7 +71,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
}
}));
return {
@@ -99,6 +102,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroup}.id`, sshHostGroupId)
.select(
db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -106,7 +110,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.SshHostGroup),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
-db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
+db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
+db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
if (rows.length === 0) return null;
@@ -121,7 +126,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
-usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
}
}));

View File

@@ -12,6 +12,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TGroupDALFactory } from "../group/group-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { createSshLoginMappings } from "../ssh-host/ssh-host-fns";
import {
@@ -43,8 +44,12 @@ type TSshHostGroupServiceFactoryDep = {
sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction" | "delete">;
sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
userDAL: Pick<TUserDALFactory, "find">;
-permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
+permissionService: Pick<
+TPermissionServiceFactory,
+"getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
+>;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
};
export type TSshHostGroupServiceFactory = ReturnType<typeof sshHostGroupServiceFactory>;
@@ -58,7 +63,8 @@ export const sshHostGroupServiceFactory = ({
sshHostLoginUserMappingDAL,
userDAL,
permissionService,
-licenseService
+licenseService,
+groupDAL
}: TSshHostGroupServiceFactoryDep) => {
const createSshHostGroup = async ({
projectId,
@@ -127,6 +133,7 @@ export const sshHostGroupServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -179,13 +186,42 @@ export const sshHostGroupServiceFactory = ({
});
const updatedSshHostGroup = await sshHostGroupDAL.transaction(async (tx) => {
-await sshHostGroupDAL.updateById(
-sshHostGroupId,
-{
-name
-},
-tx
-);
+if (name && name !== sshHostGroup.name) {
+// (dangtony98): room to optimize check to ensure that
+// the SSH host group name is unique across the whole org
+const project = await projectDAL.findById(sshHostGroup.projectId, tx);
+if (!project) throw new NotFoundError({ message: `Project with ID '${sshHostGroup.projectId}' not found` });
+const projects = await projectDAL.find(
+{
+orgId: project.orgId
+},
+{ tx }
+);
+const existingSshHostGroup = await sshHostGroupDAL.find(
+{
+name,
+$in: {
+projectId: projects.map((p) => p.id)
+}
+},
+{ tx }
+);
+if (existingSshHostGroup.length) {
+throw new BadRequestError({
+message: `SSH host group with name '${name}' already exists in the organization`
+});
+}
+await sshHostGroupDAL.updateById(
+sshHostGroupId,
+{
+name
+},
+tx
+);
+}
if (loginMappings) {
await sshHostLoginUserDAL.delete({ sshHostGroupId: sshHostGroup.id }, tx);
if (loginMappings.length) {
@@ -194,6 +230,7 @@ export const sshHostGroupServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId: sshHostGroup.projectId,

View File

@@ -9,12 +9,7 @@ export type TCreateSshHostGroupDTO = {
export type TUpdateSshHostGroupDTO = {
sshHostGroupId: string;
name?: string;
-loginMappings?: {
-loginUser: string;
-allowedPrincipals: {
-usernames: string[];
-};
-}[];
+loginMappings?: TLoginMapping[];
} & Omit<TProjectPermission, "projectId">;
export type TGetSshHostGroupDTO = {

View File
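loginMappings now reuses a shared TLoginMapping type. Its shape, as implied by the DAL changes in this PR (the authoritative definition lives in the ssh-host types module), is approximately:

type TLoginMapping = {
loginUser: string;
allowedPrincipals: {
usernames?: string[];
groups?: string[]; // new: group slugs allowed as principals
};
};
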

@@ -31,8 +31,18 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SshHostLoginUserMapping}.userId`)
+.leftJoin(
+TableName.UserGroupMembership,
+`${TableName.UserGroupMembership}.groupId`,
+`${TableName.SshHostLoginUserMapping}.groupId`
+)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
-.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+.andWhere((bd) => {
+void bd
+.where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+.orWhere(`${TableName.UserGroupMembership}.userId`, userId);
+})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -58,8 +68,17 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.join(TableName.SshHost, `${TableName.SshHostGroupMembership}.sshHostId`, `${TableName.SshHost}.id`)
+.leftJoin(
+TableName.UserGroupMembership,
+`${TableName.UserGroupMembership}.groupId`,
+`${TableName.SshHostLoginUserMapping}.groupId`
+)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
-.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+.andWhere((bd) => {
+void bd
+.where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
+.orWhere(`${TableName.UserGroupMembership}.userId`, userId);
+})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -133,6 +152,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -144,6 +164,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug"),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
@@ -163,10 +184,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.select(
db.ref("sshHostId").withSchema(TableName.SshHostGroupMembership),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
)
.whereIn(`${TableName.SshHostGroupMembership}.sshHostId`, hostIds);
@@ -185,7 +208,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -197,7 +221,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(inheritedGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));
@@ -229,6 +254,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.id`, sshHostId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -241,7 +267,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
db.ref("hostSshCaId").withSchema(TableName.SshHost),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
if (rows.length === 0) return null;
@@ -257,7 +284,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(directGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -275,10 +303,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroupMembership}.sshHostId`, sshHostId)
.select(
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
const groupGrouped = groupBy(
@@ -289,7 +319,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(groupGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));
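
For orientation, a hedged sketch of the mapping shape these changes produce (field values are made up; only the structure follows the selects and grouping above):

// Illustrative only: the approximate shape of a login mapping after the Groups join
const exampleMapping = {
  loginUser: "deploy",
  allowedPrincipals: {
    usernames: ["alice", "bob"], // from the Users join on userId
    groups: ["platform-team"] // group slugs from the new Groups join on groupId
  },
  source: LoginMappingSource.HOST // LoginMappingSource.HOST_GROUP when inherited via a host group
};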

View File

@@ -3,6 +3,7 @@ import { Knex } from "knex";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "../permission/project-permission";
import { TCreateSshLoginMappingsDTO } from "./ssh-host-types";
/**
@@ -15,6 +16,7 @@ export const createSshLoginMappings = async ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -35,7 +37,7 @@ export const createSshLoginMappings = async ({
tx
);
if (allowedPrincipals.usernames.length > 0) {
if (allowedPrincipals.usernames && allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
@@ -74,6 +76,41 @@ export const createSshLoginMappings = async ({
tx
);
}
if (allowedPrincipals.groups && allowedPrincipals.groups.length > 0) {
const projectGroups = await groupDAL.findGroupsByProjectId(projectId);
const groups = projectGroups.filter((g) => allowedPrincipals.groups?.includes(g.slug));
if (groups.length !== allowedPrincipals.groups?.length) {
throw new BadRequestError({
message: `Invalid group slugs: ${allowedPrincipals.groups
.filter((g) => !projectGroups.some((pg) => pg.slug === g))
.join(", ")}`
});
}
for await (const group of groups) {
// check that each group has access to the SSH project and has read access to hosts
const hasPermission = await permissionService.checkGroupProjectPermission({
groupId: group.id,
projectId,
checkPermissions: [ProjectPermissionSshHostActions.Read, ProjectPermissionSub.SshHosts]
});
if (!hasPermission) {
throw new BadRequestError({
message: `Group ${group.slug} does not have access to the SSH project`
});
}
}
await sshHostLoginUserMappingDAL.insertMany(
groups.map((group) => ({
sshHostLoginUserId: sshHostLoginUser.id,
groupId: group.id
})),
tx
);
}
}
};
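
A hypothetical loginMappings payload that would exercise the new group branch (slugs are made up); each slug must resolve to a group attached to the project, and the group must pass the checkGroupProjectPermission call above or a BadRequestError is thrown:

// Hypothetical input for createSshLoginMappings (names are placeholders)
const loginMappings = [
  {
    loginUser: "ec2-user",
    allowedPrincipals: {
      usernames: ["alice"], // now optional
      groups: ["platform-team"] // must be slugs of groups in the SSH project
    }
  }
];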

View File

@@ -15,7 +15,24 @@ export const sanitizedSshHost = SshHostsSchema.pick({
export const loginMappingSchema = z.object({
loginUser: z.string().trim(),
allowedPrincipals: z.object({
usernames: z.array(z.string().trim()).transform((usernames) => Array.from(new Set(usernames)))
})
allowedPrincipals: z
.object({
usernames: z
.array(z.string().trim())
.transform((usernames) => Array.from(new Set(usernames)))
.optional(),
groups: z
.array(z.string().trim())
.transform((groups) => Array.from(new Set(groups)))
.optional()
})
.refine(
(data) => {
return (data.usernames && data.usernames.length > 0) || (data.groups && data.groups.length > 0);
},
{
message: "At least one username or group must be provided",
path: ["allowedPrincipals"]
}
)
});
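
A quick sketch of how the refined schema behaves (assuming the loginMappingSchema export above):

// Accepted: groups alone now satisfy the refinement
loginMappingSchema.parse({
  loginUser: "deploy",
  allowedPrincipals: { groups: ["platform-team"] }
});

// Rejected: neither usernames nor groups provided
const result = loginMappingSchema.safeParse({
  loginUser: "deploy",
  allowedPrincipals: {}
});
// result.success === false; issue message: "At least one username or group must be provided"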

View File

@@ -1,6 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
@@ -19,6 +20,7 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
import {
convertActorToPrincipals,
createSshCert,
@@ -39,12 +41,14 @@ import {
type TSshHostServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findById" | "find">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "findGroupMembershipsByUserIdInOrg">;
sshHostDAL: Pick<
TSshHostDALFactory,
| "transaction"
@@ -58,7 +62,10 @@ type TSshHostServiceFactoryDep = {
>;
sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
permissionService: Pick<
TPermissionServiceFactory,
"getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
@@ -66,6 +73,8 @@ export type TSshHostServiceFactory = ReturnType<typeof sshHostServiceFactory>;
export const sshHostServiceFactory = ({
userDAL,
userGroupMembershipDAL,
groupDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -208,6 +217,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -278,6 +288,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId: host.projectId,
@@ -324,7 +335,7 @@ export const sshHostServiceFactory = ({
return host;
};
const getSshHost = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TGetSshHostDTO) => {
const getSshHostById = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TGetSshHostDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
@@ -387,10 +398,14 @@ export const sshHostServiceFactory = ({
userDAL
});
const userGroups = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(actorId, actorOrgId);
const userGroupSlugs = userGroups.map((g) => g.groupSlug);
const mapping = host.loginMappings.find(
(m) =>
m.loginUser === loginUser &&
m.allowedPrincipals.usernames.some((allowed) => internalPrincipals.includes(allowed))
(m.allowedPrincipals.usernames?.some((allowed) => internalPrincipals.includes(allowed)) ||
m.allowedPrincipals.groups?.some((allowed) => userGroupSlugs.includes(allowed)))
);
if (!mapping) {
@@ -616,7 +631,7 @@ export const sshHostServiceFactory = ({
createSshHost,
updateSshHost,
deleteSshHost,
getSshHost,
getSshHostById,
issueSshHostUserCert,
issueSshHostHostCert,
getSshHostUserCaPk,

View File

@@ -7,12 +7,15 @@ import { TProjectPermission } from "@app/lib/types";
import { ActorAuthMethod } from "@app/services/auth/auth-type";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TGroupDALFactory } from "../group/group-dal";
export type TListSshHostsDTO = Omit<TProjectPermission, "projectId">;
export type TLoginMapping = {
loginUser: string;
allowedPrincipals: {
usernames: string[];
usernames?: string[];
groups?: string[];
};
};
@@ -63,7 +66,8 @@ type BaseCreateSshLoginMappingsDTO = {
sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction">;
sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
userDAL: Pick<TUserDALFactory, "find">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission" | "checkGroupProjectPermission">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectId: string;
actorAuthMethod: ActorAuthMethod;
actorOrgId: string;

View File

@@ -46,6 +46,7 @@ export enum ApiDocsTags {
PkiCertificateTemplates = "PKI Certificate Templates",
PkiCertificateCollections = "PKI Certificate Collections",
PkiAlerting = "PKI Alerting",
PkiSubscribers = "PKI Subscribers",
SshCertificates = "SSH Certificates",
SshCertificateAuthorities = "SSH Certificate Authorities",
SshCertificateTemplates = "SSH Certificate Templates",
@@ -639,6 +640,9 @@ export const PROJECTS = {
commonName: "The common name of the certificate to filter by.",
offset: "The offset to start from. If you enter 10, it will start from the 10th certificate.",
limit: "The number of certificates to return."
},
LIST_PKI_SUBSCRIBERS: {
projectId: "The ID of the project to list PKI subscribers for."
}
} as const;
@@ -1478,7 +1482,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project.",
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project.",
userSshCaId:
"The ID of the SSH CA to use for user certificates. If not specified, the default user SSH CA will be used if it exists.",
hostSshCaId:
@@ -1493,7 +1497,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project."
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project."
},
DELETE: {
sshHostId: "The ID of the SSH host to delete."
@@ -1731,6 +1735,67 @@ export const ALERTS = {
}
};
export const PKI_SUBSCRIBERS = {
GET: {
subscriberName: "The name of the PKI subscriber to get.",
projectId: "The ID of the project to get the PKI subscriber for."
},
CREATE: {
projectId: "The ID of the project to create the PKI subscriber in.",
caId: "The ID of the CA that will issue certificates for the PKI subscriber.",
name: "The name of the PKI subscriber.",
commonName: "The common name (CN) to be used on certificates issued for this subscriber.",
status: "The status of the PKI subscriber. This can be one of active or disabled.",
ttl: "The time to live for the certificates issued for this subscriber such as 1m, 1h, 1d, 1y, ...",
subjectAlternativeNames:
"A list of Subject Alternative Names (SANs) to be used on certificates issued for this subscriber; these can be host names or email addresses.",
keyUsages: "The key usage extension to be used on certificates issued for this subscriber.",
extendedKeyUsages: "The extended key usage extension to be used on certificates issued for this subscriber."
},
UPDATE: {
projectId: "The ID of the project to update the PKI subscriber in.",
subscriberName: "The name of the PKI subscriber to update.",
caId: "The ID of the CA that will issue certificates for the PKI subscriber to update to.",
name: "The name of the PKI subscriber to update to.",
commonName: "The common name (CN) to be used on certificates issued for this subscriber to update to.",
status: "The status of the PKI subscriber to update to. This can be one of active or disabled.",
ttl: "The time to live for the certificates issued for this subscriber such as 1m, 1h, 1d, 1y, ...",
subjectAlternativeNames:
"A comma-delimited list of Subject Alternative Names (SANs) to be used on certificates issued for this subscriber; these can be host names or email addresses.",
keyUsages: "The key usage extension to be used on certificates issued for this subscriber to update to.",
extendedKeyUsages:
"The extended key usage extension to be used on certificates issued for this subscriber to update to."
},
DELETE: {
subscriberName: "The name of the PKI subscriber to delete.",
projectId: "The ID of the project of the PKI subscriber to delete."
},
ISSUE_CERT: {
subscriberName: "The name of the PKI subscriber to issue the certificate for.",
projectId: "The ID of the project of the PKI subscriber to issue the certificate for.",
certificate: "The issued certificate.",
issuingCaCertificate: "The certificate of the issuing CA.",
certificateChain: "The certificate chain of the issued certificate.",
privateKey: "The private key of the issued certificate.",
serialNumber: "The serial number of the issued certificate."
},
SIGN_CERT: {
subscriberName: "The name of the PKI subscriber to sign the certificate for.",
projectId: "The ID of the project of the PKI subscriber to sign the certificate for.",
csr: "The CSR to be used to sign the certificate.",
certificate: "The signed certificate.",
issuingCaCertificate: "The certificate of the issuing CA.",
certificateChain: "The certificate chain of the signed certificate.",
serialNumber: "The serial number of the signed certificate."
},
LIST_CERTS: {
subscriberName: "The name of the PKI subscriber to list the certificates for.",
projectId: "The ID of the project of the PKI subscriber to list the certificates for.",
offset: "The offset to start from.",
limit: "The number of certificates to return."
}
};
export const PKI_COLLECTIONS = {
CREATE: {
projectId: "The ID of the project to create the PKI collection in.",

View File

@@ -104,6 +104,14 @@ export const publicSshCaLimit: RateLimitOptions = {
export const invalidateCacheLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 1,
max: 2,
keyGenerator: (req) => req.realIp
};
// Makes spamming "request access" harder, preventing email DDoS
export const requestAccessLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 10,
keyGenerator: (req) => req.realIp
};
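
For reference, a sketch of how the new limiter is attached to a route (this mirrors the project-access route change later in this diff):

server.route({
  method: "POST",
  url: "/:workspaceId/project-access",
  config: {
    rateLimit: requestAccessLimit // at most 10 requests per minute per IP
  }
  // ... schema and handler unchanged
});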

View File

@@ -5,7 +5,7 @@
import type { FastifySchema, FastifySchemaCompiler, FastifyTypeProvider } from "fastify";
import type { FastifySerializerCompiler } from "fastify/types/schema";
import type { z, ZodAny, ZodTypeAny } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { PostProcessCallback, zodToJsonSchema } from "zod-to-json-schema";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type FreeformRecord = Record<string, any>;
@@ -28,9 +28,25 @@ interface Schema extends FastifySchema {
hide?: boolean;
}
// Credit: https://github.com/StefanTerdell/zod-to-json-schema
const jsonDescription: PostProcessCallback = (jsonSchema, def) => {
if (def.description) {
try {
return {
...jsonSchema,
description: undefined,
...JSON.parse(def.description)
};
} catch {}
}
return jsonSchema;
};
const zodToJsonSchemaOptions = {
target: "openApi3",
$refStrategy: "none"
$refStrategy: "none",
postProcess: jsonDescription
} as const;
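
A hedged usage sketch: a route schema can JSON-encode extra OpenAPI fields (such as title) into the Zod description, and the postProcess callback above spreads them into the generated JSON schema. Plain string descriptions still pass through because JSON.parse throws and the catch falls back to the original schema:

// Hypothetical field using a JSON-encoded description to carry an OpenAPI title
const projectIdParam = z
  .string()
  .describe(JSON.stringify({ title: "Project ID", description: "The ID of the project." }));
// -> OpenAPI output: { type: "string", title: "Project ID", description: "The ID of the project." }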
// eslint-disable-next-line @typescript-eslint/no-explicit-any

View File

@@ -197,6 +197,8 @@ import { pkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-servic
import { pkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
import { pkiCollectionItemDALFactory } from "@app/services/pki-collection/pki-collection-item-dal";
import { pkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { pkiSubscriberDALFactory } from "@app/services/pki-subscriber/pki-subscriber-dal";
import { pkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
import { projectDALFactory } from "@app/services/project/project-dal";
import { projectQueueFactory } from "@app/services/project/project-queue";
import { projectServiceFactory } from "@app/services/project/project-service";
@@ -828,6 +830,7 @@ export const registerRoutes = async (
const pkiAlertDAL = pkiAlertDALFactory(db);
const pkiCollectionDAL = pkiCollectionDALFactory(db);
const pkiCollectionItemDAL = pkiCollectionItemDALFactory(db);
const pkiSubscriberDAL = pkiSubscriberDALFactory(db);
const certificateService = certificateServiceFactory({
certificateDAL,
@@ -870,6 +873,8 @@ export const registerRoutes = async (
const sshHostService = sshHostServiceFactory({
userDAL,
groupDAL,
userGroupMembershipDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -892,7 +897,8 @@ export const registerRoutes = async (
sshHostLoginUserMappingDAL,
userDAL,
permissionService,
licenseService
licenseService,
groupDAL
});
const certificateAuthorityService = certificateAuthorityServiceFactory({
@@ -959,6 +965,20 @@ export const registerRoutes = async (
projectDAL
});
const pkiSubscriberService = pkiSubscriberServiceFactory({
pkiSubscriberDAL,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
certificateAuthoritySecretDAL,
certificateAuthorityCrlDAL,
certificateDAL,
certificateBodyDAL,
certificateSecretDAL,
projectDAL,
kmsService,
permissionService
});
const projectTemplateService = projectTemplateServiceFactory({
licenseService,
permissionService,
@@ -1056,6 +1076,7 @@ export const registerRoutes = async (
projectRoleDAL,
folderDAL,
licenseService,
pkiSubscriberDAL,
certificateAuthorityDAL,
certificateDAL,
pkiAlertDAL,
@@ -1742,6 +1763,7 @@ export const registerRoutes = async (
certificateEst: certificateEstService,
pkiAlert: pkiAlertService,
pkiCollection: pkiCollectionService,
pkiSubscriber: pkiSubscriberService,
secretScanning: secretScanningService,
license: licenseService,
trustedIp: trustedIpService,
@@ -1784,6 +1806,10 @@ export const registerRoutes = async (
if (licenseSyncJob) {
cronJobs.push(licenseSyncJob);
}
const microsoftTeamsSyncJob = await microsoftTeamsService.initializeBackgroundSync();
if (microsoftTeamsSyncJob) {
cronJobs.push(microsoftTeamsSyncJob);
}
}
server.decorate<FastifyZodProvider["store"]>("store", {

View File

@@ -33,6 +33,7 @@ import { registerOrgRouter } from "./organization-router";
import { registerPasswordRouter } from "./password-router";
import { registerPkiAlertRouter } from "./pki-alert-router";
import { registerPkiCollectionRouter } from "./pki-collection-router";
import { registerPkiSubscriberRouter } from "./pki-subscriber-router";
import { registerProjectEnvRouter } from "./project-env-router";
import { registerProjectKeyRouter } from "./project-key-router";
import { registerProjectMembershipRouter } from "./project-membership-router";
@@ -105,6 +106,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await pkiRouter.register(registerCertificateTemplateRouter, { prefix: "/certificate-templates" });
await pkiRouter.register(registerPkiAlertRouter, { prefix: "/alerts" });
await pkiRouter.register(registerPkiCollectionRouter, { prefix: "/collections" });
await pkiRouter.register(registerPkiSubscriberRouter, { prefix: "/subscribers" });
},
{ prefix: "/pki" }
);

View File

@@ -2,7 +2,7 @@ import { z } from "zod";
import { ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -47,7 +47,7 @@ export const registerOrgAdminRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/projects/:projectId/grant-admin-access",
config: {
rateLimit: readLimit
rateLimit: writeLimit
},
schema: {
params: z.object({

View File

@@ -0,0 +1,478 @@
import { z } from "zod";
import { CertificatesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, PKI_SUBSCRIBERS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertExtendedKeyUsage, CertKeyUsage } from "@app/services/certificate/certificate-types";
import { validateAltNameField } from "@app/services/certificate-authority/certificate-authority-validators";
import { sanitizedPkiSubscriber } from "@app/services/pki-subscriber/pki-subscriber-schema";
import { PkiSubscriberStatus } from "@app/services/pki-subscriber/pki-subscriber-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:subscriberName",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Get PKI Subscriber",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.GET.subscriberName)
}),
querystring: z.object({
projectId: z.string().describe(PKI_SUBSCRIBERS.GET.projectId)
}),
response: {
200: sanitizedPkiSubscriber
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.getSubscriber({
subscriberName: req.params.subscriberName,
projectId: req.query.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.GET_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
name: subscriber.name
}
}
});
return subscriber;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Create PKI Subscriber",
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.CREATE.projectId),
caId: z
.string()
.trim()
.uuid("CA ID must be a valid UUID")
.min(1, "CA ID is required")
.describe(PKI_SUBSCRIBERS.CREATE.caId),
name: slugSchema({ min: 1, max: 64, field: "name" }).describe(PKI_SUBSCRIBERS.CREATE.name),
commonName: z.string().trim().min(1).describe(PKI_SUBSCRIBERS.CREATE.commonName),
status: z
.nativeEnum(PkiSubscriberStatus)
.default(PkiSubscriberStatus.ACTIVE)
.describe(PKI_SUBSCRIBERS.CREATE.status),
ttl: z
.string()
.trim()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(PKI_SUBSCRIBERS.CREATE.ttl),
subjectAlternativeNames: validateAltNameField
.array()
.default([])
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.subjectAlternativeNames),
keyUsages: z
.nativeEnum(CertKeyUsage)
.array()
.default([CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT])
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.default([])
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.extendedKeyUsages)
}),
response: {
200: sanitizedPkiSubscriber
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.createSubscriber({
...req.body,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.CREATE_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
caId: subscriber.caId ?? undefined,
name: subscriber.name,
commonName: subscriber.commonName,
ttl: subscriber.ttl,
subjectAlternativeNames: subscriber.subjectAlternativeNames,
keyUsages: subscriber.keyUsages as CertKeyUsage[],
extendedKeyUsages: subscriber.extendedKeyUsages as CertExtendedKeyUsage[]
}
}
});
return subscriber;
}
});
server.route({
method: "PATCH",
url: "/:subscriberName",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Update PKI Subscriber",
params: z.object({
subscriberName: z.string().trim().describe(PKI_SUBSCRIBERS.UPDATE.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.UPDATE.projectId),
caId: z
.string()
.trim()
.uuid("CA ID must be a valid UUID")
.min(1, "CA ID is required")
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.caId),
name: slugSchema({ min: 1, max: 64, field: "name" }).describe(PKI_SUBSCRIBERS.UPDATE.name).optional(),
commonName: z.string().trim().min(1).describe(PKI_SUBSCRIBERS.UPDATE.commonName).optional(),
status: z.nativeEnum(PkiSubscriberStatus).optional().describe(PKI_SUBSCRIBERS.UPDATE.status),
subjectAlternativeNames: validateAltNameField
.array()
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.subjectAlternativeNames),
ttl: z
.string()
.trim()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.ttl),
keyUsages: z
.nativeEnum(CertKeyUsage)
.array()
.transform((arr) => Array.from(new Set(arr)))
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.keyUsages),
extendedKeyUsages: z
.nativeEnum(CertExtendedKeyUsage)
.array()
.transform((arr) => Array.from(new Set(arr)))
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.extendedKeyUsages)
}),
response: {
200: sanitizedPkiSubscriber
}
},
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.updateSubscriber({
subscriberName: req.params.subscriberName,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.UPDATE_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
caId: subscriber.caId ?? undefined,
name: subscriber.name,
commonName: subscriber.commonName,
ttl: subscriber.ttl,
subjectAlternativeNames: subscriber.subjectAlternativeNames,
keyUsages: subscriber.keyUsages as CertKeyUsage[],
extendedKeyUsages: subscriber.extendedKeyUsages as CertExtendedKeyUsage[]
}
}
});
return subscriber;
}
});
server.route({
method: "DELETE",
url: "/:subscriberName",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Delete PKI Subscriber",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.DELETE.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.DELETE.projectId)
}),
response: {
200: sanitizedPkiSubscriber
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscriber = await server.services.pkiSubscriber.deleteSubscriber({
subscriberName: req.params.subscriberName,
projectId: req.body.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.DELETE_PKI_SUBSCRIBER,
metadata: {
pkiSubscriberId: subscriber.id,
name: subscriber.name
}
}
});
return subscriber;
}
});
server.route({
method: "POST",
url: "/:subscriberName/issue-certificate",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Issue certificate",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.ISSUE_CERT.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.projectId)
}),
response: {
200: z.object({
certificate: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.certificate),
issuingCaCertificate: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.issuingCaCertificate),
certificateChain: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.certificateChain),
privateKey: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.privateKey),
serialNumber: z.string().trim().describe(PKI_SUBSCRIBERS.ISSUE_CERT.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, privateKey, serialNumber, subscriber } =
await server.services.pkiSubscriber.issueSubscriberCert({
subscriberName: req.params.subscriberName,
projectId: req.body.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.ISSUE_PKI_SUBSCRIBER_CERT,
metadata: {
subscriberId: subscriber.id,
name: subscriber.name,
serialNumber
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueCert,
distinctId: getTelemetryDistinctId(req),
properties: {
subscriberId: subscriber.id,
commonName: subscriber.commonName,
...req.auditLogInfo
}
});
return {
certificate,
certificateChain,
issuingCaCertificate,
privateKey,
serialNumber
};
}
});
server.route({
method: "POST",
url: "/:subscriberName/sign-certificate",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "Sign certificate",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.SIGN_CERT.subscriberName)
}),
body: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.projectId),
csr: z.string().trim().min(1).max(3000).describe(PKI_SUBSCRIBERS.SIGN_CERT.csr)
}),
response: {
200: z.object({
certificate: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.certificate),
issuingCaCertificate: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.issuingCaCertificate),
certificateChain: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.certificateChain),
serialNumber: z.string().trim().describe(PKI_SUBSCRIBERS.SIGN_CERT.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, serialNumber, subscriber } =
await server.services.pkiSubscriber.signSubscriberCert({
subscriberName: req.params.subscriberName,
projectId: req.body.projectId,
csr: req.body.csr,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: subscriber.projectId,
event: {
type: EventType.SIGN_PKI_SUBSCRIBER_CERT,
metadata: {
subscriberId: subscriber.id,
name: subscriber.name,
serialNumber
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignCert,
distinctId: getTelemetryDistinctId(req),
properties: {
subscriberId: subscriber.id,
commonName: subscriber.commonName,
...req.auditLogInfo
}
});
return {
certificate,
certificateChain,
issuingCaCertificate,
serialNumber
};
}
});
server.route({
method: "GET",
url: "/:subscriberName/certificates",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
description: "List PKI Subscriber certificates",
params: z.object({
subscriberName: z.string().describe(PKI_SUBSCRIBERS.LIST_CERTS.subscriberName)
}),
querystring: z.object({
projectId: z.string().trim().describe(PKI_SUBSCRIBERS.LIST_CERTS.projectId),
offset: z.coerce.number().min(0).max(100).default(0).describe(PKI_SUBSCRIBERS.LIST_CERTS.offset),
limit: z.coerce.number().min(1).max(100).default(25).describe(PKI_SUBSCRIBERS.LIST_CERTS.limit)
}),
response: {
200: z.object({
certificates: z.array(CertificatesSchema),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { totalCount, certificates } = await server.services.pkiSubscriber.listSubscriberCerts({
subscriberName: req.params.subscriberName,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.LIST_PKI_SUBSCRIBER_CERTS,
metadata: {
subscriberId: req.params.subscriberName,
name: req.params.subscriberName,
projectId: req.query.projectId
}
}
});
return {
certificates,
totalCount
};
}
});
};
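
A hypothetical client call against the issue-certificate route above (base URL and token are placeholders; the /api/v1/pki/subscribers prefix follows the router registration later in this diff):

// Hypothetical client usage; URL and token are placeholders
const res = await fetch(
  "https://app.infisical.com/api/v1/pki/subscribers/my-subscriber/issue-certificate",
  {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({ projectId: "<project-id>" })
  }
);
const { certificate, certificateChain, privateKey, serialNumber } = await res.json();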

View File

@@ -19,7 +19,7 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, PROJECTS } from "@app/lib/api-docs";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { re2Validator } from "@app/lib/zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, requestAccessLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { validateMicrosoftTeamsChannelsSchema } from "@app/services/microsoft-teams/microsoft-teams-fns";
@@ -1006,7 +1006,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/:workspaceId/project-access",
config: {
rateLimit: writeLimit
rateLimit: requestAccessLimit
},
schema: {
params: z.object({

View File

@@ -24,6 +24,7 @@ import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { sanitizedCertificateTemplate } from "@app/services/certificate-template/certificate-template-schema";
import { sanitizedPkiSubscriber } from "@app/services/pki-subscriber/pki-subscriber-schema";
import { ProjectFilterType } from "@app/services/project/project-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
@@ -490,6 +491,38 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/:projectId/pki-subscribers",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.PkiSubscribers],
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_PKI_SUBSCRIBERS.projectId)
}),
response: {
200: z.object({
subscribers: z.array(sanitizedPkiSubscriber)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const subscribers = await server.services.project.listProjectPkiSubscribers({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
projectId: req.params.projectId
});
return { subscribers };
}
});
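
A hypothetical call to the new listing endpoint (the /api/v1/workspace prefix is an assumption about where this project router is mounted; token is a placeholder):

// Hypothetical client usage; the route prefix is assumed, not confirmed by this diff
const res = await fetch(`https://app.infisical.com/api/v1/workspace/${projectId}/pki-subscribers`, {
  headers: { Authorization: `Bearer ${token}` }
});
const { subscribers } = await res.json();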
server.route({
method: "GET",
url: "/:projectId/certificate-templates",
@@ -628,6 +661,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SshHosts],
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_SSH_HOSTS.projectId)
}),
@@ -666,6 +701,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SshHostGroups],
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_SSH_HOST_GROUPS.projectId)
}),

View File

@@ -1169,7 +1169,7 @@ export const certificateAuthorityServiceFactory = ({
ProjectPermissionSub.Certificates
);
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
@@ -1520,7 +1520,7 @@ export const certificateAuthorityServiceFactory = ({
);
}
if (ca.status === CaStatus.DISABLED) throw new BadRequestError({ message: "CA is disabled" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance && !certificateTemplate) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });

View File

@@ -10,6 +10,18 @@ const isValidDate = (dateString: string) => {
export const validateCaDateField = z.string().trim().refine(isValidDate, { message: "Invalid date format" });
export const validateAltNameField = z
.string()
.trim()
.refine(
(name) => {
return isFQDN(name) || z.string().email().safeParse(name).success || isValidIp(name);
},
{
message: "SAN must be a valid hostname, email address, or IP address"
}
);
export const validateAltNamesField = z
.string()
.trim()

View File

@@ -44,8 +44,27 @@ export const certificateDALFactory = (db: TDbClient) => {
}
};
const countCertificatesForPkiSubscriber = async (subscriberId: string) => {
try {
interface CountResult {
count: string;
}
const query = db
.replicaNode()(TableName.Certificate)
.where(`${TableName.Certificate}.pkiSubscriberId`, subscriberId);
const count = await query.count("*").first();
return parseInt((count as unknown as CountResult).count || "0", 10);
} catch (error) {
throw new DatabaseError({ error, name: "Count all subscriber certificates" });
}
};
return {
...certificateOrm,
countCertificatesInProject
countCertificatesInProject,
countCertificatesForPkiSubscriber
};
};

View File

@@ -177,7 +177,6 @@ export const deleteGithubSecrets = async ({
selected_repositories_url?: string | undefined;
}
// @ts-expect-error just octokit ts compatiability issue
const OctokitWithRetry = Octokit.plugin(retry);
let octokit: Octokit;
const appCfg = getConfig();

View File

@@ -6,6 +6,7 @@ import {
Request,
Response
} from "botbuilder";
import { CronJob } from "cron";
import { FastifyReply, FastifyRequest } from "fastify";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
@@ -86,8 +87,17 @@ export const microsoftTeamsServiceFactory = ({
}: TMicrosoftTeamsServiceFactoryDep) => {
let teamsBot: TeamsBot | null = null;
let adapter: CloudAdapter | null = null;
let lastKnownUpdatedAt = new Date();
const initializeTeamsBot = async ({ botAppId, botAppPassword }: { botAppId: string; botAppPassword: string }) => {
const initializeTeamsBot = async ({
botAppId,
botAppPassword,
lastUpdatedAt
}: {
botAppId: string;
botAppPassword: string;
lastUpdatedAt?: Date;
}) => {
logger.info("Initializing Microsoft Teams bot");
teamsBot = new TeamsBot({
botAppId,
@@ -106,6 +116,57 @@ export const microsoftTeamsServiceFactory = ({
})
)
);
if (lastUpdatedAt) {
lastKnownUpdatedAt = lastUpdatedAt;
}
};
const $syncMicrosoftTeamsIntegrationConfiguration = async () => {
try {
const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
if (!serverCfg) {
throw new BadRequestError({
message: "Failed to get server configuration."
});
}
if (lastKnownUpdatedAt.getTime() === serverCfg.updatedAt.getTime()) {
logger.info("No changes to Microsoft Teams integration configuration, skipping sync");
return;
}
lastKnownUpdatedAt = serverCfg.updatedAt;
if (
serverCfg.encryptedMicrosoftTeamsAppId &&
serverCfg.encryptedMicrosoftTeamsClientSecret &&
serverCfg.encryptedMicrosoftTeamsBotId
) {
const decryptWithRoot = kmsService.decryptWithRootKey();
const decryptedAppId = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsAppId);
const decryptedAppPassword = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsClientSecret);
await initializeTeamsBot({
botAppId: decryptedAppId.toString(),
botAppPassword: decryptedAppPassword.toString()
});
}
} catch (err) {
logger.error(err, "Error syncing Microsoft Teams integration configuration");
}
};
const initializeBackgroundSync = async () => {
logger.info("Setting up background sync process for Microsoft Teams workflow integration configuration");
// initial sync upon startup
await $syncMicrosoftTeamsIntegrationConfiguration();
// sync Microsoft Teams integration configuration every 5 minutes
const job = new CronJob("*/5 * * * *", $syncMicrosoftTeamsIntegrationConfiguration);
job.start();
return job;
};
const start = async () => {
@@ -703,6 +764,7 @@ export const microsoftTeamsServiceFactory = ({
getTeams,
handleMessageEndpoint,
start,
initializeBackgroundSync,
sendNotification,
checkInstallationStatus,
getClientId

View File

@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TPkiSubscriberDALFactory = ReturnType<typeof pkiSubscriberDALFactory>;
export const pkiSubscriberDALFactory = (db: TDbClient) => {
const pkiSubscriberOrm = ormify(db, TableName.PkiSubscriber);
return pkiSubscriberOrm;
};

View File

@@ -0,0 +1,14 @@
import { PkiSubscribersSchema } from "@app/db/schemas";
export const sanitizedPkiSubscriber = PkiSubscribersSchema.pick({
id: true,
projectId: true,
caId: true,
name: true,
commonName: true,
status: true,
subjectAlternativeNames: true,
ttl: true,
keyUsages: true,
extendedKeyUsages: true
});

View File

@@ -0,0 +1,805 @@
/* eslint-disable no-bitwise */
import { ForbiddenError, subject } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import crypto, { KeyObject } from "crypto";
import { z } from "zod";
import { ActionProjectType } from "@app/db/schemas";
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionPkiSubscriberActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { isFQDN } from "@app/lib/validator/validate-url";
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal";
import {
CertExtendedKeyUsage,
CertExtendedKeyUsageOIDToName,
CertKeyAlgorithm,
CertKeyUsage,
CertStatus
} from "@app/services/certificate/certificate-types";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import {
createSerialNumber,
getCaCertChain,
getCaCredentials,
keyAlgorithmToAlgCfg,
parseDistinguishedName
} from "@app/services/certificate-authority/certificate-authority-fns";
import { TCertificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TPkiSubscriberDALFactory } from "@app/services/pki-subscriber/pki-subscriber-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
import {
PkiSubscriberStatus,
TCreatePkiSubscriberDTO,
TDeletePkiSubscriberDTO,
TGetPkiSubscriberDTO,
TIssuePkiSubscriberCertDTO,
TListPkiSubscriberCertsDTO,
TSignPkiSubscriberCertDTO,
TUpdatePkiSubscriberDTO
} from "./pki-subscriber-types";
type TPkiSubscriberServiceFactoryDep = {
pkiSubscriberDAL: Pick<
TPkiSubscriberDALFactory,
"create" | "findById" | "updateById" | "deleteById" | "transaction" | "find" | "findOne"
>;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "findOne">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction" | "countCertificatesForPkiSubscriber" | "find">;
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
certificateSecretDAL: Pick<TCertificateSecretDALFactory, "create">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction" | "findById" | "find">;
kmsService: Pick<TKmsServiceFactory, "generateKmsKey" | "decryptWithKmsKey" | "encryptWithKmsKey">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};
export type TPkiSubscriberServiceFactory = ReturnType<typeof pkiSubscriberServiceFactory>;
export const pkiSubscriberServiceFactory = ({
pkiSubscriberDAL,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
certificateAuthoritySecretDAL,
certificateAuthorityCrlDAL,
certificateDAL,
certificateBodyDAL,
certificateSecretDAL,
projectDAL,
kmsService,
permissionService
}: TPkiSubscriberServiceFactoryDep) => {
const createSubscriber = async ({
name,
commonName,
status,
caId,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TCreatePkiSubscriberDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Create,
subject(ProjectPermissionSub.PkiSubscribers, {
name
})
);
const newSubscriber = await pkiSubscriberDAL.create({
caId,
projectId,
name,
commonName,
status,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages
});
return newSubscriber;
};
const getSubscriber = async ({
subscriberName,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TGetPkiSubscriberDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Read,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
return subscriber;
};
const updateSubscriber = async ({
subscriberName,
projectId,
name,
commonName,
status,
caId,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdatePkiSubscriberDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Edit,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
const updatedSubscriber = await pkiSubscriberDAL.updateById(subscriber.id, {
caId,
name,
commonName,
status,
ttl,
subjectAlternativeNames,
keyUsages,
extendedKeyUsages
});
return updatedSubscriber;
};
const deleteSubscriber = async ({
subscriberName,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TDeletePkiSubscriberDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.Delete,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
await pkiSubscriberDAL.deleteById(subscriber.id);
return subscriber;
};
const issueSubscriberCert = async ({
subscriberName,
projectId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TIssuePkiSubscriberCertDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
if (!subscriber.caId) throw new BadRequestError({ message: "Subscriber does not have an assigned issuing CA" });
const ca = await certificateAuthorityDAL.findById(subscriber.caId);
if (!ca) throw new NotFoundError({ message: `CA with ID '${subscriber.caId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.IssueCert,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
if (subscriber.status !== PkiSubscriberStatus.ACTIVE)
throw new BadRequestError({ message: "Subscriber is not active" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
const caCertObj = new x509.X509Certificate(decryptedCaCert);
const notBeforeDate = new Date();
const notAfterDate = new Date(new Date().getTime() + ms(subscriber.ttl));
const caCertNotBeforeDate = new Date(caCertObj.notBefore);
const caCertNotAfterDate = new Date(caCertObj.notAfter);
// check not before constraint
if (notBeforeDate < caCertNotBeforeDate) {
throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
}
// check not after constraint
if (notAfterDate > caCertNotAfterDate) {
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
}
const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
const leafKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({
name: `CN=${subscriber.commonName}`,
keys: leafKeys,
signingAlgorithm: alg,
extensions: [
// eslint-disable-next-line no-bitwise
new x509.KeyUsagesExtension(x509.KeyUsageFlags.digitalSignature | x509.KeyUsageFlags.keyEncipherment)
],
attributes: [new x509.ChallengePasswordAttribute("password")]
});
const { caPrivateKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});
const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
const appCfg = getConfig();
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
const extensions: x509.Extension[] = [
new x509.BasicConstraintsExtension(false),
new x509.CRLDistributionPointsExtension([distributionPointUrl]),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
new x509.AuthorityInfoAccessExtension({
caIssuers: new x509.GeneralName("url", caIssuerUrl)
}),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy
];
const selectedKeyUsages = subscriber.keyUsages as CertKeyUsage[];
const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0);
if (keyUsagesBitValue) {
extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true));
}
if (subscriber.extendedKeyUsages.length) {
const extendedKeyUsagesExtension = new x509.ExtendedKeyUsageExtension(
subscriber.extendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku as CertExtendedKeyUsage]),
true
);
extensions.push(extendedKeyUsagesExtension);
}
let altNamesArray: { type: "email" | "dns"; value: string }[] = [];
if (subscriber.subjectAlternativeNames?.length) {
altNamesArray = subscriber.subjectAlternativeNames.map((altName) => {
if (z.string().email().safeParse(altName).success) {
return { type: "email", value: altName };
}
if (isFQDN(altName, { allow_wildcard: true })) {
return { type: "dns", value: altName };
}
throw new BadRequestError({ message: `Invalid SAN entry: ${altName}` });
});
const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false);
extensions.push(altNamesExtension);
}
const serialNumber = createSerialNumber();
const leafCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
issuer: caCertObj.subject,
notBefore: notBeforeDate,
notAfter: notAfterDate,
signingKey: caPrivateKey,
publicKey: csrObj.publicKey,
signingAlgorithm: alg,
extensions
});
const skLeafObj = KeyObject.from(leafKeys.privateKey);
const skLeaf = skLeafObj.export({ format: "pem", type: "pkcs8" }) as string;
const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(leafCert.rawData))
});
const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({
plainText: Buffer.from(skLeaf)
});
const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caCertId: caCert.id,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});
const certificateChainPem = `${issuingCaCertificate}\n${caCertChain}`.trim();
const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.from(certificateChainPem)
});
await certificateDAL.transaction(async (tx) => {
const cert = await certificateDAL.create(
{
caId: ca.id,
caCertId: caCert.id,
pkiSubscriberId: subscriber.id,
status: CertStatus.ACTIVE,
friendlyName: subscriber.commonName,
commonName: subscriber.commonName,
altNames: subscriber.subjectAlternativeNames.join(","),
serialNumber,
notBefore: notBeforeDate,
notAfter: notAfterDate,
keyUsages: selectedKeyUsages,
extendedKeyUsages: subscriber.extendedKeyUsages as CertExtendedKeyUsage[]
},
tx
);
await certificateBodyDAL.create(
{
certId: cert.id,
encryptedCertificate,
encryptedCertificateChain
},
tx
);
await certificateSecretDAL.create(
{
certId: cert.id,
encryptedPrivateKey
},
tx
);
});
return {
certificate: leafCert.toString("pem"),
certificateChain: certificateChainPem,
issuingCaCertificate,
privateKey: skLeaf,
serialNumber,
ca,
subscriber
};
};
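The SAN handling above classifies each subscriber-supplied name as an email address (via zod) or a DNS name (via isFQDN with wildcards allowed) and rejects anything else. A minimal Go sketch of the same decision rule; classifySAN and fqdnRe are hypothetical names, and the FQDN pattern is an illustrative approximation rather than the validator the service uses:

package main

import (
	"fmt"
	"net/mail"
	"regexp"
)

// Illustrative approximation of an FQDN check with wildcard support;
// the service relies on validator's isFQDN instead.
var fqdnRe = regexp.MustCompile(`^(\*\.)?([a-zA-Z0-9-]+\.)+[a-zA-Z]{2,}$`)

func classifySAN(altName string) (string, error) {
	if _, err := mail.ParseAddress(altName); err == nil {
		return "email", nil
	}
	if fqdnRe.MatchString(altName) {
		return "dns", nil
	}
	return "", fmt.Errorf("invalid SAN entry: %s", altName)
}

func main() {
	for _, san := range []string{"ops@example.com", "*.internal.example.com", "not a name"} {
		kind, err := classifySAN(san)
		fmt.Println(san, kind, err)
	}
}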
const signSubscriberCert = async ({
subscriberName,
projectId,
csr,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TSignPkiSubscriberCertDTO) => {
const appCfg = getConfig();
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
if (!subscriber.caId) throw new BadRequestError({ message: "Subscriber does not have an assigned issuing CA" });
const ca = await certificateAuthorityDAL.findById(subscriber.caId);
if (!ca) throw new NotFoundError({ message: `CA with ID '${subscriber.caId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.IssueCert,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
if (subscriber.status !== PkiSubscriberStatus.ACTIVE)
throw new BadRequestError({ message: "Subscriber is not active" });
if (ca.status !== CaStatus.ACTIVE) throw new BadRequestError({ message: "CA is not active" });
if (!ca.activeCaCertId) throw new BadRequestError({ message: "CA does not have a certificate installed" });
if (ca.requireTemplateForIssuance) {
throw new BadRequestError({ message: "Certificate template is required for issuance" });
}
const caCert = await certificateAuthorityCertDAL.findById(ca.activeCaCertId);
const certificateManagerKmsId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const kmsDecryptor = await kmsService.decryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const decryptedCaCert = await kmsDecryptor({
cipherTextBlob: caCert.encryptedCertificate
});
const caCertObj = new x509.X509Certificate(decryptedCaCert);
const notBeforeDate = new Date();
const notAfterDate = new Date(new Date().getTime() + ms(subscriber.ttl));
const caCertNotBeforeDate = new Date(caCertObj.notBefore);
const caCertNotAfterDate = new Date(caCertObj.notAfter);
// check not before constraint
if (notBeforeDate < caCertNotBeforeDate) {
throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
}
// check not after constraint
if (notAfterDate > caCertNotAfterDate) {
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
}
const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
const csrObj = new x509.Pkcs10CertificateRequest(csr);
const dn = parseDistinguishedName(csrObj.subject);
const cn = dn.commonName;
if (cn !== subscriber.commonName) {
throw new BadRequestError({ message: "Common name (CN) in the CSR does not match the subscriber's common name" });
}
const { caPrivateKey, caSecret } = await getCaCredentials({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService
});
const caCrl = await certificateAuthorityCrlDAL.findOne({ caSecretId: caSecret.id });
const distributionPointUrl = `${appCfg.SITE_URL}/api/v1/pki/crl/${caCrl.id}/der`;
const caIssuerUrl = `${appCfg.SITE_URL}/api/v1/pki/ca/${ca.id}/certificates/${caCert.id}/der`;
const extensions: x509.Extension[] = [
new x509.BasicConstraintsExtension(false),
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
await x509.SubjectKeyIdentifierExtension.create(csrObj.publicKey),
new x509.CRLDistributionPointsExtension([distributionPointUrl]),
new x509.AuthorityInfoAccessExtension({
caIssuers: new x509.GeneralName("url", caIssuerUrl)
}),
new x509.CertificatePolicyExtension(["2.5.29.32.0"]) // anyPolicy
];
// handle key usages
const csrKeyUsageExtension = csrObj.getExtension("2.5.29.15") as x509.KeyUsagesExtension;
let csrKeyUsages: CertKeyUsage[] = [];
if (csrKeyUsageExtension) {
csrKeyUsages = Object.values(CertKeyUsage).filter(
(keyUsage) => (x509.KeyUsageFlags[keyUsage] & csrKeyUsageExtension.usages) !== 0
);
}
const selectedKeyUsages = subscriber.keyUsages as CertKeyUsage[];
if (csrKeyUsages.some((keyUsage) => !selectedKeyUsages.includes(keyUsage))) {
throw new BadRequestError({
message: "Invalid key usage value based on subscriber's specified key usages"
});
}
const keyUsagesBitValue = selectedKeyUsages.reduce((accum, keyUsage) => accum | x509.KeyUsageFlags[keyUsage], 0);
if (keyUsagesBitValue) {
extensions.push(new x509.KeyUsagesExtension(keyUsagesBitValue, true));
}
// handle extended key usages
const csrExtendedKeyUsageExtension = csrObj.getExtension("2.5.29.37") as x509.ExtendedKeyUsageExtension;
let csrExtendedKeyUsages: CertExtendedKeyUsage[] = [];
if (csrExtendedKeyUsageExtension) {
csrExtendedKeyUsages = csrExtendedKeyUsageExtension.usages.map(
(ekuOid) => CertExtendedKeyUsageOIDToName[ekuOid as string]
);
}
const selectedExtendedKeyUsages = subscriber.extendedKeyUsages as CertExtendedKeyUsage[];
if (csrExtendedKeyUsages.some((eku) => !selectedExtendedKeyUsages.includes(eku))) {
throw new BadRequestError({
message: "Invalid extended key usage value based on subscriber's specified extended key usages"
});
}
if (selectedExtendedKeyUsages.length) {
extensions.push(
new x509.ExtendedKeyUsageExtension(
selectedExtendedKeyUsages.map((eku) => x509.ExtendedKeyUsage[eku]),
true
)
);
}
// attempt to read from CSR if altNames is not explicitly provided
let altNamesArray: {
type: "email" | "dns";
value: string;
}[] = [];
const sanExtension = csrObj.extensions.find((ext) => ext.type === "2.5.29.17");
if (sanExtension) {
const sanNames = new x509.GeneralNames(sanExtension.value);
altNamesArray = sanNames.items
.filter((value) => value.type === "email" || value.type === "dns")
.map((name) => ({
type: name.type as "email" | "dns",
value: name.value
}));
}
if (
altNamesArray
.map((altName) => altName.value)
.some((altName) => !subscriber.subjectAlternativeNames.includes(altName))
) {
throw new BadRequestError({
message: "Invalid subject alternative name based on subscriber's specified subject alternative names"
});
}
if (altNamesArray.length) {
const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false);
extensions.push(altNamesExtension);
}
const serialNumber = createSerialNumber();
const leafCert = await x509.X509CertificateGenerator.create({
serialNumber,
subject: csrObj.subject,
issuer: caCertObj.subject,
notBefore: notBeforeDate,
notAfter: notAfterDate,
signingKey: caPrivateKey,
publicKey: csrObj.publicKey,
signingAlgorithm: alg,
extensions
});
const kmsEncryptor = await kmsService.encryptWithKmsKey({
kmsId: certificateManagerKmsId
});
const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({
plainText: Buffer.from(new Uint8Array(leafCert.rawData))
});
const { caCert: issuingCaCertificate, caCertChain } = await getCaCertChain({
caCertId: ca.activeCaCertId,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
projectDAL,
kmsService
});
const certificateChainPem = `${issuingCaCertificate}\n${caCertChain}`.trim();
const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({
plainText: Buffer.from(certificateChainPem)
});
await certificateDAL.transaction(async (tx) => {
const cert = await certificateDAL.create(
{
caId: ca.id,
caCertId: caCert.id,
pkiSubscriberId: subscriber.id,
status: CertStatus.ACTIVE,
friendlyName: subscriber.commonName,
commonName: subscriber.commonName,
altNames: subscriber.subjectAlternativeNames.join(","),
serialNumber,
notBefore: notBeforeDate,
notAfter: notAfterDate,
keyUsages: selectedKeyUsages,
extendedKeyUsages: selectedExtendedKeyUsages
},
tx
);
await certificateBodyDAL.create(
{
certId: cert.id,
encryptedCertificate,
encryptedCertificateChain
},
tx
);
return cert;
});
return {
certificate: leafCert.toString("pem"),
certificateChain: `${issuingCaCertificate}\n${caCertChain}`.trim(),
issuingCaCertificate,
serialNumber,
ca,
commonName: subscriber.commonName,
subscriber
};
};
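signSubscriberCert accepts caller-controlled CSR content, so everything the CSR carries is checked against the subscriber record: the CN must match exactly, and the key usages, extended key usages, and SANs must each be a subset of what the subscriber permits. A small Go sketch of that subset rule; subsetOf is a hypothetical helper and the values are illustrative:

package main

import "fmt"

// subsetOf reports whether every requested value appears in the allowed set,
// returning the first offending value when it does not.
func subsetOf(requested, allowed []string) (string, bool) {
	allowedSet := make(map[string]struct{}, len(allowed))
	for _, a := range allowed {
		allowedSet[a] = struct{}{}
	}
	for _, r := range requested {
		if _, ok := allowedSet[r]; !ok {
			return r, false
		}
	}
	return "", true
}

func main() {
	csrUsages := []string{"digitalSignature", "keyAgreement"}
	subscriberUsages := []string{"digitalSignature", "keyEncipherment"}
	if bad, ok := subsetOf(csrUsages, subscriberUsages); !ok {
		fmt.Printf("reject CSR: key usage %q not permitted by subscriber\n", bad)
	}
}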
const listSubscriberCerts = async ({
subscriberName,
projectId,
offset,
limit,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TListPkiSubscriberCertsDTO) => {
const subscriber = await pkiSubscriberDAL.findOne({
name: subscriberName,
projectId
});
if (!subscriber) throw new NotFoundError({ message: `PKI subscriber named '${subscriberName}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: subscriber.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionPkiSubscriberActions.ListCerts,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
const certificates = await certificateDAL.find(
{
pkiSubscriberId: subscriber.id
},
{ offset, limit, sort: [["updatedAt", "desc"]] }
);
const count = await certificateDAL.countCertificatesForPkiSubscriber(subscriber.id);
return {
certificates,
totalCount: count
};
};
return {
createSubscriber,
getSubscriber,
updateSubscriber,
deleteSubscriber,
issueSubscriberCert,
signSubscriberCert,
listSubscriberCerts
};
};
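Both issuance paths pin the leaf validity window to now .. now+TTL and refuse to issue outside the CA certificate's own window, so a subscriber TTL can never produce a leaf that outlives the issuing CA cert. A compact Go sketch of that check; checkWindow is a hypothetical helper with illustrative times:

package main

import (
	"fmt"
	"time"
)

// checkWindow enforces that the leaf's [notBefore, notAfter] interval
// sits fully inside the CA certificate's validity window.
func checkWindow(ttl time.Duration, caNotBefore, caNotAfter time.Time) error {
	notBefore := time.Now()
	notAfter := notBefore.Add(ttl)
	if notBefore.Before(caNotBefore) {
		return fmt.Errorf("notBefore precedes CA certificate's notBefore")
	}
	if notAfter.After(caNotAfter) {
		return fmt.Errorf("notAfter exceeds CA certificate's notAfter")
	}
	return nil
}

func main() {
	caNB := time.Now().Add(-24 * time.Hour)
	caNA := time.Now().Add(48 * time.Hour)
	// A 30-day TTL outlives this CA cert, so the check fails.
	fmt.Println(checkWindow(30*24*time.Hour, caNB, caNA))
}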

View File

@@ -0,0 +1,54 @@
import { TProjectPermission } from "@app/lib/types";
import { CertExtendedKeyUsage, CertKeyUsage } from "../certificate/certificate-types";
export enum PkiSubscriberStatus {
ACTIVE = "active",
DISABLED = "disabled"
}
export type TCreatePkiSubscriberDTO = {
caId: string;
name: string;
commonName: string;
status: PkiSubscriberStatus;
ttl: string;
subjectAlternativeNames: string[];
keyUsages: CertKeyUsage[];
extendedKeyUsages: CertExtendedKeyUsage[];
} & TProjectPermission;
export type TGetPkiSubscriberDTO = {
subscriberName: string;
} & TProjectPermission;
export type TUpdatePkiSubscriberDTO = {
subscriberName: string;
caId?: string;
name?: string;
commonName?: string;
status?: PkiSubscriberStatus;
ttl?: string;
subjectAlternativeNames?: string[];
keyUsages?: CertKeyUsage[];
extendedKeyUsages?: CertExtendedKeyUsage[];
} & TProjectPermission;
export type TDeletePkiSubscriberDTO = {
subscriberName: string;
} & TProjectPermission;
export type TIssuePkiSubscriberCertDTO = {
subscriberName: string;
} & TProjectPermission;
export type TSignPkiSubscriberCertDTO = {
subscriberName: string;
csr: string;
} & TProjectPermission;
export type TListPkiSubscriberCertsDTO = {
subscriberName: string;
offset: number;
limit: number;
} & TProjectPermission;

View File

@@ -15,6 +15,7 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
import {
ProjectPermissionActions,
ProjectPermissionCertificateActions,
ProjectPermissionPkiSubscriberActions,
ProjectPermissionSecretActions,
ProjectPermissionSshHostActions,
ProjectPermissionSub
@@ -35,6 +36,7 @@ import { groupBy } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TProjectPermission } from "@app/lib/types";
import { TQueueServiceFactory } from "@app/queue";
import { TPkiSubscriberDALFactory } from "@app/services/pki-subscriber/pki-subscriber-dal";
import { ActorType } from "../auth/auth-type";
import { TCertificateDALFactory } from "../certificate/certificate-dal";
@@ -86,6 +88,7 @@ import {
TListProjectCasDTO,
TListProjectCertificateTemplatesDTO,
TListProjectCertsDTO,
TListProjectPkiSubscribersDTO,
TListProjectsDTO,
TListProjectSshCasDTO,
TListProjectSshCertificatesDTO,
@@ -145,6 +148,7 @@ type TProjectServiceFactoryDep = {
"findById" | "findByIdWithWorkflowIntegrationDetails"
>;
projectUserMembershipRoleDAL: Pick<TProjectUserMembershipRoleDALFactory, "create">;
pkiSubscriberDAL: Pick<TPkiSubscriberDALFactory, "find">;
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "find">;
certificateDAL: Pick<TCertificateDALFactory, "find" | "countCertificatesInProject">;
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getCertTemplatesByProjectId">;
@@ -207,6 +211,7 @@ export const projectServiceFactory = ({
certificateTemplateDAL,
pkiCollectionDAL,
pkiAlertDAL,
pkiSubscriberDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
@@ -1057,6 +1062,45 @@ export const projectServiceFactory = ({
};
};
/**
* Return list of PKI subscribers for project
*/
const listProjectPkiSubscribers = async ({
actorId,
actorOrgId,
actorAuthMethod,
actor,
projectId
}: TListProjectPkiSubscribersDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
const allowedSubscribers = [];
// (dangtony98): room to optimize
const subscribers = await pkiSubscriberDAL.find({ projectId });
for (const subscriber of subscribers) {
const canRead = permission.can(
ProjectPermissionPkiSubscriberActions.Read,
subject(ProjectPermissionSub.PkiSubscribers, {
name: subscriber.name
})
);
if (canRead) {
allowedSubscribers.push(subscriber);
}
}
return allowedSubscribers;
};
/**
* Return list of certificate templates for project
*/
@@ -1156,17 +1200,15 @@ export const projectServiceFactory = ({
const hosts = await sshHostDAL.findSshHostsWithLoginMappings(projectId);
for (const host of hosts) {
- try {
- ForbiddenError.from(permission).throwUnlessCan(
- ProjectPermissionSshHostActions.Read,
- subject(ProjectPermissionSub.SshHosts, {
- hostname: host.hostname
- })
- );
+ const canRead = permission.can(
+ ProjectPermissionSshHostActions.Read,
+ subject(ProjectPermissionSub.SshHosts, {
+ hostname: host.hostname
+ })
+ );
+ if (canRead) {
allowedHosts.push(host);
- } catch {
- // intentionally ignore projects where user lacks access
}
}
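This hunk (like the new listProjectPkiSubscribers above) replaces throwUnlessCan inside try/catch with a boolean permission.can check: the old catch swallowed any error, not just permission denials, while the new form only filters out items the actor cannot read. A generic Go sketch of the resulting shape; host and filterReadable are illustrative names:

package main

import "fmt"

type host struct{ hostname string }

// filterReadable keeps only the items the caller's permission predicate accepts,
// with no exception-based control flow involved.
func filterReadable(hosts []host, canRead func(host) bool) []host {
	var allowed []host
	for _, h := range hosts {
		if canRead(h) {
			allowed = append(allowed, h)
		}
	}
	return allowed
}

func main() {
	hosts := []host{{"db-1"}, {"web-1"}}
	readable := filterReadable(hosts, func(h host) bool { return h.hostname == "web-1" })
	fmt.Println(readable) // [{web-1}]
}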
@@ -1930,6 +1972,7 @@ export const projectServiceFactory = ({
listProjectSshCas,
listProjectSshHosts,
listProjectSshHostGroups,
listProjectPkiSubscribers,
listProjectSshCertificates,
listProjectSshCertificateTemplates,
updateVersionLimit,

View File

@@ -155,6 +155,7 @@ export type TListProjectCertificateTemplatesDTO = TProjectPermission;
export type TListProjectSshCasDTO = TProjectPermission;
export type TListProjectSshHostsDTO = TProjectPermission;
export type TListProjectSshCertificateTemplatesDTO = TProjectPermission;
export type TListProjectPkiSubscribersDTO = TProjectPermission;
export type TListProjectSshCertificatesDTO = {
offset: number;
limit: number;

View File

@@ -246,7 +246,8 @@ export const superAdminServiceFactory = ({
await microsoftTeamsService.initializeTeamsBot({
botAppId: decryptedAppId.toString(),
- botAppPassword: decryptedAppPassword.toString()
+ botAppPassword: decryptedAppPassword.toString(),
+ lastUpdatedAt: updatedServerCfg.updatedAt
});
}

View File

@@ -189,6 +189,7 @@ export type TSignCertificateEvent = {
properties: {
caId?: string;
certificateTemplateId?: string;
subscriberId?: string;
commonName: string;
userAgent?: string;
};
@@ -199,6 +200,7 @@ export type TIssueCertificateEvent = {
properties: {
caId?: string;
certificateTemplateId?: string;
subscriberId?: string;
commonName: string;
userAgent?: string;
};

View File

@@ -1,115 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
)
func TestCommitAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
commit string
commitAllowed bool
}{
{
allowlist: Allowlist{
Commits: []string{"commitA"},
},
commit: "commitA",
commitAllowed: true,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "commitA",
commitAllowed: false,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "",
commitAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.commitAllowed, tt.allowlist.CommitAllowed(tt.commit))
}
}
func TestRegexAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
secret string
regexAllowed bool
}{
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret: matchthis, done",
regexAllowed: true,
},
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret",
regexAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.regexAllowed, tt.allowlist.RegexAllowed(tt.secret))
}
}
func TestPathAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
path string
pathAllowed bool
}{
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a path",
pathAllowed: true,
},
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a ???",
pathAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.pathAllowed, tt.allowlist.PathAllowed(tt.path))
}
}

View File

@@ -1,279 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"fmt"
"regexp"
"strings"
"github.com/rs/zerolog/log"
"github.com/spf13/viper"
)
//go:embed infisical-scan.toml
var DefaultConfig string
// use to keep track of how many configs we can extend
// yea I know, globals bad
var extendDepth int
const maxExtendDepth = 2
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Description string
Extend Extend
Rules []struct {
ID string
Description string
Entropy float64
SecretGroup int
Regex string
Keywords []string
Path string
Tags []string
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Extend Extend
Path string
Description string
Rules map[string]Rule
Allowlist Allowlist
Keywords []string
// used to keep sarif results consistent
orderedRules []string
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords []string
orderedRules []string
)
rulesMap := make(map[string]Rule)
for _, r := range vc.Rules {
var allowlistRegexes []*regexp.Regexp
for _, a := range r.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range r.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
if r.Keywords == nil {
r.Keywords = []string{}
} else {
for _, k := range r.Keywords {
keywords = append(keywords, strings.ToLower(k))
}
}
if r.Tags == nil {
r.Tags = []string{}
}
var configRegex *regexp.Regexp
var configPathRegex *regexp.Regexp
if r.Regex == "" {
configRegex = nil
} else {
configRegex = regexp.MustCompile(r.Regex)
}
if r.Path == "" {
configPathRegex = nil
} else {
configPathRegex = regexp.MustCompile(r.Path)
}
r := Rule{
Description: r.Description,
RuleID: r.ID,
Regex: configRegex,
Path: configPathRegex,
SecretGroup: r.SecretGroup,
Entropy: r.Entropy,
Tags: r.Tags,
Keywords: r.Keywords,
Allowlist: Allowlist{
RegexTarget: r.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: r.Allowlist.Commits,
StopWords: r.Allowlist.StopWords,
},
}
orderedRules = append(orderedRules, r.RuleID)
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return Config{}, fmt.Errorf("%s invalid regex secret group %d, max regex secret group %d", r.Description, r.SecretGroup, r.Regex.NumSubexp())
}
rulesMap[r.RuleID] = r
}
var allowlistRegexes []*regexp.Regexp
for _, a := range vc.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range vc.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
c := Config{
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Allowlist: Allowlist{
RegexTarget: vc.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: vc.Allowlist.Commits,
StopWords: vc.Allowlist.StopWords,
},
Keywords: keywords,
orderedRules: orderedRules,
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
log.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
c.extendDefault()
} else if c.Extend.Path != "" {
c.extendPath()
}
}
return c, nil
}
func (c *Config) OrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.orderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msg("extending config with default config")
c.extend(cfg)
}
func (c *Config) extendPath() {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
for ruleID, rule := range extensionConfig.Rules {
if _, ok := c.Rules[ruleID]; !ok {
log.Trace().Msgf("adding %s to base config", ruleID)
c.Rules[ruleID] = rule
c.Keywords = append(c.Keywords, rule.Keywords...)
}
}
// append allowlists, not attempting to merge
c.Allowlist.Commits = append(c.Allowlist.Commits,
extensionConfig.Allowlist.Commits...)
c.Allowlist.Paths = append(c.Allowlist.Paths,
extensionConfig.Allowlist.Paths...)
c.Allowlist.Regexes = append(c.Allowlist.Regexes,
extensionConfig.Allowlist.Regexes...)
}

View File

@@ -1,170 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"regexp"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
)
const configPath = "../testdata/config/"
func TestTranslate(t *testing.T) {
tests := []struct {
cfgName string
cfg Config
wantError error
}{
{
cfgName: "allow_aws_re",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Regexes: []*regexp.Regexp{
regexp.MustCompile("AKIALALEMEL33243OLIA"),
},
},
},
},
},
},
{
cfgName: "allow_commit",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Commits: []string{"allowthiscommit"},
},
},
},
},
},
{
cfgName: "allow_path",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Paths: []*regexp.Regexp{
regexp.MustCompile(".go"),
},
},
},
},
},
},
{
cfgName: "entropy_group",
cfg: Config{
Rules: map[string]Rule{"discord-api-key": {
Description: "Discord API key",
Regex: regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
RuleID: "discord-api-key",
Allowlist: Allowlist{},
Entropy: 3.5,
SecretGroup: 3,
Tags: []string{},
Keywords: []string{},
},
},
},
},
{
cfgName: "bad_entropy_group",
cfg: Config{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "base",
cfg: Config{
Rules: map[string]Rule{
"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
},
"aws-secret-key": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key",
},
"aws-secret-key-again": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key-again",
},
},
},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
assert.Equal(t, cfg.Rules, tt.cfg.Rules)
}
}

View File

@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: example-auth-secret
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

View File

@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: changeThisOnProduction
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

File diff suppressed because it is too large

View File

@@ -1,43 +0,0 @@
package config
import (
"regexp"
)
// Rules contain information that define details on how to detect secrets
type Rule struct {
// Description is the description of the rule.
Description string
// RuleID is a unique identifier for this rule
RuleID string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
Allowlist Allowlist
}

View File

@@ -1,24 +0,0 @@
package config
import (
"regexp"
)
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}

View File

@@ -25,35 +25,31 @@ package detect
import (
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"github.com/rs/zerolog/log"
"github.com/Infisical/infisical-merge/report"
"github.com/Infisical/infisical-merge/detect/report"
)
- func IsNew(finding report.Finding, baseline []report.Finding) bool {
+ func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
- // the code requires maintanance if/when the Finding struct changes
+ // the code requires maintenance if/when the Finding struct changes
for _, b := range baseline {
- if finding.Author == b.Author &&
- finding.Commit == b.Commit &&
- finding.Date == b.Date &&
+ if finding.RuleID == b.RuleID &&
finding.Description == b.Description &&
- finding.Email == b.Email &&
- finding.EndColumn == b.EndColumn &&
+ finding.StartLine == b.StartLine &&
finding.EndLine == b.EndLine &&
- finding.Entropy == b.Entropy &&
- finding.File == b.File &&
- // Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
- finding.Match == b.Match &&
- finding.Message == b.Message &&
- finding.RuleID == b.RuleID &&
- finding.Secret == b.Secret &&
finding.StartColumn == b.StartColumn &&
- finding.StartLine == b.StartLine {
+ finding.EndColumn == b.EndColumn &&
+ (redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
+ finding.File == b.File &&
+ finding.Commit == b.Commit &&
+ finding.Author == b.Author &&
+ finding.Email == b.Email &&
+ finding.Date == b.Date &&
+ finding.Message == b.Message &&
+ // Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
+ finding.Entropy == b.Entropy {
return false
}
}
@@ -61,23 +57,12 @@ func IsNew(finding report.Finding, baseline []report.Finding) bool {
}
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
- var previousFindings []report.Finding
- jsonFile, err := os.Open(baselinePath)
+ bytes, err := os.ReadFile(baselinePath)
if err != nil {
return nil, fmt.Errorf("could not open %s", baselinePath)
}
- defer func() {
- if cerr := jsonFile.Close(); cerr != nil {
- log.Warn().Err(cerr).Msg("problem closing jsonFile handle")
- }
- }()
- bytes, err := io.ReadAll(jsonFile)
- if err != nil {
- return nil, fmt.Errorf("could not read data from the file %s", baselinePath)
- }
+ var previousFindings []report.Finding
err = json.Unmarshal(bytes, &previousFindings)
if err != nil {
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
@@ -85,3 +70,34 @@ func LoadBaseline(baselinePath string) ([]report.Finding, error) {
return previousFindings, nil
}
func (d *Detector) AddBaseline(baselinePath string, source string) error {
if baselinePath != "" {
absoluteSource, err := filepath.Abs(source)
if err != nil {
return err
}
absoluteBaseline, err := filepath.Abs(baselinePath)
if err != nil {
return err
}
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
if err != nil {
return err
}
baseline, err := LoadBaseline(baselinePath)
if err != nil {
return err
}
d.baseline = baseline
baselinePath = relativeBaseline
}
d.baselinePath = baselinePath
return nil
}
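The reworked IsNew takes the redact level so a redacted baseline can still suppress known findings: when redact > 0, the Match and Secret fields are skipped during comparison. A sketch of the effect, assuming the import paths implied by the diff above; the field values are illustrative:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect"
	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	// Baseline entry written with secrets redacted.
	baseline := []report.Finding{{RuleID: "aws-access-key", File: "main.go", Secret: "REDACTED"}}
	finding := report.Finding{RuleID: "aws-access-key", File: "main.go", Secret: "AKIA..."}

	// Secrets differ, but with redact > 0 the Match/Secret comparison is skipped,
	// so the finding still matches its baseline entry.
	fmt.Println(detect.IsNew(finding, 1, baseline)) // false: already in the baseline
}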

View File

@@ -1,160 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"errors"
"testing"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/report"
)
func TestIsNew(t *testing.T) {
tests := []struct {
findings report.Finding
baseline []report.Finding
expect bool
}{
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
},
},
expect: false,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0002",
},
},
expect: true,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
Tags: []string{"a", "b"},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
Tags: []string{"a", "c"},
},
},
expect: false, // Updated tags doesn't make it a new finding
},
}
for _, test := range tests {
assert.Equal(t, test.expect, IsNew(test.findings, test.baseline))
}
}
func TestFileLoadBaseline(t *testing.T) {
tests := []struct {
Filename string
ExpectedError error
}{
{
Filename: "../testdata/baseline/baseline.csv",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.csv is not supported"),
},
{
Filename: "../testdata/baseline/baseline.sarif",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.sarif is not supported"),
},
{
Filename: "../testdata/baseline/notfound.json",
ExpectedError: errors.New("could not open ../testdata/baseline/notfound.json"),
},
}
for _, test := range tests {
_, err := LoadBaseline(test.Filename)
assert.Equal(t, test.ExpectedError.Error(), err.Error())
}
}
func TestIgnoreIssuesInBaseline(t *testing.T) {
tests := []struct {
findings []report.Finding
baseline []report.Finding
expectCount int
}{
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
expectCount: 0,
},
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "a",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "b",
},
},
expectCount: 0,
},
}
for _, test := range tests {
d, _ := NewDetectorDefaultConfig()
d.baseline = test.baseline
for _, finding := range test.findings {
d.addFinding(finding)
}
assert.Equal(t, test.expectCount, len(d.findings))
}
}

66
cli/detect/cmd/scm/scm.go Normal file
View File

@@ -0,0 +1,66 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package scm
import (
"fmt"
"strings"
)
type Platform int
const (
UnknownPlatform Platform = iota
NoPlatform // Explicitly disable the feature
GitHubPlatform
GitLabPlatform
AzureDevOpsPlatform
// TODO: Add others.
)
func (p Platform) String() string {
return [...]string{
"unknown",
"none",
"github",
"gitlab",
"azuredevops",
}[p]
}
func PlatformFromString(s string) (Platform, error) {
switch strings.ToLower(s) {
case "", "unknown":
return UnknownPlatform, nil
case "none":
return NoPlatform, nil
case "github":
return GitHubPlatform, nil
case "gitlab":
return GitLabPlatform, nil
case "azuredevops":
return AzureDevOpsPlatform, nil
default:
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
}
}
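A quick usage sketch for the new scm package; the import path is assumed from the cli/detect/cmd/scm location shown above:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/cmd/scm"
)

func main() {
	p, err := scm.PlatformFromString("github")
	fmt.Println(p, err) // github <nil>

	// Unknown values fall back to UnknownPlatform with a descriptive error.
	if _, err := scm.PlatformFromString("bitbucket"); err != nil {
		fmt.Println(err) // invalid scm platform value: bitbucket
	}
}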

View File

@@ -23,63 +23,137 @@
package config
import (
"regexp"
"fmt"
"strings"
"golang.org/x/exp/maps"
"github.com/Infisical/infisical-merge/detect/regexp"
)
type AllowlistMatchCondition int
const (
AllowlistMatchOr AllowlistMatchCondition = iota
AllowlistMatchAnd
)
func (a AllowlistMatchCondition) String() string {
return [...]string{
"OR",
"AND",
}[a]
}
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
// Short human readable description of the allowlist.
Description string
// Regexes is slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// MatchCondition determines whether all criteria must match.
MatchCondition AllowlistMatchCondition
// RegexTarget
RegexTarget string
// Commits is a slice of commit SHAs that are allowed to be ignored. Defaults to "OR".
Commits []string
// Paths is a slice of path regular expressions that are allowed to be ignored.
Paths []*regexp.Regexp
// Commits is a slice of commit SHAs that are allowed to be ignored.
Commits []string
// Can be `match` or `line`.
//
// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
//
// If `line` the _Regexes_ will be tested against the entire line.
//
// If RegexTarget is empty, it will be tested against the found secret.
RegexTarget string
// Regexes is slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// StopWords is a slice of stop words that are allowed to be ignored.
// This targets the _secret_, not the content of the regex match like the
// Regexes slice.
StopWords []string
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
func (a *Allowlist) Validate() error {
if a.validated {
return nil
}
// Disallow empty allowlists.
if len(a.Commits) == 0 &&
len(a.Paths) == 0 &&
len(a.Regexes) == 0 &&
len(a.StopWords) == 0 {
return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
}
// Deduplicate commits and stopwords.
if len(a.Commits) > 0 {
uniqueCommits := make(map[string]struct{})
for _, commit := range a.Commits {
uniqueCommits[commit] = struct{}{}
}
a.Commits = maps.Keys(uniqueCommits)
}
if len(a.StopWords) > 0 {
uniqueStopwords := make(map[string]struct{})
for _, stopWord := range a.StopWords {
uniqueStopwords[stopWord] = struct{}{}
}
a.StopWords = maps.Keys(uniqueStopwords)
}
a.validated = true
return nil
}
// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) bool {
if c == "" {
return false
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
if a == nil || c == "" {
return false, ""
}
for _, commit := range a.Commits {
if commit == c {
return true
return true, c
}
}
return false
return false, ""
}
// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
if a == nil || path == "" {
return false
}
return anyRegexMatch(path, a.Paths)
}
// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(s string) bool {
return anyRegexMatch(s, a.Regexes)
func (a *Allowlist) RegexAllowed(secret string) bool {
if a == nil || secret == "" {
return false
}
return anyRegexMatch(secret, a.Regexes)
}
func (a *Allowlist) ContainsStopWord(s string) bool {
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
if a == nil || s == "" {
return false, ""
}
s = strings.ToLower(s)
for _, stopWord := range a.StopWords {
if strings.Contains(s, strings.ToLower(stopWord)) {
return true
return true, stopWord
}
}
return false
return false, ""
}
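The reworked Allowlist API in this hunk: Validate() rejects empty allowlists and deduplicates commits and stop words, and CommitAllowed/ContainsStopWord now return the matched value alongside the boolean so callers can report what triggered the ignore. A usage sketch against the package as shown; the values are illustrative:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/config"
)

func main() {
	a := &config.Allowlist{Commits: []string{"deadbeef", "deadbeef"}, StopWords: []string{"example"}}
	if err := a.Validate(); err != nil { // also dedupes commits/stopwords
		panic(err)
	}

	ok, commit := a.CommitAllowed("deadbeef")
	fmt.Println(ok, commit) // true deadbeef

	found, word := a.ContainsStopWord("this is just an EXAMPLE secret")
	fmt.Println(found, word) // true example
}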

426
cli/detect/config/config.go Normal file
View File

@@ -0,0 +1,426 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"errors"
"fmt"
"sort"
"strings"
"github.com/spf13/viper"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
var (
//go:embed gitleaks.toml
DefaultConfig string
// used to keep track of how many configs we can extend
// yea I know, globals bad
extendDepth int
)
const maxExtendDepth = 2
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Title string
Description string
Extend Extend
Rules []struct {
ID string
Description string
Path string
Regex string
SecretGroup int
Entropy float64
Keywords []string
Tags []string
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperRuleAllowlist
Allowlists []*viperRuleAllowlist
}
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperGlobalAllowlist
Allowlists []*viperGlobalAllowlist
}
type viperRuleAllowlist struct {
Description string
Condition string
Commits []string
Paths []string
RegexTarget string
Regexes []string
StopWords []string
}
type viperGlobalAllowlist struct {
TargetRules []string
viperRuleAllowlist `mapstructure:",squash"`
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Title string
Extend Extend
Path string
Description string
Rules map[string]Rule
Keywords map[string]struct{}
// used to keep sarif results consistent
OrderedRules []string
Allowlists []*Allowlist
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
DisabledRules []string
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords = make(map[string]struct{})
orderedRules []string
rulesMap = make(map[string]Rule)
ruleAllowlists = make(map[string][]*Allowlist)
)
// Validate individual rules.
for _, vr := range vc.Rules {
var (
pathPat *regexp.Regexp
regexPat *regexp.Regexp
)
if vr.Path != "" {
pathPat = regexp.MustCompile(vr.Path)
}
if vr.Regex != "" {
regexPat = regexp.MustCompile(vr.Regex)
}
if vr.Keywords == nil {
vr.Keywords = []string{}
} else {
for i, k := range vr.Keywords {
keyword := strings.ToLower(k)
keywords[keyword] = struct{}{}
vr.Keywords[i] = keyword
}
}
if vr.Tags == nil {
vr.Tags = []string{}
}
cr := Rule{
RuleID: vr.ID,
Description: vr.Description,
Regex: regexPat,
SecretGroup: vr.SecretGroup,
Entropy: vr.Entropy,
Path: pathPat,
Keywords: vr.Keywords,
Tags: vr.Tags,
}
// Parse the rule allowlists, including the older format for backwards compatibility.
if vr.AllowList != nil {
// TODO: Remove this in v9.
if len(vr.Allowlists) > 0 {
return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
}
vr.Allowlists = append(vr.Allowlists, vr.AllowList)
}
for _, a := range vr.Allowlists {
allowlist, err := parseAllowlist(a)
if err != nil {
return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
}
cr.Allowlists = append(cr.Allowlists, allowlist)
}
orderedRules = append(orderedRules, cr.RuleID)
rulesMap[cr.RuleID] = cr
}
// Assemble the config.
c := Config{
Title: vc.Title,
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Keywords: keywords,
OrderedRules: orderedRules,
}
// Parse the config allowlists, including the older format for backwards compatibility.
if vc.AllowList != nil {
// TODO: Remove this in v9.
if len(vc.Allowlists) > 0 {
return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
}
vc.Allowlists = append(vc.Allowlists, vc.AllowList)
}
for _, a := range vc.Allowlists {
allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
if err != nil {
return Config{}, fmt.Errorf("[[allowlists]] %w", err)
}
// Allowlists with |targetRules| aren't added to the global list.
if len(a.TargetRules) > 0 {
for _, ruleID := range a.TargetRules {
// It's not possible to validate |ruleID| until after extend.
ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
}
} else {
c.Allowlists = append(c.Allowlists, allowlist)
}
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
if err := c.extendDefault(); err != nil {
return Config{}, err
}
} else if c.Extend.Path != "" {
if err := c.extendPath(); err != nil {
return Config{}, err
}
}
}
// Validate the rules after everything has been assembled (including extended configs).
if extendDepth == 0 {
for _, rule := range c.Rules {
if err := rule.Validate(); err != nil {
return Config{}, err
}
}
// Populate targeted configs.
for ruleID, allowlists := range ruleAllowlists {
rule, ok := c.Rules[ruleID]
if !ok {
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
}
rule.Allowlists = append(rule.Allowlists, allowlists...)
c.Rules[ruleID] = rule
}
}
return c, nil
}
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
var matchCondition AllowlistMatchCondition
switch strings.ToUpper(a.Condition) {
case "AND", "&&":
matchCondition = AllowlistMatchAnd
case "", "OR", "||":
matchCondition = AllowlistMatchOr
default:
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
}
// Validate the target.
regexTarget := a.RegexTarget
if regexTarget != "" {
switch regexTarget {
case "secret":
regexTarget = ""
case "match", "line":
// do nothing
default:
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
}
}
var allowlistRegexes []*regexp.Regexp
for _, a := range a.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range a.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
allowlist := &Allowlist{
Description: a.Description,
MatchCondition: matchCondition,
Commits: a.Commits,
Paths: allowlistPaths,
RegexTarget: regexTarget,
Regexes: allowlistRegexes,
StopWords: a.StopWords,
}
if err := allowlist.Validate(); err != nil {
return nil, err
}
return allowlist, nil
}
func (c *Config) GetOrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.OrderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() error {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
logging.Debug().Msg("extending config with default config")
c.extend(cfg)
return nil
}
func (c *Config) extendPath() error {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
return nil
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
// Get config name for helpful log messages.
var configName string
if c.Extend.Path != "" {
configName = c.Extend.Path
} else {
configName = "default"
}
// Convert |Config.DisabledRules| into a map for ease of access.
disabledRuleIDs := map[string]struct{}{}
for _, id := range c.Extend.DisabledRules {
if _, ok := extensionConfig.Rules[id]; !ok {
logging.Warn().
Str("rule-id", id).
Str("config", configName).
Msg("Disabled rule doesn't exist in extended config.")
}
disabledRuleIDs[id] = struct{}{}
}
for ruleID, baseRule := range extensionConfig.Rules {
// Skip the rule.
if _, ok := disabledRuleIDs[ruleID]; ok {
logging.Debug().
Str("rule-id", ruleID).
Str("config", configName).
Msg("Ignoring rule from extended config.")
continue
}
currentRule, ok := c.Rules[ruleID]
if !ok {
// Rule doesn't exist, add it to the config.
c.Rules[ruleID] = baseRule
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.OrderedRules = append(c.OrderedRules, ruleID)
} else {
// Rule exists, merge our changes into the base.
if currentRule.Description != "" {
baseRule.Description = currentRule.Description
}
if currentRule.Entropy != 0 {
baseRule.Entropy = currentRule.Entropy
}
if currentRule.SecretGroup != 0 {
baseRule.SecretGroup = currentRule.SecretGroup
}
if currentRule.Regex != nil {
baseRule.Regex = currentRule.Regex
}
if currentRule.Path != nil {
baseRule.Path = currentRule.Path
}
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
baseRule.Allowlists = append(baseRule.Allowlists, currentRule.Allowlists...)
// The keywords from the base rule and the extended rule must be merged into the global keywords list
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.Rules[ruleID] = baseRule
}
}
// append allowlists, not attempting to merge
for _, a := range extensionConfig.Allowlists {
c.Allowlists = append(c.Allowlists, a)
}
// sort to keep extended rules in order
sort.Strings(c.OrderedRules)
}
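// A minimal in-package sketch (hypothetical rule IDs) of the merge semantics
// above: scalar fields set in the extending config override the base rule,
// slice fields are appended, disabled rules are dropped, and keywords are
// folded into the global keyword set.
func exampleExtend() Config {
child := Config{
Rules: map[string]Rule{
"example-token": {RuleID: "example-token", Description: "Org override"},
},
Keywords: map[string]struct{}{},
OrderedRules: []string{"example-token"},
}
child.Extend.DisabledRules = []string{"generic-api-key"}
base := Config{
Rules: map[string]Rule{
"example-token": {RuleID: "example-token", Description: "Base", Keywords: []string{"example-"}},
"generic-api-key": {RuleID: "generic-api-key"},
},
}
child.extend(base)
// child.Rules["example-token"].Description is now "Org override", the
// "example-" keyword is in child.Keywords, and "generic-api-key" is absent.
return child
}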

File diff suppressed because it is too large

cli/detect/config/rule.go (new file, 114 lines)

@@ -0,0 +1,114 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/detect/regexp"
)
// Rule contains the information that defines how to detect secrets.
type Rule struct {
// RuleID is a unique identifier for this rule
RuleID string
// Description is the description of the rule.
Description string
// Entropy is a float representing the minimum Shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract the secret from the regex
// match; it is the group whose entropy is checked when `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
Allowlists []*Allowlist
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
if r.validated {
return nil
}
// Ensure |id| is present.
if strings.TrimSpace(r.RuleID) == "" {
// Try to provide helpful context, since |id| is empty.
var context string
if r.Regex != nil {
context = ", regex: " + r.Regex.String()
} else if r.Path != nil {
context = ", path: " + r.Path.String()
} else if r.Description != "" {
context = ", description: " + r.Description
}
return fmt.Errorf("rule |id| is missing or empty" + context)
}
// Ensure the rule actually matches something.
if r.Regex == nil && r.Path == nil {
return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
}
// Ensure |secretGroup| works.
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
}
for _, allowlist := range r.Allowlists {
// This will probably never happen.
if allowlist == nil {
continue
}
if err := allowlist.Validate(); err != nil {
return fmt.Errorf("%s: %w", r.RuleID, err)
}
}
r.validated = true
return nil
}
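// A minimal in-package sketch (hypothetical rule) of the guard above: a rule
// whose |secretGroup| exceeds the regex's capture-group count fails Validate.
func exampleValidate() error {
r := Rule{
RuleID: "example-token",
Description: "Example Token",
Regex: regexp.MustCompile(`example-([0-9a-f]{32})`),
SecretGroup: 2, // the regex only has 1 group
}
// Returns "example-token: invalid regex secret group 2, max regex secret group 1".
return r.Validate()
}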


@@ -20,35 +20,27 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
-package report
+package config
import (
-"os"
-"strings"
-"github.com/Infisical/infisical-merge/config"
+"github.com/Infisical/infisical-merge/detect/regexp"
)
-const (
-// https://cwe.mitre.org/data/definitions/798.html
-CWE = "CWE-798"
-CWE_DESCRIPTION = "Use of Hard-coded Credentials"
-)
-func Write(findings []Finding, cfg config.Config, ext string, reportPath string) error {
-file, err := os.Create(reportPath)
-if err != nil {
-return err
+func anyRegexMatch(f string, res []*regexp.Regexp) bool {
+for _, re := range res {
+if regexMatched(f, re) {
+return true
+}
}
-ext = strings.ToLower(ext)
-switch ext {
-case ".json", "json":
-err = writeJson(findings, file)
-case ".csv", "csv":
-err = writeCsv(findings, file)
-case ".sarif", "sarif":
-err = writeSarif(cfg, findings, file)
-}
-return err
+return false
}
+func regexMatched(f string, re *regexp.Regexp) bool {
+if re == nil {
+return false
+}
+if re.FindString(f) != "" {
+return true
+}
+return false
+}

cli/detect/decoder.go (new file, 328 lines)

@@ -0,0 +1,328 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"encoding/base64"
"fmt"
"regexp"
"unicode"
"github.com/Infisical/infisical-merge/detect/logging"
)
var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
base64.StdEncoding.DecodeString,
base64.RawURLEncoding.DecodeString,
}
func init() {
// Basically look for anything that isn't just letters
for _, c := range `0123456789+/-_` {
b64LikelyChars[c] = 1
}
}
// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
// The parent segment in a segment tree. If nil, it is a root segment
parent *EncodedSegment
// Relative start/end are the bounds of the encoded value in the current pass.
relativeStart int
relativeEnd int
// Absolute start/end refer to the bounds of the root segment in this segment
// tree
absoluteStart int
absoluteEnd int
// Decoded start/end refer to the bounds of the decoded value in the current
// pass. These can differ from relative values because decoding can shrink
// or grow the size of the segment.
decodedStart int
decodedEnd int
// This is the actual decoded content in the segment
decodedValue string
// This is the type of encoding
encoding string
}
// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}
// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
return start <= s.decodedEnd && end >= s.decodedStart
}
// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
// The match is within the bounds of the segment so we just return
// the absolute start and end of the root segment.
if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
return []int{
s.absoluteStart,
s.absoluteEnd,
}
}
// Since it overlaps one side and/or the other, we're going to have to adjust
// and climb parents until we're either at the root or we've determined
// we're fully inside one of the parent segments.
adjustedMatchIndex := make([]int, 2)
if matchIndex[0] < s.decodedStart {
// It starts before the encoded segment so adjust the start to match
// the location before it was decoded
matchStartDelta := s.decodedStart - matchIndex[0]
adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
} else {
// It starts within the encoded segment so set the bound to the
// relative start
adjustedMatchIndex[0] = s.relativeStart
}
if matchIndex[1] > s.decodedEnd {
// It ends after the encoded segment so adjust the end to match
// the location before it was decoded
matchEndDelta := matchIndex[1] - s.decodedEnd
adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
} else {
// It ends within the encoded segment so set the bound to the relative end
adjustedMatchIndex[1] = s.relativeEnd
}
// We're still not at a root segment so we'll need to keep on adjusting
if s.parent != nil {
return s.parent.adjustMatchIndex(adjustedMatchIndex)
}
return adjustedMatchIndex
}
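// Worked example (hypothetical numbers): suppose a segment spans relative
// [10, 30) and its decoded value occupies decoded [10, 22). A match at
// decoded [12, 18) lies fully inside, so adjustMatchIndex returns the
// segment's absolute bounds. A match at decoded [5, 18) starts before the
// segment: matchStartDelta = 10-5 = 5, so the adjusted start becomes
// relativeStart-5 = 5, while the end clamps to relativeEnd = 30.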
// depth reports how many levels of decoding needed to be done (default is 1)
func (s EncodedSegment) depth() int {
depth := 1
// Climb the tree and increment the depth
for current := &s; current.parent != nil; current = current.parent {
depth++
}
return depth
}
// tags returns additional metadata tags related to the types of segments
func (s EncodedSegment) tags() []string {
return []string{
fmt.Sprintf("decoded:%s", s.encoding),
fmt.Sprintf("decode-depth:%d", s.depth()),
}
}
// Decoder decodes various types of data in place
type Decoder struct {
decodedMap map[string]string
}
// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
return &Decoder{
decodedMap: make(map[string]string),
}
}
// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
segments := d.findEncodedSegments(data, parentSegments)
if len(segments) > 0 {
result := bytes.NewBuffer(make([]byte, 0, len(data)))
relativeStart := 0
for _, segment := range segments {
result.WriteString(data[relativeStart:segment.relativeStart])
result.WriteString(segment.decodedValue)
relativeStart = segment.relativeEnd
}
result.WriteString(data[relativeStart:])
return result.String(), segments
}
return data, segments
}
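// A minimal in-package sketch (hypothetical input) of driving decode the way
// the detector does: keep feeding the decoded text and the previous pass's
// segments back in until a pass finds nothing new to decode.
func exampleDecode() string {
d := NewDecoder()
encoded := base64.StdEncoding.EncodeToString([]byte("hello gopher world"))
data := "config value: " + encoded
var segments []EncodedSegment
for {
var next []EncodedSegment
data, next = d.decode(data, segments)
if len(next) == 0 {
break // no segments found in this pass, so decoding is done
}
segments = next
}
return data // "config value: hello gopher world"
}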
// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
if len(data) == 0 {
return []EncodedSegment{}
}
matchIndices := b64Regexp.FindAllStringIndex(data, -1)
if matchIndices == nil {
return []EncodedSegment{}
}
segments := make([]EncodedSegment, 0, len(matchIndices))
// Keeps up with offsets from the text changing size as things are decoded
decodedShift := 0
for _, matchIndex := range matchIndices {
encodedValue := data[matchIndex[0]:matchIndex[1]]
if !isLikelyB64(encodedValue) {
d.decodedMap[encodedValue] = ""
continue
}
decodedValue, alreadyDecoded := d.decodedMap[encodedValue]
// We haven't decoded this yet, so go ahead and decode it
if !alreadyDecoded {
decodedValue = decodeValue(encodedValue)
d.decodedMap[encodedValue] = decodedValue
}
// Skip this segment because there was nothing to check
if len(decodedValue) == 0 {
continue
}
// Create a segment for the encoded data
segment := EncodedSegment{
relativeStart: matchIndex[0],
relativeEnd: matchIndex[1],
absoluteStart: matchIndex[0],
absoluteEnd: matchIndex[1],
decodedStart: matchIndex[0] + decodedShift,
decodedEnd: matchIndex[0] + decodedShift + len(decodedValue),
decodedValue: decodedValue,
encoding: "base64",
}
// Shift decoded start and ends based on size changes
decodedShift += len(decodedValue) - len(encodedValue)
// Adjust the absolute position of segments contained in parent segments
for _, parentSegment := range parentSegments {
if segment.isChildOf(parentSegment) {
segment.absoluteStart = parentSegment.absoluteStart
segment.absoluteEnd = parentSegment.absoluteEnd
segment.parent = &parentSegment
break
}
}
logging.Debug().Msgf("segment found: %#v", segment)
segments = append(segments, segment)
}
return segments
}
// decodeValue tries a list of decoders and returns the first successful result
func decodeValue(encodedValue string) string {
for _, decoder := range decoders {
decodedValue, err := decoder(encodedValue)
if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
return string(decodedValue)
}
}
return ""
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII || b[i] < '\t' {
return false
}
}
return true
}
// Skip a lot of method signatures and the like, at the risk of missing
// about 1% of base64
func isLikelyB64(s string) bool {
for _, c := range s {
if b64LikelyChars[c] != 0 {
return true
}
}
return false
}
// Find a segment where the decoded bounds overlaps a range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
for _, segment := range encodedSegments {
if segment.decodedOverlaps(start, end) {
return &segment
}
}
return nil
}
func (s EncodedSegment) currentLine(currentRaw string) string {
start := 0
end := len(currentRaw)
// Find the start of the range
for i := s.decodedStart; i > -1; i-- {
c := currentRaw[i]
if c == '\n' {
start = i
break
}
}
// Find the end of the range
for i := s.decodedEnd; i < end; i++ {
c := currentRaw[i]
if c == '\n' {
end = i
break
}
}
return currentRaw[start:end]
}

File diff suppressed because it is too large


@@ -1,754 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/report"
)
const configPath = "../testdata/config/"
const repoBasePath = "../testdata/repos/"
func TestDetect(t *testing.T) {
tests := []struct {
cfgName string
baselinePath string
fragment Fragment
// NOTE: for expected findings, all line numbers will be 0
// because line deltas are added _after_ the finding is created.
// I.e., if the finding is from a --no-git file, the line number will be
// increased by 1 in DetectFromFiles(). If the finding is from git,
// the line number will be increased by the patch delta.
expectedFindings []report.Finding
wantError error
}{
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \
\"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\"
// infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OKIA",
Match: "AKIALALEMEL33243OKIA",
File: "tmp.go",
Line: `awsToken := \"AKIALALEMEL33243OKIA\"`,
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.1464393,
},
},
},
{
cfgName: "escaped_character_group",
fragment: Fragment{
Raw: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "PyPI upload token",
Secret: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Match: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Line: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
File: "tmp.go",
RuleID: "pypi-upload-token",
Tags: []string{"key", "pypi"},
StartLine: 0,
EndLine: 0,
StartColumn: 1,
EndColumn: 86,
Entropy: 1.9606875,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: `awsToken := \"AKIALALEMEL33243OLIA\"`,
File: "tmp.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.0841837,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
Secret: "cafebabe:deadbeef",
Line: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
File: "tmp.sh",
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 60,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
Secret: "cafebabe:deadbeef",
File: "tmp.sh",
Line: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 21,
EndColumn: 74,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Sensitive URL",
Match: "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
Secret: "cafeb4b3:d3adb33f",
File: "tmp.sh",
Line: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
RuleID: "sidekiq-sensitive-url",
Tags: []string{},
Entropy: 2.984234,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 58,
},
},
},
{
cfgName: "allow_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_path",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_commit",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
CommitSHA: "allowthiscommit",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "Discord API key",
Match: "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.go",
RuleID: "discord-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 7,
EndColumn: 93,
},
},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Generic API Key",
Match: "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.py",
RuleID: "generic-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 22,
EndColumn: 93,
},
},
},
{
cfgName: "path_only",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Python Files",
Match: "file detected: tmp.py",
File: "tmp.py",
RuleID: "python-files-only",
Tags: []string{},
},
},
},
{
cfgName: "bad_entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: filepath.Join(configPath, "simple.toml"),
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_global_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "path_only",
baselinePath: ".baseline.json",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: ".baseline.json",
},
expectedFindings: []report.Finding{},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
cfg.Path = filepath.Join(configPath, tt.cfgName+".toml")
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
d := NewDetector(cfg)
d.baselinePath = tt.baselinePath
findings := d.Detect(tt.fragment)
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromGit tests the FromGit function
func TestFromGit(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "small"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 19,
EndColumn: 38,
Line: "\n awsToken := \"AKIALALEMEL33243OLIA\"",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
File: "main.go",
Date: "2021-11-02T23:37:53Z",
Commit: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587",
Author: "Zachary Rice",
Email: "zricer@protonmail.com",
Message: "Accidentally add a secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587:main.go:aws-access-key:20",
},
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
File: "foo/foo.go",
Date: "2021-11-02T23:48:06Z",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
{
source: filepath.Join(repoBasePath, "small"),
logOpts: "--all foo...",
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
Match: "AKIALALEMEL33243OLIA",
Date: "2021-11-02T23:48:06Z",
File: "foo/foo.go",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
findings, err := detector.DetectGit(tt.source, tt.logOpts, DetectType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestFromGitStaged(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "staged"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 7,
EndLine: 7,
StartColumn: 18,
EndColumn: 37,
Line: "\n\taws_token2 := \"AKIALALEMEL33243OLIA\" // this one is not",
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
File: "api/api.go",
SymlinkFile: "",
Commit: "",
Entropy: 3.0841837,
Author: "",
Email: "",
Date: "0001-01-01T00:00:00Z",
Message: "",
Tags: []string{
"key",
"AWS",
},
RuleID: "aws-access-key",
Fingerprint: "api/api.go:aws-access-key:7",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
detector.AddGitleaksIgnore(filepath.Join(tt.source, ".gitleaksignore"))
findings, err := detector.DetectGit(tt.source, tt.logOpts, ProtectStagedType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromFiles tests the FromFiles function
func TestFromFiles(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "nogit"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
SymlinkFile: "",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
{
source: filepath.Join(repoBasePath, "nogit", "main.go"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestDetectWithSymlinks(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "symlinks/file_symlink"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "Asymmetric Private Key",
StartLine: 1,
EndLine: 1,
StartColumn: 1,
EndColumn: 35,
Match: "-----BEGIN OPENSSH PRIVATE KEY-----",
Secret: "-----BEGIN OPENSSH PRIVATE KEY-----",
Line: "-----BEGIN OPENSSH PRIVATE KEY-----",
File: "../testdata/repos/symlinks/source_file/id_ed25519",
SymlinkFile: "../testdata/repos/symlinks/file_symlink/symlinked_id_ed25519",
RuleID: "apkey",
Tags: []string{"key", "AsymmetricPrivateKey"},
Entropy: 3.587164,
Fingerprint: "../testdata/repos/symlinks/source_file/id_ed25519:apkey:1",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func moveDotGit(from, to string) error {
repoDirs, err := os.ReadDir("../testdata/repos")
if err != nil {
return err
}
for _, dir := range repoDirs {
if to == ".git" {
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
if os.IsNotExist(err) {
// don't want to delete the only copy of .git accidentally
continue
}
os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
}
if !dir.IsDir() {
continue
}
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
if os.IsNotExist(err) {
continue
}
err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
if err != nil {
return err
}
}
return nil
}

cli/detect/directory.go (new file, 225 lines)

@@ -0,0 +1,225 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/h2non/filetype"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
const maxPeekSize = 25 * 1_000 // 25kb
func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
for pa := range paths {
d.Sema.Go(func() error {
logger := logging.With().Str("path", pa.Path).Logger()
logger.Trace().Msg("Scanning path")
f, err := os.Open(pa.Path)
if err != nil {
if os.IsPermission(err) {
logger.Warn().Msg("Skipping file: permission denied")
return nil
}
return err
}
defer func() {
_ = f.Close()
}()
// Get file size
fileInfo, err := f.Stat()
if err != nil {
return err
}
fileSize := fileInfo.Size()
if d.MaxTargetMegaBytes > 0 {
rawLength := fileSize / 1000000
if rawLength > int64(d.MaxTargetMegaBytes) {
logger.Debug().
Int64("size", rawLength).
Msg("Skipping file: exceeds --max-target-megabytes")
return nil
}
}
var (
// Buffer to hold file chunks
reader = bufio.NewReaderSize(f, chunkSize)
buf = make([]byte, chunkSize)
totalLines = 0
)
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Only check the filetype at the start of file.
if totalLines == 0 {
// TODO: could other optimizations be introduced here?
if mimetype, err := filetype.Match(buf[:n]); err != nil {
return nil
} else if mimetype.MIME.Type == "application" {
return nil // skip binary files
}
}
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return readErr
}
// Count the number of newlines in this chunk
chunk := peekBuf.String()
linesInChunk := strings.Count(chunk, "\n")
totalLines += linesInChunk
fragment := Fragment{
Raw: chunk,
Bytes: peekBuf.Bytes(),
}
if pa.Symlink != "" {
fragment.SymlinkFile = pa.Symlink
}
if isWindows {
fragment.FilePath = filepath.ToSlash(pa.Path)
fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
fragment.WindowsFilePath = pa.Path
} else {
fragment.FilePath = pa.Path
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logger.Debug().Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
// need to add 1 since line counting starts at 1
finding.StartLine += (totalLines - linesInChunk) + 1
finding.EndLine += (totalLines - linesInChunk) + 1
d.AddFinding(finding)
}
if timer != nil {
timer.Stop()
timer = nil
}
}
if err != nil {
if err == io.EOF {
return nil
}
return err
}
}
})
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
return d.findings, nil
}
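// A minimal sketch (hypothetical path; assumes an already-configured
// *Detector) of feeding DetectFiles: callers stream scan targets through a
// channel and close it once every target has been sent.
func exampleDetectFiles(d *Detector) ([]report.Finding, error) {
paths := make(chan sources.ScanTarget)
go func() {
defer close(paths)
paths <- sources.ScanTarget{Path: "testdata/config.yaml"}
}()
return d.DetectFiles(paths)
}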
// readUntilSafeBoundary consumes |r| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
// This hopefully avoids splitting. (https://github.com/gitleaks/gitleaks/issues/1651)
func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
if peekBuf.Len() == 0 {
return nil
}
// Does the buffer end in consecutive newlines?
var (
data = peekBuf.Bytes()
lastChar = data[len(data)-1]
newlineCount = 0 // Tracks consecutive newlines
)
if isWhitespace(lastChar) {
for i := len(data) - 1; i >= 0; i-- {
lastChar = data[i]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
return nil
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
break
}
}
}
// If not, read ahead until we (hopefully) find some.
newlineCount = 0
for {
data = peekBuf.Bytes()
// Check if the last character is a newline.
lastChar = data[len(data)-1]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
break
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
newlineCount = 0 // Reset if a non-newline character is found
}
// Stop growing the buffer if it reaches maxPeekSize
if (peekBuf.Len() - n) >= maxPeekSize {
break
}
// Read additional data into a temporary buffer
b, err := r.ReadByte()
if err != nil {
if err == io.EOF {
break
}
return err
}
peekBuf.WriteByte(b)
}
return nil
}
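// A minimal in-package sketch (assuming the package-level chunkSize constant)
// of the chunking pattern used in DetectFiles: read a fixed-size chunk, then
// extend it to the next blank line so secrets are less likely to be split.
func exampleChunk(r *bufio.Reader) (string, error) {
buf := make([]byte, chunkSize)
n, err := r.Read(buf)
if n == 0 {
return "", err
}
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(r, n, maxPeekSize, peekBuf); readErr != nil {
return "", readErr
}
return peekBuf.String(), nil
}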

cli/detect/git.go (new file, 214 lines)

@@ -0,0 +1,214 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"errors"
"fmt"
"net/url"
"os/exec"
"regexp"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.Finding, error) {
defer cmd.Wait()
var (
diffFilesCh = cmd.DiffFilesCh()
errCh = cmd.ErrCh()
)
// loop to range over both DiffFiles (stdout) and ErrCh (stderr)
for diffFilesCh != nil || errCh != nil {
select {
case gitdiffFile, open := <-diffFilesCh:
if !open {
diffFilesCh = nil
break
}
// skip binary and deleted files
if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
continue
}
// Check if commit is allowed
commitSHA := ""
if gitdiffFile.PatchHeader != nil {
commitSHA = gitdiffFile.PatchHeader.SHA
for _, a := range d.Config.Allowlists {
if ok, c := a.CommitAllowed(gitdiffFile.PatchHeader.SHA); ok {
logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
continue
}
}
}
d.addCommit(commitSHA)
d.Sema.Go(func() error {
for _, textFragment := range gitdiffFile.TextFragments {
if textFragment == nil {
return nil
}
fragment := Fragment{
Raw: textFragment.Raw(gitdiff.OpAdd),
CommitSHA: commitSHA,
FilePath: gitdiffFile.NewName,
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logging.Debug().
Str("commit", commitSHA[:7]).
Str("path", fragment.FilePath).
Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
d.AddFinding(augmentGitFinding(remote, finding, textFragment, gitdiffFile))
}
if timer != nil {
timer.Stop()
timer = nil
}
}
return nil
})
case err, open := <-errCh:
if !open {
errCh = nil
break
}
return d.findings, err
}
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
logging.Info().Msgf("%d commits scanned.", len(d.commitMap))
logging.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
return d.findings, nil
}
type RemoteInfo struct {
Platform scm.Platform
Url string
}
func NewRemoteInfo(platform scm.Platform, source string) *RemoteInfo {
if platform == scm.NoPlatform {
return &RemoteInfo{Platform: platform}
}
remoteUrl, err := getRemoteUrl(source)
if err != nil {
if strings.Contains(err.Error(), "No remote configured") {
logging.Debug().Msg("skipping finding links: repository has no configured remote.")
platform = scm.NoPlatform
} else {
logging.Error().Err(err).Msg("skipping finding links: unable to parse remote URL")
}
goto End
}
if platform == scm.UnknownPlatform {
platform = platformFromHost(remoteUrl)
if platform == scm.UnknownPlatform {
logging.Info().
Str("host", remoteUrl.Hostname()).
Msg("Unknown SCM platform. Use --platform to include links in findings.")
} else {
logging.Debug().
Str("host", remoteUrl.Hostname()).
Str("platform", platform.String()).
Msg("SCM platform parsed from host")
}
}
End:
var rUrl string
if remoteUrl != nil {
rUrl = remoteUrl.String()
}
return &RemoteInfo{
Platform: platform,
Url: rUrl,
}
}
var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)
func getRemoteUrl(source string) (*url.URL, error) {
// This will return the first remote — typically, "origin".
cmd := exec.Command("git", "ls-remote", "--quiet", "--get-url")
if source != "." {
cmd.Dir = source
}
stdout, err := cmd.Output()
if err != nil {
var exitError *exec.ExitError
if errors.As(err, &exitError) {
return nil, fmt.Errorf("command failed (%d): %w, stderr: %s", exitError.ExitCode(), err, string(bytes.TrimSpace(exitError.Stderr)))
}
return nil, err
}
remoteUrl := string(bytes.TrimSpace(stdout))
if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
}
remoteUrl = strings.TrimSuffix(remoteUrl, ".git")
parsedUrl, err := url.Parse(remoteUrl)
if err != nil {
return nil, fmt.Errorf("unable to parse remote URL: %w", err)
}
// Remove any user info.
parsedUrl.User = nil
return parsedUrl, nil
}
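// A minimal sketch (hypothetical remote) of the SSH-to-HTTPS rewrite above:
// scp-style remotes are turned into browsable URLs before links are built.
func exampleRewrite() string {
remote := "git@github.com:Infisical/infisical.git"
if m := sshUrlpat.FindStringSubmatch(remote); m != nil {
remote = fmt.Sprintf("https://%s/%s", m[1], m[2])
}
return remote // "https://github.com/Infisical/infisical"
}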
func platformFromHost(u *url.URL) scm.Platform {
switch strings.ToLower(u.Hostname()) {
case "github.com":
return scm.GitHubPlatform
case "gitlab.com":
return scm.GitLabPlatform
case "dev.azure.com", "visualstudio.com":
return scm.AzureDevOpsPlatform
default:
return scm.UnknownPlatform
}
}


@@ -1,143 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package git
import (
"bufio"
"io"
"os/exec"
"path/filepath"
"strings"
"time"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/rs/zerolog/log"
)
var ErrEncountered bool
// GitLog returns a channel of gitdiff.File objects from the
// git log -p command for the given source.
func GitLog(source string, logOpts string) (<-chan *gitdiff.File, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
if logOpts != "" {
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
args = append(args, strings.Split(logOpts, " ")...)
cmd = exec.Command("git", args...)
} else {
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
"--full-history", "--all")
}
log.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
go listenForStdErr(stderr)
if err := cmd.Start(); err != nil {
return nil, err
}
// HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
time.Sleep(50 * time.Millisecond)
return gitdiff.Parse(cmd, stdout)
}
// GitDiff returns a channel of gitdiff.File objects from
// the git diff command for the given source.
func GitDiff(source string, staged bool) (<-chan *gitdiff.File, error) {
sourceClean := filepath.Clean(source)
cmd := exec.Command("git", "-C", sourceClean, "diff", "-U0", ".")
if staged {
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0",
"--staged", ".")
}
log.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
go listenForStdErr(stderr)
if err := cmd.Start(); err != nil {
return nil, err
}
// HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
time.Sleep(50 * time.Millisecond)
return gitdiff.Parse(cmd, stdout)
}
// listenForStdErr listens for stderr output from git, logs it, and records
// the error so the caller can exit with a non-zero exit code
func listenForStdErr(stderr io.ReadCloser) {
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
// if git throws one of the following errors:
//
// exhaustive rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// inexact rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// we skip exiting the program as git log -p/git diff will continue
// to send data to stdout and finish executing. This next bit of
// code prevents gitleaks from stopping mid scan if this error is
// encountered
if strings.Contains(scanner.Text(),
"exhaustive rename detection was skipped") ||
strings.Contains(scanner.Text(),
"inexact rename detection was skipped") ||
strings.Contains(scanner.Text(),
"you may want to set your diff.renameLimit") {
log.Warn().Msg(scanner.Text())
} else {
log.Error().Msgf("[git] %s", scanner.Text())
// asynchronously set this error flag to true so that we can
// capture a log message and exit with a non-zero exit code
// This value should get set before the `git` command exits so it's
// safe-ish, although I know I know, bad practice.
ErrEncountered = true
}
}
}


@@ -1,158 +0,0 @@
package git_test
// TODO: commenting out this test for now because it's flaky. Alternatives to consider to get this working:
// -- use `git stash` instead of `restore()`
// const repoBasePath = "../../testdata/repos/"
// const expectPath = "../../testdata/expected/"
// func TestGitLog(t *testing.T) {
// tests := []struct {
// source string
// logOpts string
// expected string
// }{
// {
// source: filepath.Join(repoBasePath, "small"),
// expected: filepath.Join(expectPath, "git", "small.txt"),
// },
// {
// source: filepath.Join(repoBasePath, "small"),
// expected: filepath.Join(expectPath, "git", "small-branch-foo.txt"),
// logOpts: "--all foo...",
// },
// }
// err := moveDotGit("dotGit", ".git")
// if err != nil {
// t.Fatal(err)
// }
// defer func() {
// if err = moveDotGit(".git", "dotGit"); err != nil {
// t.Fatal(err)
// }
// }()
// for _, tt := range tests {
// files, err := git.GitLog(tt.source, tt.logOpts)
// if err != nil {
// t.Error(err)
// }
// var diffSb strings.Builder
// for f := range files {
// for _, tf := range f.TextFragments {
// diffSb.WriteString(tf.Raw(gitdiff.OpAdd))
// }
// }
// expectedBytes, err := os.ReadFile(tt.expected)
// if err != nil {
// t.Error(err)
// }
// expected := string(expectedBytes)
// if expected != diffSb.String() {
// // write string builder to .got file using os.Create
// err = os.WriteFile(strings.Replace(tt.expected, ".txt", ".got.txt", 1), []byte(diffSb.String()), 0644)
// if err != nil {
// t.Error(err)
// }
// t.Error("expected: ", expected, "got: ", diffSb.String())
// }
// }
// }
// func TestGitDiff(t *testing.T) {
// tests := []struct {
// source string
// expected string
// additions string
// target string
// }{
// {
// source: filepath.Join(repoBasePath, "small"),
// expected: "this line is added\nand another one",
// additions: "this line is added\nand another one",
// target: filepath.Join(repoBasePath, "small", "main.go"),
// },
// }
// err := moveDotGit("dotGit", ".git")
// if err != nil {
// t.Fatal(err)
// }
// defer func() {
// if err = moveDotGit(".git", "dotGit"); err != nil {
// t.Fatal(err)
// }
// }()
// for _, tt := range tests {
// noChanges, err := os.ReadFile(tt.target)
// if err != nil {
// t.Error(err)
// }
// err = os.WriteFile(tt.target, []byte(tt.additions), 0644)
// if err != nil {
// restore(tt.target, noChanges, t)
// t.Error(err)
// }
// files, err := git.GitDiff(tt.source, false)
// if err != nil {
// restore(tt.target, noChanges, t)
// t.Error(err)
// }
// for f := range files {
// sb := strings.Builder{}
// for _, tf := range f.TextFragments {
// sb.WriteString(tf.Raw(gitdiff.OpAdd))
// }
// if sb.String() != tt.expected {
// restore(tt.target, noChanges, t)
// t.Error("expected: ", tt.expected, "got: ", sb.String())
// }
// }
// restore(tt.target, noChanges, t)
// }
// }
// func restore(path string, data []byte, t *testing.T) {
// err := os.WriteFile(path, data, 0644)
// if err != nil {
// t.Fatal(err)
// }
// }
// func moveDotGit(from, to string) error {
// repoDirs, err := os.ReadDir("../../testdata/repos")
// if err != nil {
// return err
// }
// for _, dir := range repoDirs {
// if to == ".git" {
// _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
// if os.IsNotExist(err) {
// // dont want to delete the only copy of .git accidentally
// continue
// }
// os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
// }
// if !dir.IsDir() {
// continue
// }
// _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
// if os.IsNotExist(err) {
// continue
// }
// err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
// fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
// if err != nil {
// return err
// }
// }
// return nil
// }


@@ -72,6 +72,7 @@ func location(fragment Fragment, matchIndex []int) Location {
location.endColumn = (end - prevNewLine)
location.endLineIndex = newLineByteIndex
}
prevNewLine = pair[0]
}


@@ -1,82 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"testing"
)
// TestGetLocation tests the getLocation function.
func TestGetLocation(t *testing.T) {
tests := []struct {
linePairs [][]int
start int
end int
wantLocation Location
}{
{
linePairs: [][]int{
{0, 39},
{40, 55},
{56, 57},
},
start: 35,
end: 38,
wantLocation: Location{
startLine: 1,
startColumn: 36,
endLine: 1,
endColumn: 38,
startLineIndex: 0,
endLineIndex: 40,
},
},
{
linePairs: [][]int{
{0, 39},
{40, 55},
{56, 57},
},
start: 40,
end: 44,
wantLocation: Location{
startLine: 2,
startColumn: 1,
endLine: 2,
endColumn: 4,
startLineIndex: 40,
endLineIndex: 56,
},
},
}
for _, test := range tests {
loc := location(Fragment{newlineIndices: test.linePairs}, []int{test.start, test.end})
if loc != test.wantLocation {
t.Errorf("\nstartLine %d\nstartColumn: %d\nendLine: %d\nendColumn: %d\nstartLineIndex: %d\nendlineIndex %d",
loc.startLine, loc.startColumn, loc.endLine, loc.endColumn, loc.startLineIndex, loc.endLineIndex)
t.Error("got", loc, "want", test.wantLocation)
}
}
}

cli/detect/logging/log.go (new file, 72 lines)

@@ -0,0 +1,72 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package logging
import (
"os"
"github.com/rs/zerolog"
)
var Logger zerolog.Logger
func init() {
// send all logs to stderr
Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).
Level(zerolog.InfoLevel).
With().Timestamp().Logger()
}
func With() zerolog.Context {
return Logger.With()
}
func Trace() *zerolog.Event {
return Logger.Trace()
}
func Debug() *zerolog.Event {
return Logger.Debug()
}
func Info() *zerolog.Event {
return Logger.Info()
}
func Warn() *zerolog.Event {
return Logger.Warn()
}
func Error() *zerolog.Event {
return Logger.Error()
}
func Err(err error) *zerolog.Event {
return Logger.Err(err)
}
func Fatal() *zerolog.Event {
return Logger.Fatal()
}
func Panic() *zerolog.Event {
return Logger.Panic()
}

cli/detect/reader.go (new file, 149 lines)

@@ -0,0 +1,149 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"errors"
"io"
"github.com/Infisical/infisical-merge/detect/report"
)
// DetectReader accepts an io.Reader and a buffer size (in KB) for the reader,
// and returns any findings detected in the stream.
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
findings := []report.Finding{}
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return findings, readErr
}
fragment := Fragment{
Raw: peekBuf.String(),
}
for _, finding := range d.Detect(fragment) {
findings = append(findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if err == io.EOF {
break
}
return findings, err
}
}
return findings, nil
}
// StreamDetectReader streams the detection results from the provided io.Reader.
// It reads data using the specified buffer size (in KB) and processes each chunk through
// the existing detection logic. Findings are sent down the returned findings channel as soon as
// they are detected, while a separate error channel signals a terminal error (or nil upon successful completion).
// The function returns two channels:
// - findingsCh: a receive-only channel that emits report.Finding objects as they are found.
// - errCh: a receive-only channel that emits a single final error (or nil if no error occurred)
// once the stream ends.
//
// Recommended Usage:
//
// Since there will only ever be a single value on the errCh, it is recommended to consume the findingsCh
// first. Once findingsCh is closed, the consumer should then read from errCh to determine
// if the stream completed successfully or if an error occurred.
//
// This design avoids the need for a select loop, keeping client code simple.
//
// Example:
//
// // Assume detector is an instance of *Detector and myReader implements io.Reader.
// findingsCh, errCh := detector.StreamDetectReader(myReader, 64) // using 64 KB buffer size
//
// // Process findings as they arrive.
// for finding := range findingsCh {
// fmt.Printf("Found secret: %+v\n", finding)
// }
//
// // After the findings channel is closed, check the final error.
// if err := <-errCh; err != nil {
// log.Fatalf("StreamDetectReader encountered an error: %v", err)
// } else {
// fmt.Println("Scanning completed successfully.")
// }
func (d *Detector) StreamDetectReader(r io.Reader, bufSize int) (<-chan report.Finding, <-chan error) {
findingsCh := make(chan report.Finding, 1)
errCh := make(chan error, 1)
go func() {
defer close(findingsCh)
defer close(errCh)
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
for {
n, err := reader.Read(buf)
if n > 0 {
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
errCh <- readErr
return
}
fragment := Fragment{Raw: peekBuf.String()}
for _, finding := range d.Detect(fragment) {
findingsCh <- finding
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if errors.Is(err, io.EOF) {
errCh <- nil
return
}
errCh <- err
return
}
}
}()
return findingsCh, errCh
}
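
StreamDetectReader's doc comment already carries a streaming example; for the synchronous path, here is a comparable sketch. It assumes the vendored package keeps gitleaks' `NewDetectorDefaultConfig` constructor, and `app.env` is a hypothetical input file:

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/Infisical/infisical-merge/detect"
)

func main() {
	// Assumption: the vendored package retains gitleaks' default-config constructor.
	detector, err := detect.NewDetectorDefaultConfig()
	if err != nil {
		log.Fatal(err)
	}

	f, err := os.Open("app.env") // hypothetical file to scan
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Read in 10 KB chunks; DetectReader extends each chunk to a safe
	// boundary internally so secrets are not split across reads.
	findings, err := detector.DetectReader(f, 10)
	if err != nil {
		log.Fatal(err)
	}
	for _, finding := range findings {
		fmt.Printf("%s: rule %s at line %d\n", finding.File, finding.RuleID, finding.StartLine)
	}
}
```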


@@ -0,0 +1,37 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build !gore2regex
package regexp
import (
re "regexp"
)
const Version = "stdlib"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}


@@ -0,0 +1,37 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build gore2regex
package regexp
import (
re "github.com/wasilibs/go-re2"
)
const Version = "github.com/wasilibs/go-re2"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}
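
These two files give the rest of the detector a single `regexp` import whose engine is chosen at compile time: the default build uses the standard library, while `go build -tags gore2regex` swaps in go-re2. A small sketch (the `detect/regexp` import path is assumed from the surrounding vendored layout, and the pattern is illustrative):

```go
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	// The same code compiles against either engine:
	//   go build .                  -> Version == "stdlib"
	//   go build -tags gore2regex . -> Version == "github.com/wasilibs/go-re2"
	pat := regexp.MustCompile(`AKIA[0-9A-Z]{16}`) // AWS-access-key-shaped pattern
	fmt.Println(regexp.Version, pat.MatchString("AKIAABCDEFGHIJKLMNOP"))
}
```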


@@ -19,6 +19,7 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
const version = "v8.0.0"


@@ -26,16 +26,24 @@ import (
"encoding/csv"
"io"
"strconv"
"strings"
)
// writeCsv writes the list of findings to a writeCloser.
func writeCsv(f []Finding, w io.WriteCloser) error {
if len(f) == 0 {
type CsvReporter struct {
}
var _ Reporter = (*CsvReporter)(nil)
func (r *CsvReporter) Write(w io.WriteCloser, findings []Finding) error {
if len(findings) == 0 {
return nil
}
defer w.Close()
cw := csv.NewWriter(w)
err := cw.Write([]string{"RuleID",
var (
cw = csv.NewWriter(w)
err error
)
columns := []string{"RuleID",
"Commit",
"File",
"SymlinkFile",
@@ -50,12 +58,18 @@ func writeCsv(f []Finding, w io.WriteCloser) error {
"Date",
"Email",
"Fingerprint",
})
if err != nil {
"Tags",
}
// Best-effort "omitempty": only emit the Link column when the first finding has one.
if findings[0].Link != "" {
columns = append(columns, "Link")
}
if err = cw.Write(columns); err != nil {
return err
}
for _, f := range f {
err = cw.Write([]string{f.RuleID,
for _, f := range findings {
row := []string{f.RuleID,
f.Commit,
f.File,
f.SymlinkFile,
@@ -70,8 +84,13 @@ func writeCsv(f []Finding, w io.WriteCloser) error {
f.Date,
f.Email,
f.Fingerprint,
})
if err != nil {
strings.Join(f.Tags, " "),
}
if findings[0].Link != "" {
row = append(row, f.Link)
}
if err = cw.Write(row); err != nil {
return err
}
}


@@ -23,13 +23,17 @@
package report
import (
"math"
"strings"
)
// Finding contains information about strings that
// have been captured by a rule match.
type Finding struct {
// Rule is the name of the rule that was matched
RuleID string
Description string
StartLine int
EndLine int
StartColumn int
@@ -47,6 +51,7 @@ type Finding struct {
File string
SymlinkFile string
Commit string
Link string `json:",omitempty"`
// Entropy is the shannon entropy of Value
Entropy float32
@@ -57,16 +62,31 @@ type Finding struct {
Message string
Tags []string
// Rule is the name of the rule that was matched
RuleID string
// unique identifer
// unique identifier
Fingerprint string
}
// Redact removes sensitive information from a finding.
func (f *Finding) Redact() {
f.Line = strings.Replace(f.Line, f.Secret, "REDACTED", -1)
f.Match = strings.Replace(f.Match, f.Secret, "REDACTED", -1)
f.Secret = "REDACTED"
func (f *Finding) Redact(percent uint) {
secret := maskSecret(f.Secret, percent)
if percent >= 100 {
secret = "REDACTED"
}
f.Line = strings.Replace(f.Line, f.Secret, secret, -1)
f.Match = strings.Replace(f.Match, f.Secret, secret, -1)
f.Secret = secret
}
func maskSecret(secret string, percent uint) string {
if percent > 100 {
percent = 100
}
secretLen := float64(len(secret))
if secretLen <= 0 {
return secret
}
prc := float64(100 - percent)
lth := int64(math.RoundToEven(secretLen * prc / float64(100)))
return secret[:lth] + "..."
}
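
A worked sketch of the new percentage-based redaction (values are illustrative): with a 14-character secret and percent=50, maskSecret keeps RoundToEven(14 × 0.5) = 7 leading characters and appends an ellipsis, while percent >= 100 falls back to the old full-REDACTED behavior.

```go
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	f := report.Finding{
		Line:   "token = hunter2hunter2",
		Match:  "hunter2hunter2",
		Secret: "hunter2hunter2", // 14 characters
	}
	// Keep (100-50)% of the secret: RoundToEven(14 * 0.5) = 7 characters.
	f.Redact(50)
	fmt.Println(f.Secret) // hunter2...
	fmt.Println(f.Line)   // token = hunter2...

	// percent >= 100 redacts the secret entirely.
	g := report.Finding{Secret: "hunter2hunter2"}
	g.Redact(100)
	fmt.Println(g.Secret) // REDACTED
}
```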


@@ -27,10 +27,12 @@ import (
"io"
)
func writeJson(findings []Finding, w io.WriteCloser) error {
if len(findings) == 0 {
findings = []Finding{}
}
type JsonReporter struct {
}
var _ Reporter = (*JsonReporter)(nil)
func (t *JsonReporter) Write(w io.WriteCloser, findings []Finding) error {
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(findings)

cli/detect/report/junit.go Normal file

@@ -0,0 +1,129 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"encoding/xml"
"fmt"
"io"
"strconv"
)
type JunitReporter struct {
}
var _ Reporter = (*JunitReporter)(nil)
func (r *JunitReporter) Write(w io.WriteCloser, findings []Finding) error {
testSuites := TestSuites{
TestSuites: getTestSuites(findings),
}
if _, err := io.WriteString(w, xml.Header); err != nil {
return err
}
encoder := xml.NewEncoder(w)
encoder.Indent("", "\t")
return encoder.Encode(testSuites)
}
func getTestSuites(findings []Finding) []TestSuite {
return []TestSuite{
{
Failures: strconv.Itoa(len(findings)),
Name: "gitleaks",
Tests: strconv.Itoa(len(findings)),
TestCases: getTestCases(findings),
Time: "",
},
}
}
func getTestCases(findings []Finding) []TestCase {
testCases := []TestCase{}
for _, f := range findings {
testCase := TestCase{
Classname: f.Description,
Failure: getFailure(f),
File: f.File,
Name: getMessage(f),
Time: "",
}
testCases = append(testCases, testCase)
}
return testCases
}
func getFailure(f Finding) Failure {
return Failure{
Data: getData(f),
Message: getMessage(f),
Type: f.Description,
}
}
func getData(f Finding) string {
data, err := json.MarshalIndent(f, "", "\t")
if err != nil {
fmt.Println(err)
return ""
}
return string(data)
}
func getMessage(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected a secret in file %s, line %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine))
}
return fmt.Sprintf("%s has detected a secret in file %s, line %s, at commit %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine), f.Commit)
}
type TestSuites struct {
XMLName xml.Name `xml:"testsuites"`
TestSuites []TestSuite
}
type TestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Failures string `xml:"failures,attr"`
Name string `xml:"name,attr"`
Tests string `xml:"tests,attr"`
TestCases []TestCase `xml:"testcase"`
Time string `xml:"time,attr"`
}
type TestCase struct {
XMLName xml.Name `xml:"testcase"`
Classname string `xml:"classname,attr"`
Failure Failure `xml:"failure"`
File string `xml:"file,attr"`
Name string `xml:"name,attr"`
Time string `xml:"time,attr"`
}
type Failure struct {
XMLName xml.Name `xml:"failure"`
Data string `xml:",chardata"`
Message string `xml:"message,attr"`
Type string `xml:"type,attr"`
}


@@ -19,30 +19,20 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import "testing"
import (
"io"
)
func TestRedact(t *testing.T) {
tests := []struct {
findings []Finding
redact bool
}{
{
redact: true,
findings: []Finding{
{
Secret: "line containing secret",
Match: "secret",
},
}},
}
for _, test := range tests {
for _, f := range test.findings {
f.Redact()
if f.Secret != "REDACTED" {
t.Error("redact not redacting: ", f.Secret)
}
}
}
const (
// https://cwe.mitre.org/data/definitions/798.html
CWE = "CWE-798"
CWE_DESCRIPTION = "Use of Hard-coded Credentials"
StdoutReportPath = "-"
)
type Reporter interface {
Write(w io.WriteCloser, findings []Finding) error
}
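
With every output format behind this one-method interface, the caller can pick a reporter at runtime and hand it any io.WriteCloser. A minimal sketch (the format names are illustrative, not the CLI's actual flags):

```go
package main

import (
	"os"

	"github.com/Infisical/infisical-merge/detect/report"
)

// chooseReporter maps a format name to a concrete Reporter implementation.
func chooseReporter(format string) report.Reporter {
	switch format {
	case "csv":
		return &report.CsvReporter{}
	case "junit":
		return &report.JunitReporter{}
	default:
		return &report.JsonReporter{}
	}
}

func main() {
	findings := []report.Finding{{RuleID: "generic-api-key", File: "app.env"}}
	// os.Stdout satisfies io.WriteCloser, matching Write's signature.
	if err := chooseReporter("json").Write(os.Stdout, findings); err != nil {
		panic(err)
	}
}
```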


@@ -27,14 +27,20 @@ import (
"fmt"
"io"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/detect/config"
)
func writeSarif(cfg config.Config, findings []Finding, w io.WriteCloser) error {
type SarifReporter struct {
OrderedRules []config.Rule
}
var _ Reporter = (*SarifReporter)(nil)
func (r *SarifReporter) Write(w io.WriteCloser, findings []Finding) error {
sarif := Sarif{
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
Version: "2.1.0",
Runs: getRuns(cfg, findings),
Runs: r.getRuns(findings),
}
encoder := json.NewEncoder(w)
@@ -42,22 +48,22 @@ func writeSarif(cfg config.Config, findings []Finding, w io.WriteCloser) error {
return encoder.Encode(sarif)
}
func getRuns(cfg config.Config, findings []Finding) []Runs {
func (r *SarifReporter) getRuns(findings []Finding) []Runs {
return []Runs{
{
Tool: getTool(cfg),
Tool: r.getTool(),
Results: getResults(findings),
},
}
}
func getTool(cfg config.Config) Tool {
func (r *SarifReporter) getTool() Tool {
tool := Tool{
Driver: Driver{
Name: driver,
SemanticVersion: version,
InformationUri: "https://github.com/Infisical/infisical",
Rules: getRules(cfg),
InformationUri: "https://github.com/gitleaks/gitleaks",
Rules: r.getRules(),
},
}
@@ -73,26 +79,15 @@ func hasEmptyRules(tool Tool) bool {
return len(tool.Driver.Rules) == 0
}
func getRules(cfg config.Config) []Rules {
func (r *SarifReporter) getRules() []Rules {
// TODO for _, rule := range cfg.Rules {
var rules []Rules
for _, rule := range cfg.OrderedRules() {
shortDescription := ShortDescription{
Text: rule.Description,
}
if rule.Regex != nil {
shortDescription = ShortDescription{
Text: rule.Regex.String(),
}
} else if rule.Path != nil {
shortDescription = ShortDescription{
Text: rule.Path.String(),
}
}
for _, rule := range r.OrderedRules {
rules = append(rules, Rules{
ID: rule.RuleID,
Name: rule.Description,
Description: shortDescription,
ID: rule.RuleID,
Description: ShortDescription{
Text: rule.Description,
},
})
}
return rules
@@ -125,6 +120,9 @@ func getResults(findings []Finding) []Results {
Date: f.Date,
Author: f.Author,
},
Properties: Properties{
Tags: f.Tags,
},
}
results = append(results, r)
}
@@ -180,7 +178,6 @@ type FullDescription struct {
type Rules struct {
ID string `json:"id"`
Name string `json:"name"`
Description ShortDescription `json:"shortDescription"`
}
@@ -224,11 +221,16 @@ type Locations struct {
PhysicalLocation PhysicalLocation `json:"physicalLocation"`
}
type Properties struct {
Tags []string `json:"tags"`
}
type Results struct {
Message Message `json:"message"`
RuleId string `json:"ruleId"`
Locations []Locations `json:"locations"`
PartialFingerPrints `json:"partialFingerprints"`
Properties Properties `json:"properties"`
}
type Runs struct {


@@ -0,0 +1,68 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"fmt"
"io"
"os"
"text/template"
"github.com/Masterminds/sprig/v3"
)
type TemplateReporter struct {
template *template.Template
}
var _ Reporter = (*TemplateReporter)(nil)
func NewTemplateReporter(templatePath string) (*TemplateReporter, error) {
if templatePath == "" {
return nil, fmt.Errorf("template path cannot be empty")
}
file, err := os.ReadFile(templatePath)
if err != nil {
return nil, fmt.Errorf("error reading file: %w", err)
}
templateText := string(file)
// TODO: Add helper functions like escaping for JSON, XML, etc.
t := template.New("custom")
t = t.Funcs(sprig.TxtFuncMap())
t, err = t.Parse(templateText)
if err != nil {
return nil, fmt.Errorf("error parsing file: %w", err)
}
return &TemplateReporter{template: t}, nil
}
// Write renders the findings using the user-provided template.
// https://www.digitalocean.com/community/tutorials/how-to-use-templates-in-go
func (t *TemplateReporter) Write(w io.WriteCloser, findings []Finding) error {
if err := t.template.Execute(w, findings); err != nil {
return err
}
return nil
}
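
A usage sketch for the template reporter; `findings.tmpl` is a hypothetical file, and sprig functions such as `upper` come from the TxtFuncMap registered above:

```go
package main

import (
	"log"
	"os"

	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	// findings.tmpl (hypothetical) might contain:
	//   {{ range . }}{{ .RuleID | upper }} {{ .File }}:{{ .StartLine }}
	//   {{ end }}
	reporter, err := report.NewTemplateReporter("findings.tmpl")
	if err != nil {
		log.Fatal(err)
	}
	findings := []report.Finding{{RuleID: "generic-api-key", File: "app.env", StartLine: 3}}
	if err := reporter.Write(os.Stdout, findings); err != nil {
		log.Fatal(err)
	}
}
```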


@@ -0,0 +1,127 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"io/fs"
"os"
"path/filepath"
"runtime"
"github.com/fatih/semgroup"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
)
type ScanTarget struct {
Path string
Symlink string
}
var isWindows = runtime.GOOS == "windows"
func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, allowlists []*config.Allowlist) (<-chan ScanTarget, error) {
paths := make(chan ScanTarget)
s.Go(func() error {
defer close(paths)
return filepath.Walk(source,
func(path string, fInfo os.FileInfo, err error) error {
logger := logging.With().Str("path", path).Logger()
if err != nil {
if os.IsPermission(err) {
// This seems to only fail on directories at this stage.
logger.Warn().Msg("Skipping directory: permission denied")
return filepath.SkipDir
}
return err
}
// Empty; nothing to do here.
if fInfo.Size() == 0 {
return nil
}
// Unwrap symlinks, if |followSymlinks| is set.
scanTarget := ScanTarget{
Path: path,
}
if fInfo.Mode().Type() == fs.ModeSymlink {
if !followSymlinks {
logger.Debug().Msg("Skipping symlink")
return nil
}
realPath, err := filepath.EvalSymlinks(path)
if err != nil {
return err
}
realPathFileInfo, err := os.Stat(realPath)
if err != nil {
return err
}
if realPathFileInfo.IsDir() {
logger.Warn().Str("target", realPath).Msg("Skipping symlinked directory")
return nil
}
scanTarget.Path = realPath
scanTarget.Symlink = path
}
// TODO: Also run this check against the resolved symlink?
var skip bool
for _, a := range allowlists {
skip = a.PathAllowed(path) ||
// TODO: Remove this in v9.
// This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
(isWindows && a.PathAllowed(filepath.ToSlash(path)))
if skip {
break
}
}
if fInfo.IsDir() {
// Directory
if skip {
logger.Debug().Msg("Skipping directory due to global allowlist")
return filepath.SkipDir
}
if fInfo.Name() == ".git" {
// Don't scan .git directories.
// TODO: Add this to the config allowlist, instead of hard-coding it.
return filepath.SkipDir
}
} else {
// File
if skip {
logger.Debug().Msg("Skipping file due to global allowlist")
return nil
}
paths <- scanTarget
}
return nil
})
})
return paths, nil
}
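
Because the walk runs inside the caller-supplied semgroup, consumers drain the channel first and then call Wait to surface any walk error. A sketch (the `./repo` path and concurrency limit of 4 are illustrative; the `detect/sources` import path follows the vendored layout above):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/fatih/semgroup"

	"github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
	group := semgroup.NewGroup(context.Background(), 4)

	// nil allowlists: nothing is skipped beyond the hard-coded .git rule.
	paths, err := sources.DirectoryTargets("./repo", group, false, nil)
	if err != nil {
		log.Fatal(err)
	}
	for target := range paths {
		fmt.Println(target.Path, target.Symlink)
	}
	// Wait surfaces any error returned by the walking goroutine.
	if err := group.Wait(); err != nil {
		log.Fatal(err)
	}
}
```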

cli/detect/sources/git.go Normal file

@@ -0,0 +1,211 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"bufio"
"errors"
"io"
"os/exec"
"path/filepath"
"regexp"
"strings"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
)
var quotedOptPattern = regexp.MustCompile(`^(?:"[^"]+"|'[^']+')$`)
// GitCmd helps to work with Git's output.
type GitCmd struct {
cmd *exec.Cmd
diffFilesCh <-chan *gitdiff.File
errCh <-chan error
}
// NewGitLogCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed, then call
// `(*GitCmd).Wait()` to release resources.
func NewGitLogCmd(source string, logOpts string) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
if logOpts != "" {
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
// Ensure that the user-provided |logOpts| aren't wrapped in quotes.
// https://github.com/gitleaks/gitleaks/issues/1153
userArgs := strings.Split(logOpts, " ")
var quotedOpts []string
for _, element := range userArgs {
if quotedOptPattern.MatchString(element) {
quotedOpts = append(quotedOpts, element)
}
}
if len(quotedOpts) > 0 {
logging.Warn().Msgf("the following `--log-opts` values may not work as expected: %v\n\tsee https://github.com/gitleaks/gitleaks/issues/1153 for more information", quotedOpts)
}
args = append(args, userArgs...)
cmd = exec.Command("git", args...)
} else {
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
"--full-history", "--all")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// NewGitDiffCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed, then call
// `(*GitCmd).Wait()` to release resources.
func NewGitDiffCmd(source string, staged bool) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
cmd := exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff", ".")
if staged {
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff",
"--staged", ".")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// DiffFilesCh returns a channel with *gitdiff.File.
func (c *GitCmd) DiffFilesCh() <-chan *gitdiff.File {
return c.diffFilesCh
}
// ErrCh returns a channel that could produce an error if there is something in stderr.
func (c *GitCmd) ErrCh() <-chan error {
return c.errCh
}
// Wait waits for the command to exit and waits for any copying to
// stdin or copying from stdout or stderr to complete.
//
// Wait also closes underlying stdout and stderr.
func (c *GitCmd) Wait() (err error) {
return c.cmd.Wait()
}
// listenForStdErr drains git's stderr, logging benign messages as warnings and
// everything else as errors; it sends a single error down errCh if any
// non-benign output was seen, then closes the channel.
func listenForStdErr(stderr io.ReadCloser, errCh chan<- error) {
defer close(errCh)
var errEncountered bool
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
// if git throws one of the following errors:
//
// exhaustive rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// inexact rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// Auto packing the repository in background for optimum performance.
// See "git help gc" for manual housekeeping.
//
// we skip exiting the program as git log -p/git diff will continue
// to send data to stdout and finish executing. This next bit of
// code prevents gitleaks from stopping mid scan if this error is
// encountered
if strings.Contains(scanner.Text(),
"exhaustive rename detection was skipped") ||
strings.Contains(scanner.Text(),
"inexact rename detection was skipped") ||
strings.Contains(scanner.Text(),
"you may want to set your diff.renameLimit") ||
strings.Contains(scanner.Text(),
"See \"git help gc\" for manual housekeeping") ||
strings.Contains(scanner.Text(),
"Auto packing the repository in background for optimum performance") {
logging.Warn().Msg(scanner.Text())
} else {
logging.Error().Msgf("[git] %s", scanner.Text())
errEncountered = true
}
}
if errEncountered {
errCh <- errors.New("stderr is not empty")
return
}
}
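
Putting the pieces together: callers drain DiffFilesCh, then read the single value from ErrCh, then call Wait, in that order. A sketch scanning the full history of a hypothetical local clone:

```go
package main

import (
	"fmt"
	"log"

	"github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
	gitCmd, err := sources.NewGitLogCmd("./repo", "")
	if err != nil {
		log.Fatal(err)
	}
	// Drain the parsed diff files first...
	for file := range gitCmd.DiffFilesCh() {
		if file.IsBinary || file.IsDelete {
			continue
		}
		fmt.Println(file.NewName)
	}
	// ...then check whether git wrote anything fatal to stderr
	// (a closed, empty channel yields nil here)...
	if err := <-gitCmd.ErrCh(); err != nil {
		log.Printf("git reported: %v", err)
	}
	// ...and finally release the process resources.
	if err := gitCmd.Wait(); err != nil {
		log.Fatal(err)
	}
}
```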

Some files were not shown because too many files have changed in this diff.