Compare commits

63 Commits

Author SHA1 Message Date
Sheen Capadngan
9c611daada misc: updated org delete flow to clear session 2025-05-13 16:09:26 +08:00
x032205
71edb08942 Merge pull request #3587 from Infisical/ENG-2763
Fix approval request ordering
2025-05-12 23:54:27 -04:00
x032205
89d8261a43 Fix approval request ordering 2025-05-12 23:13:57 -04:00
Scott Wilson
a2b2b07185 Merge pull request #3584 from Infisical/sso-page
Improvements(org-settings): Refactor Organization Security Settings to SSO Page
2025-05-12 18:43:35 -07:00
Scott Wilson
76864ababa fix: correct doc casing 2025-05-12 18:37:05 -07:00
Scott Wilson
d17d40ebd9 improvements: refactor org security settings tab to sso page and update doc images 2025-05-12 17:18:40 -07:00
Daniel Hougaard
07df6803a5 Merge pull request #3581 from Infisical/daniel/unblock-dev
fix: move cli install to aws
2025-05-12 18:54:55 +04:00
Daniel Hougaard
a09d0e8948 fix: move cli install to aws 2025-05-12 18:47:02 +04:00
Daniel Hougaard
ee598560ec Merge pull request #3572 from Infisical/daniel/fix-secret-scaninng-public-keys
fix: update secret scanner to latest version
2025-05-12 11:13:51 +04:00
carlosmonastyrski
c629705c9c Merge pull request #3535 from Infisical/feat/addGroupsToSshHosts
feat(ssh-hosts): Add groups to ssh hosts allowed principals
2025-05-09 22:52:35 -03:00
Daniel Hougaard
be10f6e52a Merge pull request #3579 from Infisical/daniel/horizontal-scaling-ms-teams
fix(workflow-integrations): microsoft teams scaling issues
2025-05-10 01:11:37 +04:00
Scott Wilson
40c5ff0ad6 Merge pull request #3578 from Infisical/project-template-improvements
improvement(project-templates): Project templates UI improvements
2025-05-09 13:50:50 -07:00
Scott Wilson
8ecb5ca7bc remove extra margin 2025-05-09 13:47:28 -07:00
Daniel Hougaard
ab6a2b7dbb fix(workflow-integrations): microsoft teams scaling issues 2025-05-10 00:47:22 +04:00
carlosmonastyrski
81bfc04e7c Trim hostname input on SSH Host permission form and fix getWorkspaceUsers key invalidation 2025-05-09 17:10:01 -03:00
x032205
a757fceaed Merge pull request #3577 from Infisical/feat/docs-support-openapi-titles
feat(docs): Support OpenAPI titles for Zod descriptions
2025-05-09 15:49:49 -04:00
Scott Wilson
ce8e18f620 improvement: address feedback 2025-05-09 12:40:07 -07:00
Scott Wilson
d09c964647 fix: use tanstack router link 2025-05-09 12:32:37 -07:00
Scott Wilson
eeddbde600 improvement: update org project templates relocation banner 2025-05-09 12:23:05 -07:00
Daniel Hougaard
859b643e43 Delete ssh 2025-05-09 22:49:39 +04:00
Daniel Hougaard
91f71e0ef6 feat(cli): upgrade secret scanner 2025-05-09 22:48:56 +04:00
x032205
4e9e31eeb7 added credit 2025-05-09 13:45:36 -04:00
x032205
f6bc99b964 support openapi titles for zod description 2025-05-09 13:40:15 -04:00
Scott Wilson
679eb9dffc fix: correct project templates empty table display if feature is disabled 2025-05-09 10:14:03 -07:00
x032205
0754ae3aaf Merge pull request #3576 from Infisical/ENG-2692
feat(api): Rate limit for all email-sending endpoints
2025-05-09 11:37:08 -04:00
x032205
519a0c1bdf Merge branch 'main' into ENG-2692 2025-05-09 11:31:05 -04:00
x032205
e9d8979cf4 add rate limit to all email-sending endpoints 2025-05-09 11:29:53 -04:00
Maidul Islam
486d975fa0 Merge pull request #3575 from akhilmhdh/fix/octokit
feat: resolved esm error in octokit
2025-05-09 10:50:25 -04:00
=
42c49949b4 feat: resolved esm error in octokit 2025-05-09 20:13:08 +05:30
carlosmonastyrski
aea44088db Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-09 09:21:29 -03:00
Daniel Hougaard
e584c9ea95 test 2025-05-09 09:04:30 +04:00
Maidul Islam
428c60880a Update jumpcloud.mdx 2025-05-09 00:28:20 -04:00
Maidul Islam
2179b9a4d7 Update general.mdx 2025-05-09 00:27:43 -04:00
Daniel Hougaard
1921763fa8 fix: update to upcoming version 2025-05-09 04:43:13 +04:00
Daniel Hougaard
5408859a18 fix: update gitleaks/go-diff to latest version 2025-05-09 04:40:09 +04:00
Daniel Hougaard
8dfc0cfbe0 Merge pull request #3571 from Infisical/daniel/identities-ldap-docs
docs(identities): ldap auth
2025-05-09 04:15:11 +04:00
Daniel Hougaard
060199e58c fix: machine identities -> identities 2025-05-09 04:13:11 +04:00
Daniel Hougaard
3b9b17f8d5 requested changes 2025-05-09 04:12:21 +04:00
Daniel Hougaard
6addde2650 docs(identities): ldap auth 2025-05-09 03:44:15 +04:00
Tuan Dang
a6b3be72a9 Make minor PR adjustments 2025-05-08 14:02:25 -07:00
Daniel Hougaard
394bd6755f Merge pull request #3566 from Infisical/daniel/identity-ldap-auth
feat(identities): ldap auth
2025-05-08 23:53:47 +04:00
Daniel Hougaard
c21873ac4b Update identity-ldap-auth-router.ts 2025-05-08 23:48:08 +04:00
Daniel Hougaard
64b8c1a2de added filter check 2025-05-08 23:44:30 +04:00
Daniel Hougaard
de443c5ea1 fix: requested changes 2025-05-08 23:20:18 +04:00
Daniel Hougaard
a3b7df4e6b fix: addressed requested changes 2025-05-08 23:13:46 +04:00
Sheen Capadngan
a4b648ad95 misc: addressed tooltip display issue 2025-05-08 21:24:26 +08:00
x032205
04a8931cf6 Merge pull request #3568 from Infisical/pki-merge-fix
small migration fix
2025-05-08 01:23:36 -04:00
x032205
ab0b8c0f10 migration tweak 2025-05-08 01:22:34 -04:00
x032205
258836a605 migration tweak 2025-05-08 01:17:47 -04:00
Daniel Hougaard
0b31d7f860 feat(identities): ldap auth, requested changes 2025-05-08 08:14:29 +04:00
Daniel Hougaard
5c91d380b8 feat(identities): ldap auth 2025-05-08 07:55:22 +04:00
Daniel Hougaard
b908893a68 feat(identities): ldap auth 2025-05-08 07:49:23 +04:00
Maidul Islam
4d0275e589 Merge pull request #3565 from Infisical/remove-migration-folder
Remove unused migration folder
2025-05-07 20:53:51 -04:00
Maidul Islam
6ca7a990f3 unused folder remove 2025-05-07 20:34:01 -04:00
Scott Wilson
befd77eec2 Merge pull request #3563 from Infisical/policy-selection-modal
improvement(project-roles): Add Policy Selection Modal
2025-05-07 16:49:05 -07:00
Daniel Hougaard
1d44774913 Merge pull request #3564 from Infisical/daniel/generator-doc-imp
docs(k8s/generators): improve documentation
2025-05-08 03:20:30 +04:00
Maidul Islam
984552eea9 rephrase generator overview 2025-05-07 19:18:45 -04:00
Daniel Hougaard
2f4efad8ae Update infisical-push-secret-crd.mdx 2025-05-08 01:47:00 +04:00
carlosmonastyrski
bf85df7e36 Fix SSH table UI user groups issues 2025-05-06 08:37:19 -03:00
carlosmonastyrski
b9070a8fa3 Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-05 14:51:01 -03:00
carlosmonastyrski
3ea450e94a Add groups to ssh hosts allowed principals fix delete principal row issue 2025-05-02 13:41:53 -03:00
carlosmonastyrski
7d0574087c Add groups to ssh hosts allowed principals bot improvements 2025-05-02 13:36:05 -03:00
carlosmonastyrski
36916704be Add groups to ssh hosts allowed principals 2025-05-02 11:14:43 -03:00
237 changed files with 10208 additions and 7567 deletions

View File

@@ -133,8 +133,8 @@ RUN apt-get update && apt-get install -y \
 RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini

 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+    && apt-get update && apt-get install -y infisical=0.41.2 \
     && rm -rf /var/lib/apt/lists/*

 RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user

View File

@@ -127,8 +127,8 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*

 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-    && apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+    && apt-get update && apt-get install -y infisical=0.41.2 \
     && rm -rf /var/lib/apt/lists/*

 WORKDIR /

View File

@@ -54,8 +54,8 @@ COPY --from=build /app .

 # Install Infisical CLI
 RUN apt-get install -y curl bash && \
-    curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-    apt-get update && apt-get install -y infisical=0.8.1 git
+    curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
+    apt-get update && apt-get install -y infisical=0.41.2 git

 HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
     CMD node healthcheck.js

View File

@@ -55,9 +55,9 @@ RUN mkdir -p /etc/softhsm2/tokens && \
 # ? App setup

 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.8.1
+    apt-get install -y infisical=0.41.2

 WORKDIR /app

View File

@@ -64,9 +64,9 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
 # ? App setup

 # Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
     apt-get update && \
-    apt-get install -y infisical=0.8.1
+    apt-get install -y infisical=0.41.2

 WORKDIR /app

View File

@@ -33,7 +33,8 @@
         "@infisical/quic": "^1.0.8",
         "@node-saml/passport-saml": "^5.0.1",
         "@octokit/auth-app": "^7.1.1",
-        "@octokit/plugin-paginate-graphql": "^5.2.4",
+        "@octokit/core": "^5.2.1",
+        "@octokit/plugin-paginate-graphql": "^4.0.1",
         "@octokit/plugin-retry": "^5.0.5",
         "@octokit/rest": "^20.0.2",
         "@octokit/webhooks-types": "^7.3.1",
@@ -121,7 +122,7 @@
         "tweetnacl-util": "^0.15.1",
         "uuid": "^9.0.1",
         "zod": "^3.22.4",
-        "zod-to-json-schema": "^3.22.4"
+        "zod-to-json-schema": "^3.24.5"
       },
       "bin": {
         "backend": "dist/main.js"
@@ -7805,119 +7806,38 @@
       }
     },
     "node_modules/@octokit/core": {
-      "version": "6.1.5",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.5.tgz",
-      "integrity": "sha512-vvmsN0r7rguA+FySiCsbaTTobSftpIDIpPW81trAmsv9TGxg3YCujAxRYp/Uy8xmDgYCzzgulG62H7KYUFmeIg==",
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
+      "integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
       "license": "MIT",
-      "peer": true,
       "dependencies": {
-        "@octokit/auth-token": "^5.0.0",
-        "@octokit/graphql": "^8.2.2",
-        "@octokit/request": "^9.2.3",
-        "@octokit/request-error": "^6.1.8",
-        "@octokit/types": "^14.0.0",
-        "before-after-hook": "^3.0.2",
-        "universal-user-agent": "^7.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@octokit/core/node_modules/@octokit/auth-token": {
-      "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.2.tgz",
-      "integrity": "sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw==",
-      "license": "MIT",
-      "peer": true,
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@octokit/core/node_modules/@octokit/endpoint": {
-      "version": "10.1.4",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
-      "integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
-      "license": "MIT",
-      "peer": true,
-      "dependencies": {
-        "@octokit/types": "^14.0.0",
-        "universal-user-agent": "^7.0.2"
+        "@octokit/auth-token": "^4.0.0",
+        "@octokit/graphql": "^7.1.0",
+        "@octokit/request": "^8.4.1",
+        "@octokit/request-error": "^5.1.1",
+        "@octokit/types": "^13.0.0",
+        "before-after-hook": "^2.2.0",
+        "universal-user-agent": "^6.0.0"
       },
       "engines": {
         "node": ">= 18"
       }
     },
     "node_modules/@octokit/core/node_modules/@octokit/openapi-types": {
-      "version": "25.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.0.0.tgz",
-      "integrity": "sha512-FZvktFu7HfOIJf2BScLKIEYjDsw6RKc7rBJCdvCTfKsVnx2GEB/Nbzjr29DUdb7vQhlzS/j8qDzdditP0OC6aw==",
-      "license": "MIT",
-      "peer": true
-    },
-    "node_modules/@octokit/core/node_modules/@octokit/request": {
-      "version": "9.2.3",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.3.tgz",
-      "integrity": "sha512-Ma+pZU8PXLOEYzsWf0cn/gY+ME57Wq8f49WTXA8FMHp2Ps9djKw//xYJ1je8Hm0pR2lU9FUGeJRWOtxq6olt4w==",
-      "license": "MIT",
-      "peer": true,
-      "dependencies": {
-        "@octokit/endpoint": "^10.1.4",
-        "@octokit/request-error": "^6.1.8",
-        "@octokit/types": "^14.0.0",
-        "fast-content-type-parse": "^2.0.0",
-        "universal-user-agent": "^7.0.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@octokit/core/node_modules/@octokit/request-error": {
-      "version": "6.1.8",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz",
-      "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==",
-      "license": "MIT",
-      "peer": true,
-      "dependencies": {
-        "@octokit/types": "^14.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
+      "version": "24.2.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+      "license": "MIT"
     },
     "node_modules/@octokit/core/node_modules/@octokit/types": {
-      "version": "14.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.0.0.tgz",
-      "integrity": "sha512-VVmZP0lEhbo2O1pdq63gZFiGCKkm8PPp8AUOijlwPO6hojEVjspA0MWKP7E4hbvGxzFKNqKr6p0IYtOH/Wf/zA==",
+      "version": "13.10.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
       "license": "MIT",
-      "peer": true,
       "dependencies": {
-        "@octokit/openapi-types": "^25.0.0"
+        "@octokit/openapi-types": "^24.2.0"
       }
     },
-    "node_modules/@octokit/core/node_modules/fast-content-type-parse": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz",
-      "integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==",
-      "funding": [
-        {
-          "type": "github",
-          "url": "https://github.com/sponsors/fastify"
-        },
-        {
-          "type": "opencollective",
-          "url": "https://opencollective.com/fastify"
-        }
-      ],
-      "license": "MIT",
-      "peer": true
-    },
-    "node_modules/@octokit/core/node_modules/universal-user-agent": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-      "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==",
-      "license": "ISC",
-      "peer": true
-    },
     "node_modules/@octokit/endpoint": {
       "version": "9.0.6",
       "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
@@ -7947,105 +7867,34 @@
       }
     },
     "node_modules/@octokit/graphql": {
-      "version": "8.2.2",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.2.2.tgz",
-      "integrity": "sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
+      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
       "license": "MIT",
-      "peer": true,
       "dependencies": {
-        "@octokit/request": "^9.2.3",
-        "@octokit/types": "^14.0.0",
-        "universal-user-agent": "^7.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@octokit/graphql/node_modules/@octokit/endpoint": {
-      "version": "10.1.4",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
-      "integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
-      "license": "MIT",
-      "peer": true,
-      "dependencies": {
-        "@octokit/types": "^14.0.0",
-        "universal-user-agent": "^7.0.2"
+        "@octokit/request": "^8.4.1",
+        "@octokit/types": "^13.0.0",
+        "universal-user-agent": "^6.0.0"
       },
       "engines": {
         "node": ">= 18"
       }
     },
     "node_modules/@octokit/graphql/node_modules/@octokit/openapi-types": {
-      "version": "25.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.0.0.tgz",
-      "integrity": "sha512-FZvktFu7HfOIJf2BScLKIEYjDsw6RKc7rBJCdvCTfKsVnx2GEB/Nbzjr29DUdb7vQhlzS/j8qDzdditP0OC6aw==",
-      "license": "MIT",
-      "peer": true
-    },
-    "node_modules/@octokit/graphql/node_modules/@octokit/request": {
-      "version": "9.2.3",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.3.tgz",
-      "integrity": "sha512-Ma+pZU8PXLOEYzsWf0cn/gY+ME57Wq8f49WTXA8FMHp2Ps9djKw//xYJ1je8Hm0pR2lU9FUGeJRWOtxq6olt4w==",
-      "license": "MIT",
-      "peer": true,
-      "dependencies": {
-        "@octokit/endpoint": "^10.1.4",
-        "@octokit/request-error": "^6.1.8",
-        "@octokit/types": "^14.0.0",
-        "fast-content-type-parse": "^2.0.0",
-        "universal-user-agent": "^7.0.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@octokit/graphql/node_modules/@octokit/request-error": {
-      "version": "6.1.8",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz",
-      "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==",
-      "license": "MIT",
-      "peer": true,
-      "dependencies": {
-        "@octokit/types": "^14.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
+      "version": "24.2.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+      "license": "MIT"
     },
     "node_modules/@octokit/graphql/node_modules/@octokit/types": {
-      "version": "14.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.0.0.tgz",
-      "integrity": "sha512-VVmZP0lEhbo2O1pdq63gZFiGCKkm8PPp8AUOijlwPO6hojEVjspA0MWKP7E4hbvGxzFKNqKr6p0IYtOH/Wf/zA==",
+      "version": "13.10.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
       "license": "MIT",
-      "peer": true,
       "dependencies": {
-        "@octokit/openapi-types": "^25.0.0"
+        "@octokit/openapi-types": "^24.2.0"
      }
    },
-    "node_modules/@octokit/graphql/node_modules/fast-content-type-parse": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz",
-      "integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==",
-      "funding": [
-        {
-          "type": "github",
-          "url": "https://github.com/sponsors/fastify"
-        },
-        {
-          "type": "opencollective",
-          "url": "https://opencollective.com/fastify"
-        }
-      ],
-      "license": "MIT",
-      "peer": true
-    },
-    "node_modules/@octokit/graphql/node_modules/universal-user-agent": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
-      "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==",
-      "license": "ISC",
-      "peer": true
-    },
     "node_modules/@octokit/oauth-authorization-url": {
       "version": "7.1.1",
       "resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz",
@@ -8141,15 +7990,15 @@
       }
     },
     "node_modules/@octokit/plugin-paginate-graphql": {
-      "version": "5.2.4",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-5.2.4.tgz",
-      "integrity": "sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA==",
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.1.tgz",
+      "integrity": "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A==",
       "license": "MIT",
       "engines": {
         "node": ">= 18"
       },
       "peerDependencies": {
-        "@octokit/core": ">=6"
+        "@octokit/core": ">=5"
       }
     },
     "node_modules/@octokit/plugin-paginate-rest": {
@@ -8302,59 +8151,6 @@
         "node": ">= 18"
       }
     },
-    "node_modules/@octokit/rest/node_modules/@octokit/core": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
-      "integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/auth-token": "^4.0.0",
-        "@octokit/graphql": "^7.1.0",
-        "@octokit/request": "^8.4.1",
-        "@octokit/request-error": "^5.1.1",
-        "@octokit/types": "^13.0.0",
-        "before-after-hook": "^2.2.0",
-        "universal-user-agent": "^6.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@octokit/rest/node_modules/@octokit/graphql": {
-      "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
-      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/request": "^8.4.1",
-        "@octokit/types": "^13.0.0",
-        "universal-user-agent": "^6.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@octokit/rest/node_modules/@octokit/openapi-types": {
-      "version": "24.2.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
-      "license": "MIT"
-    },
-    "node_modules/@octokit/rest/node_modules/@octokit/types": {
-      "version": "13.10.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/openapi-types": "^24.2.0"
-      }
-    },
-    "node_modules/@octokit/rest/node_modules/before-after-hook": {
-      "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
-      "license": "Apache-2.0"
-    },
     "node_modules/@octokit/types": {
       "version": "12.4.0",
       "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.4.0.tgz",
@@ -12799,11 +12595,10 @@
       "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ=="
     },
     "node_modules/before-after-hook": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz",
-      "integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==",
-      "license": "Apache-2.0",
-      "peer": true
+      "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
+      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
+      "license": "Apache-2.0"
     },
     "node_modules/big-integer": {
       "version": "1.6.52",
@@ -21602,62 +21397,6 @@
         "node": ">=18"
       }
     },
-    "node_modules/probot/node_modules/@octokit/core": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
-      "integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/auth-token": "^4.0.0",
-        "@octokit/graphql": "^7.1.0",
-        "@octokit/request": "^8.4.1",
-        "@octokit/request-error": "^5.1.1",
-        "@octokit/types": "^13.0.0",
-        "before-after-hook": "^2.2.0",
-        "universal-user-agent": "^6.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/probot/node_modules/@octokit/core/node_modules/@octokit/types": {
-      "version": "13.10.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/openapi-types": "^24.2.0"
-      }
-    },
-    "node_modules/probot/node_modules/@octokit/graphql": {
-      "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
-      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/request": "^8.4.1",
-        "@octokit/types": "^13.0.0",
-        "universal-user-agent": "^6.0.0"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/probot/node_modules/@octokit/graphql/node_modules/@octokit/types": {
-      "version": "13.10.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/openapi-types": "^24.2.0"
-      }
-    },
-    "node_modules/probot/node_modules/@octokit/openapi-types": {
-      "version": "24.2.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
-      "license": "MIT"
-    },
     "node_modules/probot/node_modules/@octokit/plugin-retry": {
       "version": "6.0.1",
       "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz",
@@ -21690,12 +21429,6 @@
         "@octokit/core": "^5.0.0"
       }
     },
-    "node_modules/probot/node_modules/before-after-hook": {
-      "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
-      "license": "Apache-2.0"
-    },
     "node_modules/probot/node_modules/commander": {
       "version": "12.1.0",
       "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz",
@@ -27709,11 +27442,12 @@
       }
     },
     "node_modules/zod-to-json-schema": {
-      "version": "3.22.4",
-      "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.22.4.tgz",
-      "integrity": "sha512-2Ed5dJ+n/O3cU383xSY28cuVi0BCQhF8nYqWU5paEpl7fVdqdAmiLdqLyfblbNdfOFwFfi/mqU4O1pwc60iBhQ==",
+      "version": "3.24.5",
+      "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz",
+      "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==",
       "license": "ISC",
       "peerDependencies": {
-        "zod": "^3.22.4"
+        "zod": "^3.24.1"
       }
     }
   }

View File

@@ -152,7 +152,8 @@
     "@infisical/quic": "^1.0.8",
     "@node-saml/passport-saml": "^5.0.1",
     "@octokit/auth-app": "^7.1.1",
-    "@octokit/plugin-paginate-graphql": "^5.2.4",
+    "@octokit/core": "^5.2.1",
+    "@octokit/plugin-paginate-graphql": "^4.0.1",
    "@octokit/plugin-retry": "^5.0.5",
    "@octokit/rest": "^20.0.2",
    "@octokit/webhooks-types": "^7.3.1",
@@ -240,6 +241,6 @@
     "tweetnacl-util": "^0.15.1",
     "uuid": "^9.0.1",
     "zod": "^3.22.4",
-    "zod-to-json-schema": "^3.22.4"
+    "zod-to-json-schema": "^3.24.5"
   }
 }

View File

@@ -66,6 +66,8 @@ import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-a
 import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
 import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
 import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
+import { TIdentityLdapAuthServiceFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-service";
+import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
 import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
 import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
 import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
@@ -146,6 +148,13 @@ declare module "fastify" {
       providerAuthToken: string;
       externalProviderAccessToken?: string;
     };
+    passportMachineIdentity: {
+      identityId: string;
+      user: {
+        uid: string;
+        mail?: string;
+      };
+    };
     kmipUser: {
       projectId: string;
       clientId: string;
@@ -153,7 +162,9 @@
     };
     auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
     ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
-    ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
+    ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>> & {
+      allowedFields?: TAllowedFields[];
+    };
   }

   interface FastifyInstance {
@@ -199,6 +210,7 @@ declare module "fastify" {
     identityAzureAuth: TIdentityAzureAuthServiceFactory;
     identityOidcAuth: TIdentityOidcAuthServiceFactory;
     identityJwtAuth: TIdentityJwtAuthServiceFactory;
+    identityLdapAuth: TIdentityLdapAuthServiceFactory;
     accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
     accessApprovalRequest: TAccessApprovalRequestServiceFactory;
     secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
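These typings rely on TypeScript declaration merging: re-declaring fastify's own interfaces adds compile-time types for values that plugins attach at runtime. A minimal sketch of the pattern, with the field names mirroring the hunks above; the shapes are illustrative, and the exact interface each field lives on in Infisical's full file is an assumption here:

import "fastify";

declare module "fastify" {
  interface FastifyRequest {
    // decoration populated by the LDAP machine-identity passport strategy
    passportMachineIdentity: {
      identityId: string;
      user: { uid: string; mail?: string };
    };
  }
  interface FastifyInstance {
    // service-container entry added by this PR; typed as unknown in this
    // sketch (TIdentityLdapAuthServiceFactory in the real code)
    identityLdapAuth: unknown;
  }
}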

View File

@@ -432,6 +432,11 @@ import {
   TWorkflowIntegrationsInsert,
   TWorkflowIntegrationsUpdate
 } from "@app/db/schemas";
+import {
+  TIdentityLdapAuths,
+  TIdentityLdapAuthsInsert,
+  TIdentityLdapAuthsUpdate
+} from "@app/db/schemas/identity-ldap-auths";
 import {
   TMicrosoftTeamsIntegrations,
   TMicrosoftTeamsIntegrationsInsert,
@@ -735,6 +740,11 @@ declare module "knex/types/tables" {
       TIdentityJwtAuthsInsert,
       TIdentityJwtAuthsUpdate
     >;
+    [TableName.IdentityLdapAuth]: KnexOriginal.CompositeTableType<
+      TIdentityLdapAuths,
+      TIdentityLdapAuthsInsert,
+      TIdentityLdapAuthsUpdate
+    >;
     [TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
       TIdentityUaClientSecrets,
       TIdentityUaClientSecretsInsert,

View File

@@ -3,7 +3,7 @@ import { Knex } from "knex";
 import { TableName } from "../schemas";

 export async function up(knex: Knex): Promise<void> {
-  if (await knex.schema.hasTable(TableName.CertificateBody)) {
+  if (!(await knex.schema.hasColumn(TableName.CertificateBody, "encryptedCertificateChain"))) {
     await knex.schema.alterTable(TableName.CertificateBody, (t) => {
       t.binary("encryptedCertificateChain").nullable();
     });
@@ -25,7 +25,7 @@ export async function down(knex: Knex): Promise<void> {
     await knex.schema.dropTable(TableName.CertificateSecret);
   }

-  if (await knex.schema.hasTable(TableName.CertificateBody)) {
+  if (await knex.schema.hasColumn(TableName.CertificateBody, "encryptedCertificateChain")) {
     await knex.schema.alterTable(TableName.CertificateBody, (t) => {
       t.dropColumn("encryptedCertificateChain");
     });
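The tweak swaps a table-level guard for a column-level one: checking hasTable still lets ALTER TABLE ... ADD COLUMN throw when the column already exists, while hasColumn makes the migration safe to re-run. A minimal sketch of the pattern under a hypothetical table and column (not the actual schema):

import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  // guard on the column, not the table, so a partially-applied migration
  // can be retried without hitting a duplicate-column error
  if (!(await knex.schema.hasColumn("example_table", "example_column"))) {
    await knex.schema.alterTable("example_table", (t) => {
      t.binary("example_column").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn("example_table", "example_column")) {
    await knex.schema.alterTable("example_table", (t) => {
      t.dropColumn("example_column");
    });
  }
}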

View File

@@ -0,0 +1,22 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId"))) {
+    await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
+      t.uuid("groupId").nullable();
+      t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
+      t.unique(["sshHostLoginUserId", "groupId"]);
+    });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  if (await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId")) {
+    await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
+      t.dropUnique(["sshHostLoginUserId", "groupId"]);
+      t.dropColumn("groupId");
+    });
+  }
+}

View File

@@ -0,0 +1,39 @@
+import { Knex } from "knex";
+
+import { TableName } from "../schemas";
+import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
+
+export async function up(knex: Knex): Promise<void> {
+  if (!(await knex.schema.hasTable(TableName.IdentityLdapAuth))) {
+    await knex.schema.createTable(TableName.IdentityLdapAuth, (t) => {
+      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
+      t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
+      t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
+      t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
+      t.jsonb("accessTokenTrustedIps").notNullable();
+      t.uuid("identityId").notNullable().unique();
+      t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
+      t.binary("encryptedBindDN").notNullable();
+      t.binary("encryptedBindPass").notNullable();
+      t.binary("encryptedLdapCaCertificate").nullable();
+      t.string("url").notNullable();
+      t.string("searchBase").notNullable();
+      t.string("searchFilter").notNullable();
+      t.jsonb("allowedFields").nullable();
+      t.timestamps(true, true, true);
+    });
+  }
+
+  await createOnUpdateTrigger(knex, TableName.IdentityLdapAuth);
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.dropTableIfExists(TableName.IdentityLdapAuth);
+  await dropOnUpdateTrigger(knex, TableName.IdentityLdapAuth);
+}

View File

@@ -0,0 +1,32 @@
+// Code generated by automation script, DO NOT EDIT.
+// Automated by pulling database and generating zod schema
+// To update. Just run npm run generate:schema
+// Written by akhilmhdh.
+
+import { z } from "zod";
+
+import { zodBuffer } from "@app/lib/zod";
+
+import { TImmutableDBKeys } from "./models";
+
+export const IdentityLdapAuthsSchema = z.object({
+  id: z.string().uuid(),
+  accessTokenTTL: z.coerce.number().default(7200),
+  accessTokenMaxTTL: z.coerce.number().default(7200),
+  accessTokenNumUsesLimit: z.coerce.number().default(0),
+  accessTokenTrustedIps: z.unknown(),
+  identityId: z.string().uuid(),
+  encryptedBindDN: zodBuffer,
+  encryptedBindPass: zodBuffer,
+  encryptedLdapCaCertificate: zodBuffer.nullable().optional(),
+  url: z.string(),
+  searchBase: z.string(),
+  searchFilter: z.string(),
+  allowedFields: z.unknown().nullable().optional(),
+  createdAt: z.date(),
+  updatedAt: z.date()
+});
+
+export type TIdentityLdapAuths = z.infer<typeof IdentityLdapAuthsSchema>;
+export type TIdentityLdapAuthsInsert = Omit<z.input<typeof IdentityLdapAuthsSchema>, TImmutableDBKeys>;
+export type TIdentityLdapAuthsUpdate = Partial<Omit<z.input<typeof IdentityLdapAuthsSchema>, TImmutableDBKeys>>;

View File

@@ -80,6 +80,7 @@ export enum TableName {
   IdentityAwsAuth = "identity_aws_auths",
   IdentityOidcAuth = "identity_oidc_auths",
   IdentityJwtAuth = "identity_jwt_auths",
+  IdentityLdapAuth = "identity_ldap_auths",
   IdentityOrgMembership = "identity_org_memberships",
   IdentityProjectMembership = "identity_project_memberships",
   IdentityProjectMembershipRole = "identity_project_membership_role",
@@ -232,7 +233,8 @@ export enum IdentityAuthMethod {
   AWS_AUTH = "aws-auth",
   AZURE_AUTH = "azure-auth",
   OIDC_AUTH = "oidc-auth",
-  JWT_AUTH = "jwt-auth"
+  JWT_AUTH = "jwt-auth",
+  LDAP_AUTH = "ldap-auth"
 }

 export enum ProjectType {

View File

@@ -12,7 +12,8 @@ export const SshHostLoginUserMappingsSchema = z.object({
   createdAt: z.date(),
   updatedAt: z.date(),
   sshHostLoginUserId: z.string().uuid(),
-  userId: z.string().uuid().nullable().optional()
+  userId: z.string().uuid().nullable().optional(),
+  groupId: z.string().uuid().nullable().optional()
 });

 export type TSshHostLoginUserMappings = z.infer<typeof SshHostLoginUserMappingsSchema>;

View File

@@ -2,6 +2,7 @@ import { z } from "zod";

 import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
 import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
+import { writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";

@@ -18,6 +19,9 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
   server.route({
     url: "/",
     method: "POST",
+    config: {
+      rateLimit: writeLimit
+    },
     schema: {
       body: z.object({
         permissions: z.any().array(),

View File

@@ -98,6 +98,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/login",
     method: "POST",
+    config: {
+      rateLimit: writeLimit
+    },
     schema: {
       body: z.object({
         organizationSlug: z.string().trim()

View File

@@ -166,6 +166,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/redirect/saml2/organizations/:orgSlug",
     method: "GET",
+    config: {
+      rateLimit: readLimit
+    },
     schema: {
       params: z.object({
         orgSlug: z.string().trim()
@@ -192,6 +195,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/redirect/saml2/:samlConfigId",
     method: "GET",
+    config: {
+      rateLimit: readLimit
+    },
     schema: {
       params: z.object({
         samlConfigId: z.string().trim()
@@ -218,6 +224,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/saml2/:samlConfigId",
     method: "POST",
+    config: {
+      rateLimit: writeLimit
+    },
     schema: {
       params: z.object({
         samlConfigId: z.string().trim()

View File

@@ -196,6 +196,9 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
   server.route({
     url: "/Users",
     method: "POST",
+    config: {
+      rateLimit: writeLimit
+    },
     schema: {
       body: z.object({
         schemas: z.array(z.string()),
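The rate-limit hunks all follow the same shape: attaching a per-route limit through the route's config object, which @fastify/rate-limit picks up before the handler runs. A minimal self-contained sketch of that wiring; the max/timeWindow values are illustrative, since Infisical's actual readLimit/writeLimit definitions live in @app/server/config/rateLimiter and are not shown in this diff:

import Fastify from "fastify";
import rateLimit from "@fastify/rate-limit";

const writeLimit = { max: 50, timeWindow: "1 minute" };

const server = Fastify();

async function main() {
  // global: false makes rate limiting opt-in, enforced only on routes
  // that declare config.rateLimit
  await server.register(rateLimit, { global: false });

  server.route({
    url: "/login",
    method: "POST",
    config: { rateLimit: writeLimit }, // checked before the handler runs
    handler: async () => ({ ok: true })
  });

  await server.listen({ port: 3000 });
}

void main();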

View File

@@ -22,6 +22,7 @@ import { ActorType } from "@app/services/auth/auth-type";
 import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
 import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
 import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
+import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
 import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
 import { SecretSync, SecretSyncImportBehavior } from "@app/services/secret-sync/secret-sync-enums";
 import {
@@ -119,44 +120,60 @@ export enum EventType {
   CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth",
   UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth",
   GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth",
   ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth",
   UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth",
   GET_IDENTITY_TOKEN_AUTH = "get-identity-token-auth",
   REVOKE_IDENTITY_TOKEN_AUTH = "revoke-identity-token-auth",
   LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
   ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
   UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
   GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
   REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
   LOGIN_IDENTITY_OIDC_AUTH = "login-identity-oidc-auth",
   ADD_IDENTITY_OIDC_AUTH = "add-identity-oidc-auth",
   UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
   GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
   REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
   LOGIN_IDENTITY_JWT_AUTH = "login-identity-jwt-auth",
   ADD_IDENTITY_JWT_AUTH = "add-identity-jwt-auth",
   UPDATE_IDENTITY_JWT_AUTH = "update-identity-jwt-auth",
   GET_IDENTITY_JWT_AUTH = "get-identity-jwt-auth",
   REVOKE_IDENTITY_JWT_AUTH = "revoke-identity-jwt-auth",
   CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
   REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
   GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
   GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
   LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
   ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
   UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
   REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
   GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
   LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
   ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
   UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
   REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth",
   GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
   LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
   ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
   UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
   GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
   REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth",
+  LOGIN_IDENTITY_LDAP_AUTH = "login-identity-ldap-auth",
+  ADD_IDENTITY_LDAP_AUTH = "add-identity-ldap-auth",
+  UPDATE_IDENTITY_LDAP_AUTH = "update-identity-ldap-auth",
+  GET_IDENTITY_LDAP_AUTH = "get-identity-ldap-auth",
+  REVOKE_IDENTITY_LDAP_AUTH = "revoke-identity-ldap-auth",
   CREATE_ENVIRONMENT = "create-environment",
   UPDATE_ENVIRONMENT = "update-environment",
   DELETE_ENVIRONMENT = "delete-environment",
@@ -1034,6 +1051,55 @@ interface GetIdentityAzureAuthEvent {
   };
 }

+interface LoginIdentityLdapAuthEvent {
+  type: EventType.LOGIN_IDENTITY_LDAP_AUTH;
+  metadata: {
+    identityId: string;
+    ldapUsername: string;
+    ldapEmail?: string;
+  };
+}
+
+interface AddIdentityLdapAuthEvent {
+  type: EventType.ADD_IDENTITY_LDAP_AUTH;
+  metadata: {
+    identityId: string;
+    accessTokenTTL?: number;
+    accessTokenMaxTTL?: number;
+    accessTokenNumUsesLimit?: number;
+    accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
+    allowedFields?: TAllowedFields[];
+    url: string;
+  };
+}
+
+interface UpdateIdentityLdapAuthEvent {
+  type: EventType.UPDATE_IDENTITY_LDAP_AUTH;
+  metadata: {
+    identityId: string;
+    accessTokenTTL?: number;
+    accessTokenMaxTTL?: number;
+    accessTokenNumUsesLimit?: number;
+    accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
+    allowedFields?: TAllowedFields[];
+    url?: string;
+  };
+}
+
+interface GetIdentityLdapAuthEvent {
+  type: EventType.GET_IDENTITY_LDAP_AUTH;
+  metadata: {
+    identityId: string;
+  };
+}
+
+interface RevokeIdentityLdapAuthEvent {
+  type: EventType.REVOKE_IDENTITY_LDAP_AUTH;
+  metadata: {
+    identityId: string;
+  };
+}
+
 interface LoginIdentityOidcAuthEvent {
   type: EventType.LOGIN_IDENTITY_OIDC_AUTH;
   metadata: {
@@ -2785,6 +2851,11 @@ export type Event =
   | UpdateIdentityJwtAuthEvent
   | GetIdentityJwtAuthEvent
   | DeleteIdentityJwtAuthEvent
+  | LoginIdentityLdapAuthEvent
+  | AddIdentityLdapAuthEvent
+  | UpdateIdentityLdapAuthEvent
+  | GetIdentityLdapAuthEvent
+  | RevokeIdentityLdapAuthEvent
   | CreateEnvironmentEvent
   | GetEnvironmentEvent
   | UpdateEnvironmentEvent

View File

@@ -1,6 +1,6 @@
 import { ForbiddenError } from "@casl/ability";
 import { Octokit } from "@octokit/core";
-import { paginateGraphQL } from "@octokit/plugin-paginate-graphql";
+import { paginateGraphql } from "@octokit/plugin-paginate-graphql";
 import { Octokit as OctokitRest } from "@octokit/rest";

 import { OrgMembershipRole } from "@app/db/schemas";
@@ -18,7 +18,7 @@ import { TPermissionServiceFactory } from "../permission/permission-service";
 import { TGithubOrgSyncDALFactory } from "./github-org-sync-dal";
 import { TCreateGithubOrgSyncDTO, TDeleteGithubOrgSyncDTO, TUpdateGithubOrgSyncDTO } from "./github-org-sync-types";

-const OctokitWithPlugin = Octokit.plugin(paginateGraphQL);
+const OctokitWithPlugin = Octokit.plugin(paginateGraphql);

 type TGithubOrgSyncServiceFactoryDep = {
   githubOrgSyncDAL: TGithubOrgSyncDALFactory;
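This rename is part of the downgrade to @octokit/plugin-paginate-graphql v4 (pinned in the package.json diff above): v4 exports the camelCased paginateGraphql, while v5 renamed it paginateGraphQL and, per the "resolved esm error" commit, pulls in the ESM-only @octokit/core v6 line that broke the CJS build. A sketch of the v4 wiring under those assumptions (GraphQL query and token are illustrative):

import { Octokit } from "@octokit/core";
import { paginateGraphql } from "@octokit/plugin-paginate-graphql";

const OctokitWithPlugin = Octokit.plugin(paginateGraphql);

async function listRepoNames(token: string): Promise<string[]> {
  const octokit = new OctokitWithPlugin({ auth: token });
  // the plugin adds graphql.paginate, which follows pageInfo cursors for you
  const result = (await octokit.graphql.paginate(
    `query ($cursor: String) {
      viewer {
        repositories(first: 100, after: $cursor) {
          nodes { name }
          pageInfo { hasNextPage endCursor }
        }
      }
    }`
  )) as { viewer: { repositories: { nodes: { name: string }[] } } };
  return result.viewer.repositories.nodes.map((n) => n.name);
}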

View File

@@ -157,10 +157,23 @@ export const groupDALFactory = (db: TDbClient) => {
     }
   };

+  const findGroupsByProjectId = async (projectId: string, tx?: Knex) => {
+    try {
+      const docs = await (tx || db.replicaNode())(TableName.Groups)
+        .join(TableName.GroupProjectMembership, `${TableName.Groups}.id`, `${TableName.GroupProjectMembership}.groupId`)
+        .where(`${TableName.GroupProjectMembership}.projectId`, projectId)
+        .select(selectAllTableCols(TableName.Groups));
+      return docs;
+    } catch (error) {
+      throw new DatabaseError({ error, name: "Find groups by project id" });
+    }
+  };
+
   return {
     findGroups,
     findByOrgId,
     findAllGroupPossibleMembers,
+    findGroupsByProjectId,
     ...groupOrm
   };
 };

View File

@@ -176,7 +176,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
         db.ref("name").withSchema(TableName.Groups).as("groupName"),
         db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"),
         db.ref("firstName").withSchema(TableName.Users).as("firstName"),
-        db.ref("lastName").withSchema(TableName.Users).as("lastName")
+        db.ref("lastName").withSchema(TableName.Users).as("lastName"),
+        db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
       );

     return docs;
View File

@@ -14,6 +14,11 @@ export type TLDAPConfig = {
   caCert: string;
 };

+export type TTestLDAPConfigDTO = Omit<
+  TLDAPConfig,
+  "organization" | "id" | "groupSearchBase" | "groupSearchFilter" | "isActive" | "uniqueUserAttribute" | "searchBase"
+>;
+
 export type TCreateLdapCfgDTO = {
   orgId: string;
   isActive: boolean;

View File

@@ -2,15 +2,14 @@ import ldapjs from "ldapjs";

 import { logger } from "@app/lib/logger";

-import { TLDAPConfig } from "./ldap-config-types";
+import { TLDAPConfig, TTestLDAPConfigDTO } from "./ldap-config-types";

 export const isValidLdapFilter = (filter: string) => {
   try {
     ldapjs.parseFilter(filter);
     return true;
   } catch (error) {
-    logger.error("Invalid LDAP filter");
-    logger.error(error);
+    logger.error(error, "Invalid LDAP filter");
     return false;
   }
 };
@@ -20,7 +19,7 @@ export const isValidLdapFilter = (filter: string) => {
  * @param ldapConfig - The LDAP configuration to test
  * @returns {Boolean} isConnected - Whether or not the connection was successful
  */
-export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean> => {
+export const testLDAPConfig = async (ldapConfig: TTestLDAPConfigDTO): Promise<boolean> => {
   return new Promise((resolve) => {
     const ldapClient = ldapjs.createClient({
       url: ldapConfig.url,
View File

@@ -132,7 +132,7 @@ export const permissionDALFactory = (db: TDbClient) => {
     }
   };

-  const getProjectGroupPermissions = async (projectId: string) => {
+  const getProjectGroupPermissions = async (projectId: string, filterGroupId?: string) => {
     try {
       const docs = await db
         .replicaNode()(TableName.GroupProjectMembership)
@@ -148,6 +148,11 @@ export const permissionDALFactory = (db: TDbClient) => {
           `groupCustomRoles.id`
         )
         .where(`${TableName.GroupProjectMembership}.projectId`, "=", projectId)
+        .where((bd) => {
+          if (filterGroupId) {
+            void bd.where(`${TableName.GroupProjectMembership}.groupId`, "=", filterGroupId);
+          }
+        })
         .select(
           db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
           db.ref("id").withSchema(TableName.Groups).as("groupId"),

View File

@@ -630,6 +630,34 @@ export const permissionServiceFactory = ({
     return { permission };
   };

+  const checkGroupProjectPermission = async ({
+    groupId,
+    projectId,
+    checkPermissions
+  }: {
+    groupId: string;
+    projectId: string;
+    checkPermissions: ProjectPermissionSet;
+  }) => {
+    const rawGroupProjectPermissions = await permissionDAL.getProjectGroupPermissions(projectId, groupId);
+    const groupPermissions = rawGroupProjectPermissions.map((groupProjectPermission) => {
+      const rolePermissions =
+        groupProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || [];
+      const rules = buildProjectPermissionRules(rolePermissions);
+      const permission = createMongoAbility<ProjectPermissionSet>(rules, {
+        conditionsMatcher
+      });
+      return {
+        permission,
+        id: groupProjectPermission.groupId,
+        name: groupProjectPermission.username,
+        membershipId: groupProjectPermission.id
+      };
+    });
+
+    return groupPermissions.some((groupPermission) => groupPermission.permission.can(...checkPermissions));
+  };
+
   return {
     getUserOrgPermission,
     getOrgPermission,
@@ -639,6 +667,7 @@ export const permissionServiceFactory = ({
     getOrgPermissionByRole,
     getProjectPermissionByRole,
     buildOrgPermission,
-    buildProjectPermissionRules
+    buildProjectPermissionRules,
+    checkGroupProjectPermission
   };
 };
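The new helper builds a CASL ability per project role held by the group and returns whether any of them grants the requested action/subject pair (checkPermissions is spread into ability.can, so it is an [action, subject] tuple). A hypothetical call site, in the spirit of the SSH host group feature this PR wires it into; the specific enum members used here are assumptions, not confirmed by this diff:

const isAllowed = await permissionService.checkGroupProjectPermission({
  groupId: group.id,
  projectId,
  // [action, subject] tuple, spread into ability.can(...)
  checkPermissions: [ProjectPermissionSshHostActions.Edit, ProjectPermissionSub.SshHosts]
});

if (!isAllowed) {
  throw new BadRequestError({
    message: `Group ${group.slug} does not have SSH host permissions in this project`
  });
}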

View File

@@ -334,7 +334,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
           db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecret).as("commitSecretId"),
           db.ref("id").withSchema(TableName.SecretApprovalRequestSecret).as("commitId"),
           db.raw(
-            `DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+            `DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
           ),
           db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
           db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
@@ -483,7 +483,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
           db.ref("secretId").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitSecretId"),
           db.ref("id").withSchema(TableName.SecretApprovalRequestSecretV2).as("commitId"),
           db.raw(
-            `DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
+            `DENSE_RANK() OVER (PARTITION BY ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."createdAt" DESC) as rank`
          ),
           db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
           db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),

View File

@@ -28,6 +28,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
         `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
       )
       .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+      .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
       .where(`${TableName.SshHostGroup}.projectId`, projectId)
       .select(
         db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -35,7 +36,8 @@
         db.ref("name").withSchema(TableName.SshHostGroup),
         db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
         db.ref("username").withSchema(TableName.Users),
-        db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
+        db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
+        db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
       )
       .orderBy(`${TableName.SshHostGroup}.updatedAt`, "desc");
@@ -69,7 +71,8 @@
     const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
       loginUser,
       allowedPrincipals: {
-        usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+        usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+        groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
       }
     }));

     return {
@@ -99,6 +102,7 @@
         `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
       )
       .leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
+      .leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
       .where(`${TableName.SshHostGroup}.id`, sshHostGroupId)
       .select(
         db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -106,7 +110,8 @@
         db.ref("name").withSchema(TableName.SshHostGroup),
         db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
         db.ref("username").withSchema(TableName.Users),
-        db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
+        db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
+        db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
       );

     if (rows.length === 0) return null;
@@ -121,7 +126,8 @@
     const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
       loginUser,
       allowedPrincipals: {
-        usernames: unique(entries.map((e) => e.username)).filter(Boolean)
+        usernames: unique(entries.map((e) => e.username)).filter(Boolean),
+        groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
       }
     }));

View File

@@ -12,6 +12,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";
 import { TProjectDALFactory } from "@app/services/project/project-dal";
 import { TUserDALFactory } from "@app/services/user/user-dal";

+import { TGroupDALFactory } from "../group/group-dal";
 import { TLicenseServiceFactory } from "../license/license-service";
 import { createSshLoginMappings } from "../ssh-host/ssh-host-fns";
 import {
@@ -43,8 +44,12 @@ type TSshHostGroupServiceFactoryDep = {
   sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction" | "delete">;
   sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
   userDAL: Pick<TUserDALFactory, "find">;
-  permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
+  permissionService: Pick<
+    TPermissionServiceFactory,
+    "getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
+  >;
   licenseService: Pick<TLicenseServiceFactory, "getPlan">;
+  groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
 };

 export type TSshHostGroupServiceFactory = ReturnType<typeof sshHostGroupServiceFactory>;
@@ -58,7 +63,8 @@ export const sshHostGroupServiceFactory = ({
   sshHostLoginUserMappingDAL,
   userDAL,
   permissionService,
-  licenseService
+  licenseService,
+  groupDAL
 }: TSshHostGroupServiceFactoryDep) => {
   const createSshHostGroup = async ({
     projectId,
@@ -127,6 +133,7 @@ export const sshHostGroupServiceFactory = ({
       loginMappings,
       sshHostLoginUserDAL,
       sshHostLoginUserMappingDAL,
+      groupDAL,
       userDAL,
       permissionService,
       projectId,
@@ -194,6 +201,7 @@ export const sshHostGroupServiceFactory = ({
       loginMappings,
       sshHostLoginUserDAL,
      sshHostLoginUserMappingDAL,
+      groupDAL,
       userDAL,
       permissionService,
       projectId: sshHostGroup.projectId,

View File

@@ -9,12 +9,7 @@ export type TCreateSshHostGroupDTO = {
 export type TUpdateSshHostGroupDTO = {
   sshHostGroupId: string;
   name?: string;
-  loginMappings?: {
-    loginUser: string;
-    allowedPrincipals: {
-      usernames: string[];
-    };
-  }[];
+  loginMappings?: TLoginMapping[];
 } & Omit<TProjectPermission, "projectId">;

 export type TGetSshHostGroupDTO = {

View File

@@ -31,8 +31,18 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SshHostLoginUserMapping}.userId`)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.UserGroupMembership}.groupId`,
`${TableName.SshHostLoginUserMapping}.groupId`
)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.andWhere((bd) => {
void bd
.where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.orWhere(`${TableName.UserGroupMembership}.userId`, userId);
})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -58,8 +68,17 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.join(TableName.SshHost, `${TableName.SshHostGroupMembership}.sshHostId`, `${TableName.SshHost}.id`)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.UserGroupMembership}.groupId`,
`${TableName.SshHostLoginUserMapping}.groupId`
)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.andWhere((bd) => {
void bd
.where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.orWhere(`${TableName.UserGroupMembership}.userId`, userId);
})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -133,6 +152,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -144,6 +164,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug"),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
@@ -163,10 +184,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.select(
db.ref("sshHostId").withSchema(TableName.SshHostGroupMembership),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
)
.whereIn(`${TableName.SshHostGroupMembership}.sshHostId`, hostIds);
@@ -185,7 +208,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -197,7 +221,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(inheritedGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));
@@ -229,6 +254,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.id`, sshHostId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -241,7 +267,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
db.ref("hostSshCaId").withSchema(TableName.SshHost),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
if (rows.length === 0) return null;
@@ -257,7 +284,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(directGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -275,10 +303,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroupMembership}.sshHostId`, sshHostId)
.select(
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
const groupGrouped = groupBy(
@@ -289,7 +319,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(groupGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));
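
Taken together, the DAL changes above mean each aggregated login mapping now carries two principal lists instead of one. A minimal sketch of the resulting shape, with purely illustrative values:

// Hypothetical output of the aggregation above (values are illustrative only)
const exampleLoginMappings = [
  {
    loginUser: "ec2-user",
    allowedPrincipals: {
      usernames: ["alice", "bob"], // direct user principals
      groups: ["platform-team"] // group slugs resolved via the new Groups join
    },
    source: LoginMappingSource.HOST
  },
  {
    loginUser: "deploy",
    allowedPrincipals: { usernames: [], groups: ["release-managers"] },
    source: LoginMappingSource.HOST_GROUP // inherited from an SSH host group
  }
];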

View File

@@ -3,6 +3,7 @@ import { Knex } from "knex";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "../permission/project-permission";
import { TCreateSshLoginMappingsDTO } from "./ssh-host-types";
/**
@@ -15,6 +16,7 @@ export const createSshLoginMappings = async ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -35,7 +37,7 @@ export const createSshLoginMappings = async ({
tx
);
if (allowedPrincipals.usernames.length > 0) {
if (allowedPrincipals.usernames && allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
@@ -74,6 +76,41 @@ export const createSshLoginMappings = async ({
tx
);
}
if (allowedPrincipals.groups && allowedPrincipals.groups.length > 0) {
const projectGroups = await groupDAL.findGroupsByProjectId(projectId);
const groups = projectGroups.filter((g) => allowedPrincipals.groups?.includes(g.slug));
if (groups.length !== allowedPrincipals.groups?.length) {
throw new BadRequestError({
message: `Invalid group slugs: ${allowedPrincipals.groups
.filter((g) => !projectGroups.some((pg) => pg.slug === g))
.join(", ")}`
});
}
for await (const group of groups) {
// check that each group has access to the SSH project and has read access to hosts
const hasPermission = await permissionService.checkGroupProjectPermission({
groupId: group.id,
projectId,
checkPermissions: [ProjectPermissionSshHostActions.Read, ProjectPermissionSub.SshHosts]
});
if (!hasPermission) {
throw new BadRequestError({
message: `Group ${group.slug} does not have access to the SSH project`
});
}
}
await sshHostLoginUserMappingDAL.insertMany(
groups.map((group) => ({
sshHostLoginUserId: sshHostLoginUser.id,
groupId: group.id
})),
tx
);
}
}
};
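
For callers, the group branch above means a create request fails fast on unknown or unauthorized slugs. A hedged sketch of a payload that the updated flow accepts (names are placeholders):

// Illustrative only: a mapping that mixes direct users and groups
const loginMappings: TLoginMapping[] = [
  {
    loginUser: "deploy",
    allowedPrincipals: {
      usernames: ["alice"], // now optional
      groups: ["platform-team"] // must be a project group with read access to SSH hosts
    }
  }
];
// An unknown slug throws BadRequestError("Invalid group slugs: ..."); a known group
// without host read access throws "Group <slug> does not have access to the SSH project".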

View File

@@ -15,7 +15,24 @@ export const sanitizedSshHost = SshHostsSchema.pick({
export const loginMappingSchema = z.object({
loginUser: z.string().trim(),
allowedPrincipals: z.object({
usernames: z.array(z.string().trim()).transform((usernames) => Array.from(new Set(usernames)))
})
allowedPrincipals: z
.object({
usernames: z
.array(z.string().trim())
.transform((usernames) => Array.from(new Set(usernames)))
.optional(),
groups: z
.array(z.string().trim())
.transform((groups) => Array.from(new Set(groups)))
.optional()
})
.refine(
(data) => {
return (data.usernames && data.usernames.length > 0) || (data.groups && data.groups.length > 0);
},
{
message: "At least one username or group must be provided",
path: ["allowedPrincipals"]
}
)
});
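
A quick sanity check of the refined schema, assuming the same zod import as the surrounding file:

// Passes: at least one principal list is non-empty; the transform also dedupes
loginMappingSchema.parse({
  loginUser: "admin",
  allowedPrincipals: { groups: ["ops", "ops"] } // -> { groups: ["ops"] }
});

// Throws: "At least one username or group must be provided"
loginMappingSchema.parse({ loginUser: "admin", allowedPrincipals: {} });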

View File

@@ -1,6 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
@@ -19,6 +20,7 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
import {
convertActorToPrincipals,
createSshCert,
@@ -39,12 +41,14 @@ import {
type TSshHostServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findById" | "find">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "findGroupMembershipsByUserIdInOrg">;
sshHostDAL: Pick<
TSshHostDALFactory,
| "transaction"
@@ -58,7 +62,10 @@ type TSshHostServiceFactoryDep = {
>;
sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
permissionService: Pick<
TPermissionServiceFactory,
"getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
@@ -66,6 +73,8 @@ export type TSshHostServiceFactory = ReturnType<typeof sshHostServiceFactory>;
export const sshHostServiceFactory = ({
userDAL,
userGroupMembershipDAL,
groupDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -208,6 +217,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -278,6 +288,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId: host.projectId,
@@ -387,10 +398,14 @@ export const sshHostServiceFactory = ({
userDAL
});
const userGroups = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(actorId, actorOrgId);
const userGroupSlugs = userGroups.map((g) => g.groupSlug);
const mapping = host.loginMappings.find(
(m) =>
m.loginUser === loginUser &&
m.allowedPrincipals.usernames.some((allowed) => internalPrincipals.includes(allowed))
(m.allowedPrincipals.usernames?.some((allowed) => internalPrincipals.includes(allowed)) ||
m.allowedPrincipals.groups?.some((allowed) => userGroupSlugs.includes(allowed)))
);
if (!mapping) {

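Put differently, a mapping now matches when the login user matches and the actor appears in either principal list. A hedged restatement of the predicate with illustrative values:

// Illustrative only
const internalPrincipals = ["alice"]; // resolved from the requesting actor
const userGroupSlugs = ["platform-team"]; // via findGroupMembershipsByUserIdInOrg
const matches = (m: TLoginMapping, requestedLoginUser: string) =>
  m.loginUser === requestedLoginUser &&
  (m.allowedPrincipals.usernames?.some((u) => internalPrincipals.includes(u)) ||
    m.allowedPrincipals.groups?.some((g) => userGroupSlugs.includes(g)));
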
View File

@@ -7,12 +7,15 @@ import { TProjectPermission } from "@app/lib/types";
import { ActorAuthMethod } from "@app/services/auth/auth-type";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TGroupDALFactory } from "../group/group-dal";
export type TListSshHostsDTO = Omit<TProjectPermission, "projectId">;
export type TLoginMapping = {
loginUser: string;
allowedPrincipals: {
usernames: string[];
usernames?: string[];
groups?: string[];
};
};
@@ -63,7 +66,8 @@ type BaseCreateSshLoginMappingsDTO = {
sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction">;
sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
userDAL: Pick<TUserDALFactory, "find">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission" | "checkGroupProjectPermission">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectId: string;
actorAuthMethod: ActorAuthMethod;
actorOrgId: string;

View File

@@ -18,6 +18,7 @@ export enum ApiDocsTags {
KubernetesAuth = "Kubernetes Auth",
JwtAuth = "JWT Auth",
OidcAuth = "OIDC Auth",
LdapAuth = "LDAP Auth",
Groups = "Groups",
Organizations = "Organizations",
Projects = "Projects",
@@ -184,6 +185,49 @@ export const UNIVERSAL_AUTH = {
}
} as const;
export const LDAP_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
username: "The username of the LDAP user to login.",
password: "The password of the LDAP user to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
url: "The URL of the LDAP server.",
allowedFields:
"An array of key/value pairs of required fields that the LDAP entry must have in order to authenticate. Each value may be a comma-separated list of accepted values.",
searchBase: "The base DN to search for the LDAP user.",
searchFilter: "The filter to use to search for the LDAP user.",
bindDN: "The DN of the user to bind to the LDAP server.",
bindPass: "The password of the user to bind to the LDAP server.",
ldapCaCertificate: "The PEM-encoded CA certificate for the LDAP server.",
accessTokenTTL: "The lifetime for an access token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
},
UPDATE: {
identityId: "The ID of the identity to update the configuration for.",
url: "The new URL of the LDAP server.",
allowedFields: "The comma-separated list of allowed fields to return from the LDAP user.",
searchBase: "The new base DN to search for the LDAP user.",
searchFilter: "The new filter to use to search for the LDAP user.",
bindDN: "The new DN of the user to bind to the LDAP server.",
bindPass: "The new password of the user to bind to the LDAP server.",
ldapCaCertificate: "The new PEM-encoded CA certificate for the LDAP server.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the configuration for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the configuration for."
}
} as const;
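
Read together with the route schema further down, these descriptions imply an attach payload along these lines (all values are placeholders, not a working configuration):

// Hypothetical body for POST /ldap-auth/identities/:identityId
const attachBody = {
  url: "ldaps://ldap.example.com:636",
  bindDN: "cn=infisical,ou=service,dc=example,dc=com",
  bindPass: "********",
  searchBase: "ou=people,dc=example,dc=com",
  searchFilter: "(uid={{username}})", // the route schema's default
  allowedFields: [{ key: "mail", value: "dev@example.com,ops@example.com" }],
  accessTokenTTL: 2592000,
  accessTokenMaxTTL: 2592000,
  accessTokenNumUsesLimit: 0,
  accessTokenTrustedIps: [{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]
};
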
export const AWS_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
@@ -1434,7 +1478,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project.",
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project.",
userSshCaId:
"The ID of the SSH CA to use for user certificates. If not specified, the default user SSH CA will be used if it exists.",
hostSshCaId:
@@ -1449,7 +1493,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project."
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project."
},
DELETE: {
sshHostId: "The ID of the SSH host to delete."

View File

@@ -84,7 +84,9 @@ const redactedKeys = [
"secrets",
"key",
"password",
"config"
"config",
"bindPass",
"bindDN"
];
const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";

View File

@@ -104,6 +104,14 @@ export const publicSshCaLimit: RateLimitOptions = {
export const invalidateCacheLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 1,
max: 2,
keyGenerator: (req) => req.realIp
};
// Makes spamming "request access" harder, preventing email DDoS
export const requestAccessLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 10,
keyGenerator: (req) => req.realIp
};
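
The new limiter plugs into routes the same way as the existing ones; a minimal sketch of the wiring (mirroring the project-access route change further down, with schema and handler elided):

// Sketch: attach the limiter to an email-sending route
server.route({
  method: "POST",
  url: "/:workspaceId/project-access",
  config: {
    rateLimit: requestAccessLimit // at most 10 requests per minute per client IP
  },
  schema: { /* params/body as in the real route */ },
  handler: async () => ({ ok: true }) // placeholder for illustration
});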

View File

@@ -5,7 +5,7 @@
import type { FastifySchema, FastifySchemaCompiler, FastifyTypeProvider } from "fastify";
import type { FastifySerializerCompiler } from "fastify/types/schema";
import type { z, ZodAny, ZodTypeAny } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { PostProcessCallback, zodToJsonSchema } from "zod-to-json-schema";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type FreeformRecord = Record<string, any>;
@@ -28,9 +28,25 @@ interface Schema extends FastifySchema {
hide?: boolean;
}
// Credit: https://github.com/StefanTerdell/zod-to-json-schema
const jsonDescription: PostProcessCallback = (jsonSchema, def) => {
if (def.description) {
try {
return {
...jsonSchema,
description: undefined,
...JSON.parse(def.description)
};
} catch {}
}
return jsonSchema;
};
const zodToJsonSchemaOptions = {
target: "openApi3",
$refStrategy: "none"
$refStrategy: "none",
postProcess: jsonDescription
} as const;
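
The callback above lets a schema smuggle extra OpenAPI keywords through .describe() by JSON-encoding them, while plain string descriptions pass through untouched. For example:

// A JSON-encoded description becomes additional OpenAPI fields (e.g. a title)
const nameField = z
  .string()
  .describe(JSON.stringify({ title: "Name", description: "The name of the project." }));
// -> { type: "string", title: "Name", description: "The name of the project." }

// A non-JSON description hits the catch block and stays a plain description
const idField = z.string().describe("The ID of the project.");
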
// eslint-disable-next-line @typescript-eslint/no-explicit-any

View File

@@ -160,6 +160,8 @@ import { identityJwtAuthDALFactory } from "@app/services/identity-jwt-auth/ident
import { identityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { identityKubernetesAuthDALFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-dal";
import { identityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { identityLdapAuthDALFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-dal";
import { identityLdapAuthServiceFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-service";
import { identityOidcAuthDALFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-dal";
import { identityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { identityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@@ -354,6 +356,7 @@ export const registerRoutes = async (
const identityOidcAuthDAL = identityOidcAuthDALFactory(db);
const identityJwtAuthDAL = identityJwtAuthDALFactory(db);
const identityAzureAuthDAL = identityAzureAuthDALFactory(db);
const identityLdapAuthDAL = identityLdapAuthDALFactory(db);
const auditLogDAL = auditLogDALFactory(auditLogDb ?? db);
const auditLogStreamDAL = auditLogStreamDALFactory(db);
@@ -867,6 +870,8 @@ export const registerRoutes = async (
const sshHostService = sshHostServiceFactory({
userDAL,
groupDAL,
userGroupMembershipDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -889,7 +894,8 @@ export const registerRoutes = async (
sshHostLoginUserMappingDAL,
userDAL,
permissionService,
licenseService
licenseService,
groupDAL
});
const certificateAuthorityService = certificateAuthorityServiceFactory({
@@ -1445,6 +1451,16 @@ export const registerRoutes = async (
kmsService
});
const identityLdapAuthService = identityLdapAuthServiceFactory({
identityLdapAuthDAL,
permissionService,
kmsService,
identityAccessTokenDAL,
identityOrgMembershipDAL,
licenseService,
identityDAL
});
const gatewayService = gatewayServiceFactory({
permissionService,
gatewayDAL,
@@ -1705,6 +1721,7 @@ export const registerRoutes = async (
identityAzureAuth: identityAzureAuthService,
identityOidcAuth: identityOidcAuthService,
identityJwtAuth: identityJwtAuthService,
identityLdapAuth: identityLdapAuthService,
accessApprovalPolicy: accessApprovalPolicyService,
accessApprovalRequest: accessApprovalRequestService,
secretApprovalPolicy: secretApprovalPolicyService,
@@ -1770,6 +1787,10 @@ export const registerRoutes = async (
if (licenseSyncJob) {
cronJobs.push(licenseSyncJob);
}
const microsoftTeamsSyncJob = await microsoftTeamsService.initializeBackgroundSync();
if (microsoftTeamsSyncJob) {
cronJobs.push(microsoftTeamsSyncJob);
}
}
server.decorate<FastifyZodProvider["store"]>("store", {

View File

@@ -0,0 +1,497 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
// All the any rules are disabled because passport's TypeScript typings with fastify are really poor
import { Authenticator } from "@fastify/passport";
import fastifySession from "@fastify/session";
import { FastifyRequest } from "fastify";
import { IncomingMessage } from "http";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { IdentityLdapAuthsSchema } from "@app/db/schemas/identity-ldap-auths";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { isValidLdapFilter } from "@app/ee/services/ldap-config/ldap-fns";
import { ApiDocsTags, LDAP_AUTH } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { UnauthorizedError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { AllowedFieldsSchema } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";
export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "ldap-identity-auth", userProperty: "passportMachineIdentity" });
await server.register(fastifySession, { secret: appCfg.COOKIE_SECRET_SIGN_KEY });
await server.register(passport.initialize());
await server.register(passport.secureSession());
const getLdapPassportOpts = (req: FastifyRequest, done: any) => {
const { identityId } = req.body as {
identityId: string;
};
process.nextTick(async () => {
try {
const { ldapConfig, opts } = await server.services.identityLdapAuth.getLdapConfig(identityId);
req.ldapConfig = {
...ldapConfig,
isActive: true,
groupSearchBase: "",
uniqueUserAttribute: "",
groupSearchFilter: ""
};
done(null, opts);
} catch (err) {
logger.error(err, "Error in LDAP verification callback");
done(err);
}
});
};
passport.use(
new LdapStrategy(
getLdapPassportOpts as any,
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
try {
const requestBody = (req as unknown as FastifyRequest).body as {
username: string;
password: string;
identityId: string;
};
if (!requestBody.username || !requestBody.password) {
return cb(new UnauthorizedError({ message: "Invalid request. Missing username or password." }), false);
}
if (!requestBody.identityId) {
return cb(new UnauthorizedError({ message: "Invalid request. Missing identity ID." }), false);
}
const { ldapConfig } = req as unknown as FastifyRequest;
if (ldapConfig.allowedFields) {
for (const field of ldapConfig.allowedFields) {
if (!user[field.key]) {
return cb(
new UnauthorizedError({ message: `Invalid request. Missing field ${field.key} on user.` }),
false
);
}
const value = field.value.split(",");
if (!value.includes(user[field.key])) {
return cb(
new UnauthorizedError({
message: `Invalid request. User field '${field.key}' does not match the required values.`
}),
false
);
}
}
}
return cb(null, { identityId: requestBody.identityId, user });
} catch (error) {
logger.error(error, "Error in LDAP verification callback");
return cb(error, false);
}
}
)
);
server.route({
method: "POST",
url: "/ldap-auth/login",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Login with LDAP Auth",
body: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.LOGIN.identityId),
username: z.string().describe(LDAP_AUTH.LOGIN.username),
password: z.string().describe(LDAP_AUTH.LOGIN.password)
}),
response: {
200: z.object({
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
})
}
},
preValidation: passport.authenticate("ldapauth", {
failWithError: true,
session: false
}) as any,
errorHandler: (error) => {
if (error.name === "AuthenticationError") {
throw new UnauthorizedError({ message: "Invalid credentials" });
}
throw error;
},
handler: async (req) => {
if (!req.passportMachineIdentity?.identityId) {
throw new UnauthorizedError({ message: "Invalid request. Missing identity ID or LDAP entry details." });
}
const { identityId, user } = req.passportMachineIdentity;
const { accessToken, identityLdapAuth, identityMembershipOrg } = await server.services.identityLdapAuth.login({
identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg?.orgId,
event: {
type: EventType.LOGIN_IDENTITY_LDAP_AUTH,
metadata: {
identityId,
ldapEmail: user.mail,
ldapUsername: user.uid
}
}
});
return {
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityLdapAuth.accessTokenTTL,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL
};
}
});
server.route({
method: "POST",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Attach LDAP Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.ATTACH.identityId)
}),
body: z
.object({
url: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.url),
bindDN: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.bindDN),
bindPass: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.bindPass),
searchBase: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.searchBase),
searchFilter: z
.string()
.trim()
.min(1)
.default("(uid={{username}})")
.refine(isValidLdapFilter, "Invalid LDAP search filter")
.describe(LDAP_AUTH.ATTACH.searchFilter),
allowedFields: AllowedFieldsSchema.array().optional().describe(LDAP_AUTH.ATTACH.allowedFields),
ldapCaCertificate: z.string().trim().optional().describe(LDAP_AUTH.ATTACH.ldapCaCertificate),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(LDAP_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(1)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(LDAP_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.attachLdapAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId,
isActorSuperAdmin: isSuperAdmin(req.auth)
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ADD_IDENTITY_LDAP_AUTH,
metadata: {
identityId: req.params.identityId,
url: identityLdapAuth.url,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL,
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
allowedFields: req.body.allowedFields
}
}
});
return { identityLdapAuth };
}
});
server.route({
method: "PATCH",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Update LDAP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.UPDATE.identityId)
}),
body: z
.object({
url: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.url),
bindDN: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.bindDN),
bindPass: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.bindPass),
searchBase: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.searchBase),
searchFilter: z
.string()
.trim()
.min(1)
.optional()
.refine((v) => v === undefined || isValidLdapFilter(v), "Invalid LDAP search filter")
.describe(LDAP_AUTH.UPDATE.searchFilter),
allowedFields: AllowedFieldsSchema.array().optional().describe(LDAP_AUTH.UPDATE.allowedFields),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(LDAP_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(LDAP_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.optional()
.describe(LDAP_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.min(0)
.optional()
.describe(LDAP_AUTH.UPDATE.accessTokenMaxTTL)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.updateLdapAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.UPDATE_IDENTITY_LDAP_AUTH,
metadata: {
identityId: req.params.identityId,
url: identityLdapAuth.url,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL,
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
accessTokenTrustedIps: identityLdapAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
allowedFields: req.body.allowedFields
}
}
});
return { identityLdapAuth };
}
});
server.route({
method: "GET",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Retrieve LDAP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.RETRIEVE.identityId)
}),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
}).extend({
bindDN: z.string(),
bindPass: z.string(),
ldapCaCertificate: z.string().optional()
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.getLdapAuth({
identityId: req.params.identityId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_IDENTITY_LDAP_AUTH,
metadata: {
identityId: identityLdapAuth.identityId
}
}
});
return { identityLdapAuth };
}
});
server.route({
method: "DELETE",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Delete LDAP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.REVOKE.identityId)
}),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.revokeIdentityLdapAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.REVOKE_IDENTITY_LDAP_AUTH,
metadata: {
identityId: identityLdapAuth.identityId
}
}
});
return { identityLdapAuth };
}
});
};
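
End to end, a machine identity authenticates against this router with a single POST. A hedged example call; the exact path assumes the /api/v1/auth prefix under which the identity auth routers are mounted (shown further down):

// Hypothetical login request (placeholder values)
const res = await fetch("https://app.infisical.com/api/v1/auth/ldap-auth/login", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    identityId: "<identity-id>",
    username: "jdoe",
    password: "********"
  })
});
const { accessToken, tokenType, expiresIn } = await res.json();
// tokenType is "Bearer"; present accessToken on subsequent requests until expiresIn elapses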

View File

@@ -19,6 +19,7 @@ import { registerIdentityAzureAuthRouter } from "./identity-azure-auth-router";
import { registerIdentityGcpAuthRouter } from "./identity-gcp-auth-router";
import { registerIdentityJwtAuthRouter } from "./identity-jwt-auth-router";
import { registerIdentityKubernetesRouter } from "./identity-kubernetes-auth-router";
import { registerIdentityLdapAuthRouter } from "./identity-ldap-auth-router";
import { registerIdentityOidcAuthRouter } from "./identity-oidc-auth-router";
import { registerIdentityRouter } from "./identity-router";
import { registerIdentityTokenAuthRouter } from "./identity-token-auth-router";
@@ -63,6 +64,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await authRouter.register(registerIdentityAzureAuthRouter);
await authRouter.register(registerIdentityOidcAuthRouter);
await authRouter.register(registerIdentityJwtAuthRouter);
await authRouter.register(registerIdentityLdapAuthRouter);
},
{ prefix: "/auth" }
);

View File

@@ -2,7 +2,7 @@ import { z } from "zod";
import { ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -47,7 +47,7 @@ export const registerOrgAdminRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/projects/:projectId/grant-admin-access",
config: {
rateLimit: readLimit
rateLimit: writeLimit
},
schema: {
params: z.object({

View File

@@ -19,7 +19,7 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, PROJECTS } from "@app/lib/api-docs";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { re2Validator } from "@app/lib/zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, requestAccessLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { validateMicrosoftTeamsChannelsSchema } from "@app/services/microsoft-teams/microsoft-teams-fns";
@@ -1006,7 +1006,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/:workspaceId/project-access",
config: {
rateLimit: writeLimit
rateLimit: requestAccessLimit
},
schema: {
params: z.object({

View File

@@ -30,6 +30,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
.leftJoin(TableName.IdentityGcpAuth, `${TableName.Identity}.id`, `${TableName.IdentityGcpAuth}.identityId`)
.leftJoin(TableName.IdentityAwsAuth, `${TableName.Identity}.id`, `${TableName.IdentityAwsAuth}.identityId`)
.leftJoin(TableName.IdentityAzureAuth, `${TableName.Identity}.id`, `${TableName.IdentityAzureAuth}.identityId`)
.leftJoin(TableName.IdentityLdapAuth, `${TableName.Identity}.id`, `${TableName.IdentityLdapAuth}.identityId`)
.leftJoin(
TableName.IdentityKubernetesAuth,
`${TableName.Identity}.id`,
@@ -48,6 +49,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityOidcAuth).as("accessTokenTrustedIpsOidc"),
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityTokenAuth).as("accessTokenTrustedIpsToken"),
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityJwtAuth).as("accessTokenTrustedIpsJwt"),
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityLdapAuth).as("accessTokenTrustedIpsLdap"),
db.ref("name").withSchema(TableName.Identity)
)
.first();
@@ -63,7 +65,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
trustedIpsKubernetesAuth: doc.accessTokenTrustedIpsK8s,
trustedIpsOidcAuth: doc.accessTokenTrustedIpsOidc,
trustedIpsAccessTokenAuth: doc.accessTokenTrustedIpsToken,
trustedIpsAccessJwtAuth: doc.accessTokenTrustedIpsJwt
trustedIpsAccessJwtAuth: doc.accessTokenTrustedIpsJwt,
trustedIpsAccessLdapAuth: doc.accessTokenTrustedIpsLdap
};
} catch (error) {
throw new DatabaseError({ error, name: "IdAccessTokenFindOne" });

View File

@@ -186,7 +186,8 @@ export const identityAccessTokenServiceFactory = ({
[IdentityAuthMethod.KUBERNETES_AUTH]: identityAccessToken.trustedIpsKubernetesAuth,
[IdentityAuthMethod.OIDC_AUTH]: identityAccessToken.trustedIpsOidcAuth,
[IdentityAuthMethod.TOKEN_AUTH]: identityAccessToken.trustedIpsAccessTokenAuth,
[IdentityAuthMethod.JWT_AUTH]: identityAccessToken.trustedIpsAccessJwtAuth
[IdentityAuthMethod.JWT_AUTH]: identityAccessToken.trustedIpsAccessJwtAuth,
[IdentityAuthMethod.LDAP_AUTH]: identityAccessToken.trustedIpsAccessLdapAuth
};
const trustedIps = trustedIpsMap[identityAccessToken.authMethod as IdentityAuthMethod];

View File

@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TIdentityLdapAuthDALFactory = ReturnType<typeof identityLdapAuthDALFactory>;
export const identityLdapAuthDALFactory = (db: TDbClient) => {
const ldapAuthOrm = ormify(db, TableName.IdentityLdapAuth);
return ldapAuthOrm;
};

View File

@@ -0,0 +1,543 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { IdentityAuthMethod } from "@app/db/schemas";
import { testLDAPConfig } from "@app/ee/services/ldap-config/ldap-fns";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionIdentityActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import {
constructPermissionErrorMessage,
validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { validateIdentityUpdateForSuperAdminPrivileges } from "../super-admin/super-admin-fns";
import { TIdentityLdapAuthDALFactory } from "./identity-ldap-auth-dal";
import {
AllowedFieldsSchema,
TAttachLdapAuthDTO,
TGetLdapAuthDTO,
TLoginLdapAuthDTO,
TRevokeLdapAuthDTO,
TUpdateLdapAuthDTO
} from "./identity-ldap-auth-types";
type TIdentityLdapAuthServiceFactoryDep = {
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityLdapAuthDAL: Pick<
TIdentityLdapAuthDALFactory,
"findOne" | "transaction" | "create" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
kmsService: TKmsServiceFactory;
identityDAL: TIdentityDALFactory;
};
export type TIdentityLdapAuthServiceFactory = ReturnType<typeof identityLdapAuthServiceFactory>;
export const identityLdapAuthServiceFactory = ({
identityAccessTokenDAL,
identityDAL,
identityLdapAuthDAL,
identityOrgMembershipDAL,
licenseService,
permissionService,
kmsService
}: TIdentityLdapAuthServiceFactoryDep) => {
const getLdapConfig = async (identityId: string) => {
const identity = await identityDAL.findOne({ id: identityId });
if (!identity) throw new NotFoundError({ message: `Identity with ID '${identityId}' not found` });
const identityOrgMembership = await identityOrgMembershipDAL.findOne({ identityId: identity.id });
if (!identityOrgMembership) throw new NotFoundError({ message: `Identity with ID '${identityId}' not found` });
const ldapAuth = await identityLdapAuthDAL.findOne({ identityId: identity.id });
if (!ldapAuth) throw new NotFoundError({ message: `LDAP auth for identity with ID '${identityId}' not found` });
const parsedAllowedFields = ldapAuth.allowedFields
? AllowedFieldsSchema.array().parse(ldapAuth.allowedFields)
: undefined;
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityOrgMembership.orgId
});
const bindDN = decryptor({ cipherTextBlob: ldapAuth.encryptedBindDN }).toString();
const bindPass = decryptor({ cipherTextBlob: ldapAuth.encryptedBindPass }).toString();
const ldapCaCertificate = ldapAuth.encryptedLdapCaCertificate
? decryptor({ cipherTextBlob: ldapAuth.encryptedLdapCaCertificate }).toString()
: undefined;
const ldapConfig = {
id: ldapAuth.id,
organization: identityOrgMembership.orgId,
url: ldapAuth.url,
bindDN,
bindPass,
searchBase: ldapAuth.searchBase,
searchFilter: ldapAuth.searchFilter,
caCert: ldapCaCertificate || "",
allowedFields: parsedAllowedFields
};
const opts = {
server: {
url: ldapAuth.url,
bindDN,
bindCredentials: bindPass,
searchBase: ldapAuth.searchBase,
searchFilter: ldapAuth.searchFilter,
...(ldapCaCertificate
? {
tlsOptions: {
ca: [ldapCaCertificate]
}
}
: {})
},
passReqToCallback: true
};
return { opts, ldapConfig };
};
const login = async ({ identityId }: TLoginLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) {
throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
}
const identityLdapAuth = await identityLdapAuthDAL.findOne({ identityId });
if (!identityLdapAuth) {
throw new NotFoundError({ message: `Failed to find LDAP auth for identity with ID ${identityId}` });
}
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
if (!plan.ldap) {
throw new BadRequestError({
message:
"Failed to login to identity due to plan restriction. Upgrade plan to login to use LDAP authentication."
});
}
const identityAccessToken = await identityLdapAuthDAL.transaction(async (tx) => {
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityLdapAuth.identityId,
isAccessTokenRevoked: false,
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL,
accessTokenNumUses: 0,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
authMethod: IdentityAuthMethod.LDAP_AUTH
},
tx
);
return newToken;
});
const appCfg = getConfig();
const accessToken = jwt.sign(
{
identityId: identityLdapAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiring tokens the options object must be omitted entirely; jsonwebtoken throws even when expiresIn is explicitly undefined
Number(identityAccessToken.accessTokenTTL) === 0
? undefined
: {
expiresIn: Number(identityAccessToken.accessTokenTTL)
}
);
return { accessToken, identityLdapAuth, identityAccessToken, identityMembershipOrg };
};
const attachLdapAuth = async ({
identityId,
url,
searchBase,
searchFilter,
bindDN,
bindPass,
ldapCaCertificate,
accessTokenTTL,
accessTokenMaxTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps,
actorId,
actorAuthMethod,
actor,
actorOrgId,
isActorSuperAdmin,
allowedFields
}: TAttachLdapAuthDTO) => {
await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new BadRequestError({
message: "Failed to add LDAP Auth to already configured identity"
});
}
if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
if (!plan.ldap) {
throw new BadRequestError({
message: "Failed to add LDAP Auth to identity due to plan restriction. Upgrade plan to add LDAP Auth."
});
}
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
if (
!plan.ipAllowlisting &&
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
accessTokenTrustedIp.ipAddress !== "::/0"
)
throw new BadRequestError({
message:
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
});
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
throw new BadRequestError({
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
});
return extractIPDetails(accessTokenTrustedIp.ipAddress);
});
if (allowedFields) AllowedFieldsSchema.array().parse(allowedFields);
const identityLdapAuth = await identityLdapAuthDAL.transaction(async (tx) => {
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityMembershipOrg.orgId
});
const { cipherTextBlob: encryptedBindPass } = encryptor({
plainText: Buffer.from(bindPass)
});
let encryptedLdapCaCertificate: Buffer | undefined;
if (ldapCaCertificate) {
const { cipherTextBlob: encryptedCertificate } = encryptor({
plainText: Buffer.from(ldapCaCertificate)
});
encryptedLdapCaCertificate = encryptedCertificate;
}
const { cipherTextBlob: encryptedBindDN } = encryptor({
plainText: Buffer.from(bindDN)
});
const isConnected = await testLDAPConfig({
bindDN,
bindPass,
caCert: ldapCaCertificate || "",
url
});
if (!isConnected) {
throw new BadRequestError({
message:
"Failed to connect to LDAP server. Please ensure that the LDAP server is running and your credentials are correct."
});
}
const doc = await identityLdapAuthDAL.create(
{
identityId: identityMembershipOrg.identityId,
encryptedBindDN,
encryptedBindPass,
searchBase,
searchFilter,
url,
encryptedLdapCaCertificate,
accessTokenMaxTTL,
accessTokenTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps),
allowedFields: allowedFields ? JSON.stringify(allowedFields) : undefined
},
tx
);
return doc;
});
return { ...identityLdapAuth, orgId: identityMembershipOrg.orgId };
};
const updateLdapAuth = async ({
identityId,
url,
searchBase,
searchFilter,
bindDN,
bindPass,
ldapCaCertificate,
allowedFields,
accessTokenTTL,
accessTokenMaxTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdateLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new NotFoundError({
message: "The identity does not have LDAP Auth attached"
});
}
const identityLdapAuth = await identityLdapAuthDAL.findOne({ identityId });
if (
(accessTokenMaxTTL || identityLdapAuth.accessTokenMaxTTL) > 0 &&
(accessTokenTTL || identityLdapAuth.accessTokenTTL) > (accessTokenMaxTTL || identityLdapAuth.accessTokenMaxTTL)
) {
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
if (!plan.ldap) {
throw new BadRequestError({
message: "Failed to update LDAP Auth due to plan restriction. Upgrade plan to update LDAP Auth."
});
}
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
if (
!plan.ipAllowlisting &&
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
accessTokenTrustedIp.ipAddress !== "::/0"
)
throw new BadRequestError({
message:
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
});
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
throw new BadRequestError({
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
});
return extractIPDetails(accessTokenTrustedIp.ipAddress);
});
if (allowedFields) AllowedFieldsSchema.array().parse(allowedFields);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityMembershipOrg.orgId
});
let encryptedBindPass: Buffer | undefined;
if (bindPass) {
const { cipherTextBlob: bindPassCiphertext } = encryptor({
plainText: Buffer.from(bindPass)
});
encryptedBindPass = bindPassCiphertext;
}
let encryptedLdapCaCertificate: Buffer | undefined;
if (ldapCaCertificate) {
const { cipherTextBlob: ldapCaCertificateCiphertext } = encryptor({
plainText: Buffer.from(ldapCaCertificate)
});
encryptedLdapCaCertificate = ldapCaCertificateCiphertext;
}
let encryptedBindDN: Buffer | undefined;
if (bindDN) {
const { cipherTextBlob: bindDNCiphertext } = encryptor({
plainText: Buffer.from(bindDN)
});
encryptedBindDN = bindDNCiphertext;
}
const { ldapConfig } = await getLdapConfig(identityId);
const isConnected = await testLDAPConfig({
bindDN: bindDN || ldapConfig.bindDN,
bindPass: bindPass || ldapConfig.bindPass,
caCert: ldapCaCertificate || ldapConfig.caCert,
url: url || ldapConfig.url
});
if (!isConnected) {
throw new BadRequestError({
message:
"Failed to connect to LDAP server. Please ensure that the LDAP server is running and your credentials are correct."
});
}
const updatedLdapAuth = await identityLdapAuthDAL.updateById(identityLdapAuth.id, {
url,
searchBase,
searchFilter,
encryptedBindDN,
encryptedBindPass,
encryptedLdapCaCertificate,
allowedFields: allowedFields ? JSON.stringify(allowedFields) : undefined,
accessTokenMaxTTL,
accessTokenTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps: reformattedAccessTokenTrustedIps
? JSON.stringify(reformattedAccessTokenTrustedIps)
: undefined
});
return { ...updatedLdapAuth, orgId: identityMembershipOrg.orgId };
};
const getLdapAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new BadRequestError({
message: "The identity does not have LDAP Auth attached"
});
}
const ldapIdentityAuth = await identityLdapAuthDAL.findOne({ identityId });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityMembershipOrg.orgId
});
const bindDN = decryptor({ cipherTextBlob: ldapIdentityAuth.encryptedBindDN }).toString();
const bindPass = decryptor({ cipherTextBlob: ldapIdentityAuth.encryptedBindPass }).toString();
const ldapCaCertificate = ldapIdentityAuth.encryptedLdapCaCertificate
? decryptor({ cipherTextBlob: ldapIdentityAuth.encryptedLdapCaCertificate }).toString()
: undefined;
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
return { ...ldapIdentityAuth, orgId: identityMembershipOrg.orgId, bindDN, bindPass, ldapCaCertificate };
};
const revokeIdentityLdapAuth = async ({
identityId,
actorId,
actor,
actorAuthMethod,
actorOrgId
}: TRevokeLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new BadRequestError({
message: "The identity does not have LDAP Auth attached"
});
}
const { permission, membership } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
const { permission: rolePermission } = await permissionService.getOrgPermission(
ActorType.IDENTITY,
identityMembershipOrg.identityId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.RevokeAuth,
OrgPermissionSubjects.Identity,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to revoke LDAP auth of identity with more privileged role",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.RevokeAuth,
OrgPermissionSubjects.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const revokedIdentityLdapAuth = await identityLdapAuthDAL.transaction(async (tx) => {
const [deletedLdapAuth] = await identityLdapAuthDAL.delete({ identityId }, tx);
await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.LDAP_AUTH }, tx);
return { ...deletedLdapAuth, orgId: identityMembershipOrg.orgId };
});
return revokedIdentityLdapAuth;
};
return {
attachLdapAuth,
getLdapConfig,
updateLdapAuth,
login,
revokeIdentityLdapAuth,
getLdapAuth
};
};
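
One subtlety in the login flow above: an accessTokenTTL of 0 yields a token with no exp claim at all, because the options argument is omitted entirely. A minimal sketch of that branching, assuming the same jsonwebtoken import:

import jwt from "jsonwebtoken";

// Sketch: mirrors the TTL branching in login()
const signIdentityToken = (payload: object, secret: string, ttlSeconds: number) =>
  Number(ttlSeconds) === 0
    ? jwt.sign(payload, secret) // omit options: no exp claim, token never expires
    : jwt.sign(payload, secret, { expiresIn: Number(ttlSeconds) });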

View File

@@ -0,0 +1,56 @@
import { z } from "zod";
import { TProjectPermission } from "@app/lib/types";
export const AllowedFieldsSchema = z.object({
key: z.string().trim(),
value: z
.string()
.trim()
.transform((val) => val.replace(/\s/g, ""))
});
export type TAllowedFields = z.infer<typeof AllowedFieldsSchema>;
export type TAttachLdapAuthDTO = {
identityId: string;
url: string;
searchBase: string;
searchFilter: string;
bindDN: string;
bindPass: string;
ldapCaCertificate?: string;
allowedFields?: TAllowedFields[];
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: { ipAddress: string }[];
isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateLdapAuthDTO = {
identityId: string;
url?: string;
searchBase?: string;
searchFilter?: string;
bindDN?: string;
bindPass?: string;
allowedFields?: TAllowedFields[];
ldapCaCertificate?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: { ipAddress: string }[];
} & Omit<TProjectPermission, "projectId">;
export type TGetLdapAuthDTO = {
identityId: string;
} & Omit<TProjectPermission, "projectId">;
export type TLoginLdapAuthDTO = {
identityId: string;
};
export type TRevokeLdapAuthDTO = {
identityId: string;
} & Omit<TProjectPermission, "projectId">;

View File

@@ -8,7 +8,8 @@ export const buildAuthMethods = ({
oidcId,
azureId,
tokenId,
jwtId
jwtId,
ldapId
}: {
uaId?: string;
gcpId?: string;
@@ -18,6 +19,7 @@ export const buildAuthMethods = ({
azureId?: string;
tokenId?: string;
jwtId?: string;
ldapId?: string;
}) => {
return [
...[uaId ? IdentityAuthMethod.UNIVERSAL_AUTH : null],
@@ -27,6 +29,7 @@ export const buildAuthMethods = ({
...[oidcId ? IdentityAuthMethod.OIDC_AUTH : null],
...[azureId ? IdentityAuthMethod.AZURE_AUTH : null],
...[tokenId ? IdentityAuthMethod.TOKEN_AUTH : null],
...[jwtId ? IdentityAuthMethod.JWT_AUTH : null]
...[jwtId ? IdentityAuthMethod.JWT_AUTH : null],
...[ldapId ? IdentityAuthMethod.LDAP_AUTH : null]
].filter((authMethod) => authMethod) as IdentityAuthMethod[];
};
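
A quick illustration of the updated helper with hypothetical IDs:

// Illustrative only
buildAuthMethods({ uaId: "ua_123", ldapId: "ldap_456" });
// -> [IdentityAuthMethod.UNIVERSAL_AUTH, IdentityAuthMethod.LDAP_AUTH]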

View File

@@ -14,6 +14,7 @@ import {
TIdentityUniversalAuths,
TOrgRoles
} from "@app/db/schemas";
import { TIdentityLdapAuths } from "@app/db/schemas/identity-ldap-auths";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { buildKnexFilterForSearchResource } from "@app/lib/search-resource/db";
@@ -81,6 +82,11 @@ export const identityOrgDALFactory = (db: TDbClient) => {
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityJwtAuth}.identityId`
)
.leftJoin<TIdentityLdapAuths>(
TableName.IdentityLdapAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityLdapAuth}.identityId`
)
.select(
selectAllTableCols(TableName.IdentityOrgMembership),
@@ -93,7 +99,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth),
db.ref("id").as("ldapId").withSchema(TableName.IdentityLdapAuth),
db.ref("name").withSchema(TableName.Identity)
);
@@ -200,6 +206,12 @@ export const identityOrgDALFactory = (db: TDbClient) => {
"paginatedIdentity.identityId",
`${TableName.IdentityJwtAuth}.identityId`
)
.leftJoin<TIdentityLdapAuths>(
TableName.IdentityLdapAuth,
"paginatedIdentity.identityId",
`${TableName.IdentityLdapAuth}.identityId`
)
.select(
db.ref("id").withSchema("paginatedIdentity"),
db.ref("role").withSchema("paginatedIdentity"),
@@ -217,7 +229,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth)
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth),
db.ref("id").as("ldapId").withSchema(TableName.IdentityLdapAuth)
)
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
@@ -259,6 +272,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
ldapId,
createdAt,
updatedAt
}) => ({
@@ -290,7 +304,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
jwtId
jwtId,
ldapId
})
}
}),
@@ -406,6 +421,11 @@ export const identityOrgDALFactory = (db: TDbClient) => {
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityJwtAuth}.identityId`
)
.leftJoin(
TableName.IdentityLdapAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityLdapAuth}.identityId`
)
.select(
db.ref("id").withSchema(TableName.IdentityOrgMembership),
db.ref("total_count").withSchema("searchedIdentities"),
@@ -424,7 +444,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth)
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth),
db.ref("id").as("ldapId").withSchema(TableName.IdentityLdapAuth)
)
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
@@ -467,6 +488,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
ldapId,
createdAt,
updatedAt
}) => ({
@@ -498,7 +520,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
jwtId
jwtId,
ldapId
})
}
}),

View File

@@ -177,7 +177,6 @@ export const deleteGithubSecrets = async ({
selected_repositories_url?: string | undefined;
}
// @ts-expect-error just octokit ts compatibility issue
const OctokitWithRetry = Octokit.plugin(retry);
let octokit: Octokit;
const appCfg = getConfig();

View File

@@ -6,6 +6,7 @@ import {
Request,
Response
} from "botbuilder";
import { CronJob } from "cron";
import { FastifyReply, FastifyRequest } from "fastify";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
@@ -86,8 +87,17 @@ export const microsoftTeamsServiceFactory = ({
}: TMicrosoftTeamsServiceFactoryDep) => {
let teamsBot: TeamsBot | null = null;
let adapter: CloudAdapter | null = null;
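// Tracks the server config's updatedAt as last applied by this instance, so horizontally scaled replicas can skip redundant bot re-initialization during the periodic sync below.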
let lastKnownUpdatedAt = new Date();
const initializeTeamsBot = async ({ botAppId, botAppPassword }: { botAppId: string; botAppPassword: string }) => {
const initializeTeamsBot = async ({
botAppId,
botAppPassword,
lastUpdatedAt
}: {
botAppId: string;
botAppPassword: string;
lastUpdatedAt?: Date;
}) => {
logger.info("Initializing Microsoft Teams bot");
teamsBot = new TeamsBot({
botAppId,
@@ -106,6 +116,57 @@ export const microsoftTeamsServiceFactory = ({
})
)
);
if (lastUpdatedAt) {
lastKnownUpdatedAt = lastUpdatedAt;
}
};
const $syncMicrosoftTeamsIntegrationConfiguration = async () => {
try {
const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
if (!serverCfg) {
throw new BadRequestError({
message: "Failed to get server configuration."
});
}
if (lastKnownUpdatedAt.getTime() === serverCfg.updatedAt.getTime()) {
logger.info("No changes to Microsoft Teams integration configuration, skipping sync");
return;
}
lastKnownUpdatedAt = serverCfg.updatedAt;
if (
serverCfg.encryptedMicrosoftTeamsAppId &&
serverCfg.encryptedMicrosoftTeamsClientSecret &&
serverCfg.encryptedMicrosoftTeamsBotId
) {
const decryptWithRoot = kmsService.decryptWithRootKey();
const decryptedAppId = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsAppId);
const decryptedAppPassword = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsClientSecret);
await initializeTeamsBot({
botAppId: decryptedAppId.toString(),
botAppPassword: decryptedAppPassword.toString()
});
}
} catch (err) {
logger.error(err, "Error syncing Microsoft Teams integration configuration");
}
};
const initializeBackgroundSync = async () => {
logger.info("Setting up background sync process for Microsoft Teams workflow integration configuration");
// initial sync upon startup
await $syncMicrosoftTeamsIntegrationConfiguration();
// sync the Microsoft Teams integration configuration every 5 minutes
const job = new CronJob("*/5 * * * *", $syncMicrosoftTeamsIntegrationConfiguration);
job.start();
return job;
};
const start = async () => {
@@ -703,6 +764,7 @@ export const microsoftTeamsServiceFactory = ({
getTeams,
handleMessageEndpoint,
start,
initializeBackgroundSync,
sendNotification,
checkInstallationStatus,
getClientId

View File

@@ -246,7 +246,8 @@ export const superAdminServiceFactory = ({
await microsoftTeamsService.initializeTeamsBot({
botAppId: decryptedAppId.toString(),
botAppPassword: decryptedAppPassword.toString()
botAppPassword: decryptedAppPassword.toString(),
lastUpdatedAt: updatedServerCfg.updatedAt
});
}

View File

@@ -1,115 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
)
func TestCommitAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
commit string
commitAllowed bool
}{
{
allowlist: Allowlist{
Commits: []string{"commitA"},
},
commit: "commitA",
commitAllowed: true,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "commitA",
commitAllowed: false,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "",
commitAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.commitAllowed, tt.allowlist.CommitAllowed(tt.commit))
}
}
func TestRegexAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
secret string
regexAllowed bool
}{
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret: matchthis, done",
regexAllowed: true,
},
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret",
regexAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.regexAllowed, tt.allowlist.RegexAllowed(tt.secret))
}
}
func TestPathAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
path string
pathAllowed bool
}{
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a path",
pathAllowed: true,
},
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a ???",
pathAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.pathAllowed, tt.allowlist.PathAllowed(tt.path))
}
}

View File

@@ -1,279 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"fmt"
"regexp"
"strings"
"github.com/rs/zerolog/log"
"github.com/spf13/viper"
)
//go:embed infisical-scan.toml
var DefaultConfig string
// used to keep track of how many configs we can extend
// yea I know, globals bad
var extendDepth int
const maxExtendDepth = 2
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Description string
Extend Extend
Rules []struct {
ID string
Description string
Entropy float64
SecretGroup int
Regex string
Keywords []string
Path string
Tags []string
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Extend Extend
Path string
Description string
Rules map[string]Rule
Allowlist Allowlist
Keywords []string
// used to keep sarif results consistent
orderedRules []string
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords []string
orderedRules []string
)
rulesMap := make(map[string]Rule)
for _, r := range vc.Rules {
var allowlistRegexes []*regexp.Regexp
for _, a := range r.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range r.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
if r.Keywords == nil {
r.Keywords = []string{}
} else {
for _, k := range r.Keywords {
keywords = append(keywords, strings.ToLower(k))
}
}
if r.Tags == nil {
r.Tags = []string{}
}
var configRegex *regexp.Regexp
var configPathRegex *regexp.Regexp
if r.Regex == "" {
configRegex = nil
} else {
configRegex = regexp.MustCompile(r.Regex)
}
if r.Path == "" {
configPathRegex = nil
} else {
configPathRegex = regexp.MustCompile(r.Path)
}
r := Rule{
Description: r.Description,
RuleID: r.ID,
Regex: configRegex,
Path: configPathRegex,
SecretGroup: r.SecretGroup,
Entropy: r.Entropy,
Tags: r.Tags,
Keywords: r.Keywords,
Allowlist: Allowlist{
RegexTarget: r.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: r.Allowlist.Commits,
StopWords: r.Allowlist.StopWords,
},
}
orderedRules = append(orderedRules, r.RuleID)
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return Config{}, fmt.Errorf("%s invalid regex secret group %d, max regex secret group %d", r.Description, r.SecretGroup, r.Regex.NumSubexp())
}
rulesMap[r.RuleID] = r
}
var allowlistRegexes []*regexp.Regexp
for _, a := range vc.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range vc.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
c := Config{
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Allowlist: Allowlist{
RegexTarget: vc.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: vc.Allowlist.Commits,
StopWords: vc.Allowlist.StopWords,
},
Keywords: keywords,
orderedRules: orderedRules,
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
log.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
c.extendDefault()
} else if c.Extend.Path != "" {
c.extendPath()
}
}
return c, nil
}
func (c *Config) OrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.orderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msg("extending config with default config")
c.extend(cfg)
}
func (c *Config) extendPath() {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
for ruleID, rule := range extensionConfig.Rules {
if _, ok := c.Rules[ruleID]; !ok {
log.Trace().Msgf("adding %s to base config", ruleID)
c.Rules[ruleID] = rule
c.Keywords = append(c.Keywords, rule.Keywords...)
}
}
// append allowlists, not attempting to merge
c.Allowlist.Commits = append(c.Allowlist.Commits,
extensionConfig.Allowlist.Commits...)
c.Allowlist.Paths = append(c.Allowlist.Paths,
extensionConfig.Allowlist.Paths...)
c.Allowlist.Regexes = append(c.Allowlist.Regexes,
extensionConfig.Allowlist.Regexes...)
}

View File

@@ -1,170 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"regexp"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
)
const configPath = "../testdata/config/"
func TestTranslate(t *testing.T) {
tests := []struct {
cfgName string
cfg Config
wantError error
}{
{
cfgName: "allow_aws_re",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Regexes: []*regexp.Regexp{
regexp.MustCompile("AKIALALEMEL33243OLIA"),
},
},
},
},
},
},
{
cfgName: "allow_commit",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Commits: []string{"allowthiscommit"},
},
},
},
},
},
{
cfgName: "allow_path",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Paths: []*regexp.Regexp{
regexp.MustCompile(".go"),
},
},
},
},
},
},
{
cfgName: "entropy_group",
cfg: Config{
Rules: map[string]Rule{"discord-api-key": {
Description: "Discord API key",
Regex: regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
RuleID: "discord-api-key",
Allowlist: Allowlist{},
Entropy: 3.5,
SecretGroup: 3,
Tags: []string{},
Keywords: []string{},
},
},
},
},
{
cfgName: "bad_entropy_group",
cfg: Config{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "base",
cfg: Config{
Rules: map[string]Rule{
"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
},
"aws-secret-key": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key",
},
"aws-secret-key-again": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key-again",
},
},
},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
assert.Equal(t, cfg.Rules, tt.cfg.Rules)
}
}

View File

@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: example-auth-secret
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

View File

@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: changeThisOnProduction
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

File diff suppressed because it is too large

View File

@@ -1,43 +0,0 @@
package config
import (
"regexp"
)
// Rules contain information that define details on how to detect secrets
type Rule struct {
// Description is the description of the rule.
Description string
// RuleID is a unique identifier for this rule
RuleID string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
Allowlist Allowlist
}

View File

@@ -1,24 +0,0 @@
package config
import (
"regexp"
)
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}

View File

@@ -25,35 +25,31 @@ package detect
import (
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"github.com/rs/zerolog/log"
"github.com/Infisical/infisical-merge/report"
"github.com/Infisical/infisical-merge/detect/report"
)
func IsNew(finding report.Finding, baseline []report.Finding) bool {
func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
// the code requires maintanance if/when the Finding struct changes
// the code requires maintenance if/when the Finding struct changes
for _, b := range baseline {
if finding.Author == b.Author &&
finding.Commit == b.Commit &&
finding.Date == b.Date &&
if finding.RuleID == b.RuleID &&
finding.Description == b.Description &&
finding.Email == b.Email &&
finding.EndColumn == b.EndColumn &&
finding.StartLine == b.StartLine &&
finding.EndLine == b.EndLine &&
finding.Entropy == b.Entropy &&
finding.File == b.File &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Match == b.Match &&
finding.Message == b.Message &&
finding.RuleID == b.RuleID &&
finding.Secret == b.Secret &&
finding.StartColumn == b.StartColumn &&
finding.StartLine == b.StartLine {
finding.EndColumn == b.EndColumn &&
(redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
finding.File == b.File &&
finding.Commit == b.Commit &&
finding.Author == b.Author &&
finding.Email == b.Email &&
finding.Date == b.Date &&
finding.Message == b.Message &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Entropy == b.Entropy {
return false
}
}
@@ -61,23 +57,12 @@ func IsNew(finding report.Finding, baseline []report.Finding) bool {
}
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
var previousFindings []report.Finding
jsonFile, err := os.Open(baselinePath)
bytes, err := os.ReadFile(baselinePath)
if err != nil {
return nil, fmt.Errorf("could not open %s", baselinePath)
}
defer func() {
if cerr := jsonFile.Close(); cerr != nil {
log.Warn().Err(cerr).Msg("problem closing jsonFile handle")
}
}()
bytes, err := io.ReadAll(jsonFile)
if err != nil {
return nil, fmt.Errorf("could not read data from the file %s", baselinePath)
}
var previousFindings []report.Finding
err = json.Unmarshal(bytes, &previousFindings)
if err != nil {
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
@@ -85,3 +70,34 @@ func LoadBaseline(baselinePath string) ([]report.Finding, error) {
return previousFindings, nil
}
func (d *Detector) AddBaseline(baselinePath string, source string) error {
if baselinePath != "" {
absoluteSource, err := filepath.Abs(source)
if err != nil {
return err
}
absoluteBaseline, err := filepath.Abs(baselinePath)
if err != nil {
return err
}
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
if err != nil {
return err
}
baseline, err := LoadBaseline(baselinePath)
if err != nil {
return err
}
d.baseline = baseline
baselinePath = relativeBaseline
}
d.baselinePath = baselinePath
return nil
}
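For orientation, a minimal caller-side sketch of the baseline flow above, assuming only the LoadBaseline and IsNew signatures shown here; the newFindings helper and the file name are invented for illustration:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect"
	"github.com/Infisical/infisical-merge/detect/report"
)

// newFindings is a hypothetical helper: it keeps only findings that are
// not already present in the baseline file.
func newFindings(current []report.Finding, baselinePath string, redact uint) ([]report.Finding, error) {
	baseline, err := detect.LoadBaseline(baselinePath)
	if err != nil {
		return nil, err
	}
	fresh := make([]report.Finding, 0, len(current))
	for _, f := range current {
		// With redact > 0, IsNew skips the Match/Secret comparison, since
		// redacted reports no longer carry the raw values.
		if detect.IsNew(f, redact, baseline) {
			fresh = append(fresh, f)
		}
	}
	return fresh, nil
}

func main() {
	fresh, err := newFindings(nil, "baseline.json", 0)
	if err != nil {
		fmt.Println("baseline error:", err)
		return
	}
	fmt.Printf("%d new findings\n", len(fresh))
}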

View File

@@ -1,160 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"errors"
"testing"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/report"
)
func TestIsNew(t *testing.T) {
tests := []struct {
findings report.Finding
baseline []report.Finding
expect bool
}{
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
},
},
expect: false,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0002",
},
},
expect: true,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
Tags: []string{"a", "b"},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
Tags: []string{"a", "c"},
},
},
expect: false, // Updated tags don't make it a new finding
},
}
for _, test := range tests {
assert.Equal(t, test.expect, IsNew(test.findings, test.baseline))
}
}
func TestFileLoadBaseline(t *testing.T) {
tests := []struct {
Filename string
ExpectedError error
}{
{
Filename: "../testdata/baseline/baseline.csv",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.csv is not supported"),
},
{
Filename: "../testdata/baseline/baseline.sarif",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.sarif is not supported"),
},
{
Filename: "../testdata/baseline/notfound.json",
ExpectedError: errors.New("could not open ../testdata/baseline/notfound.json"),
},
}
for _, test := range tests {
_, err := LoadBaseline(test.Filename)
assert.Equal(t, test.ExpectedError.Error(), err.Error())
}
}
func TestIgnoreIssuesInBaseline(t *testing.T) {
tests := []struct {
findings []report.Finding
baseline []report.Finding
expectCount int
}{
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
expectCount: 0,
},
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "a",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "b",
},
},
expectCount: 0,
},
}
for _, test := range tests {
d, _ := NewDetectorDefaultConfig()
d.baseline = test.baseline
for _, finding := range test.findings {
d.addFinding(finding)
}
assert.Equal(t, test.expectCount, len(d.findings))
}
}

cli/detect/cmd/scm/scm.go (new normal file, 66 lines)
View File

@@ -0,0 +1,66 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package scm
import (
"fmt"
"strings"
)
type Platform int
const (
UnknownPlatform Platform = iota
NoPlatform // Explicitly disable the feature
GitHubPlatform
GitLabPlatform
AzureDevOpsPlatform
// TODO: Add others.
)
func (p Platform) String() string {
return [...]string{
"unknown",
"none",
"github",
"gitlab",
"azuredevops",
}[p]
}
func PlatformFromString(s string) (Platform, error) {
switch strings.ToLower(s) {
case "", "unknown":
return UnknownPlatform, nil
case "none":
return NoPlatform, nil
case "github":
return GitHubPlatform, nil
case "gitlab":
return GitLabPlatform, nil
case "azuredevops":
return AzureDevOpsPlatform, nil
default:
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
}
}
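A small usage sketch for the new scm package (input values invented; the import path assumes the module layout mirrors cli/detect/cmd/scm):

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/cmd/scm"
)

func main() {
	for _, raw := range []string{"", "GitHub", "none", "bitbucket"} {
		p, err := scm.PlatformFromString(raw)
		if err != nil {
			fmt.Println(err) // invalid scm platform value: bitbucket
			continue
		}
		// Matching is case-insensitive; the empty string maps to UnknownPlatform.
		fmt.Printf("%q -> %s\n", raw, p)
	}
}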

View File

@@ -23,63 +23,137 @@
package config
import (
"regexp"
"fmt"
"strings"
"golang.org/x/exp/maps"
"github.com/Infisical/infisical-merge/detect/regexp"
)
type AllowlistMatchCondition int
const (
AllowlistMatchOr AllowlistMatchCondition = iota
AllowlistMatchAnd
)
func (a AllowlistMatchCondition) String() string {
return [...]string{
"OR",
"AND",
}[a]
}
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
// Short human readable description of the allowlist.
Description string
// Regexes is slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// MatchCondition determines whether all criteria must match. Defaults to "OR".
MatchCondition AllowlistMatchCondition
// RegexTarget
RegexTarget string
// Commits is a slice of commit SHAs that are allowed to be ignored.
Commits []string
// Paths is a slice of path regular expressions that are allowed to be ignored.
Paths []*regexp.Regexp
// Commits is a slice of commit SHAs that are allowed to be ignored.
Commits []string
// Can be `match` or `line`.
//
// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
//
// If `line` the _Regexes_ will be tested against the entire line.
//
// If RegexTarget is empty, it will be tested against the found secret.
RegexTarget string
// Regexes is slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// StopWords is a slice of stop words that are allowed to be ignored.
// This targets the _secret_, not the content of the regex match like the
// Regexes slice.
StopWords []string
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
func (a *Allowlist) Validate() error {
if a.validated {
return nil
}
// Disallow empty allowlists.
if len(a.Commits) == 0 &&
len(a.Paths) == 0 &&
len(a.Regexes) == 0 &&
len(a.StopWords) == 0 {
return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
}
// Deduplicate commits and stopwords.
if len(a.Commits) > 0 {
uniqueCommits := make(map[string]struct{})
for _, commit := range a.Commits {
uniqueCommits[commit] = struct{}{}
}
a.Commits = maps.Keys(uniqueCommits)
}
if len(a.StopWords) > 0 {
uniqueStopwords := make(map[string]struct{})
for _, stopWord := range a.StopWords {
uniqueStopwords[stopWord] = struct{}{}
}
a.StopWords = maps.Keys(uniqueStopwords)
}
a.validated = true
return nil
}
// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) bool {
if c == "" {
return false
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
if a == nil || c == "" {
return false, ""
}
for _, commit := range a.Commits {
if commit == c {
return true
return true, c
}
}
return false
return false, ""
}
// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
if a == nil || path == "" {
return false
}
return anyRegexMatch(path, a.Paths)
}
// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(s string) bool {
return anyRegexMatch(s, a.Regexes)
func (a *Allowlist) RegexAllowed(secret string) bool {
if a == nil || secret == "" {
return false
}
return anyRegexMatch(secret, a.Regexes)
}
func (a *Allowlist) ContainsStopWord(s string) bool {
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
if a == nil || s == "" {
return false, ""
}
s = strings.ToLower(s)
for _, stopWord := range a.StopWords {
if strings.Contains(s, strings.ToLower(stopWord)) {
return true
return true, stopWord
}
}
return false
return false, ""
}
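A consumption sketch for the tuple-returning allowlist checks above; the allowlist contents are invented, and the import paths assume the cli/detect layout, with the detect/regexp wrapper re-exporting MustCompile and Regexp as used in this file:

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/config"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	a := &config.Allowlist{
		Commits:   []string{"deadbeef", "deadbeef"}, // duplicate is removed by Validate
		StopWords: []string{"example"},
		Regexes:   []*regexp.Regexp{regexp.MustCompile(`TEST_KEY`)},
	}
	if err := a.Validate(); err != nil {
		panic(err) // would fire only if every check list were empty
	}
	if ok, commit := a.CommitAllowed("deadbeef"); ok {
		fmt.Println("ignored via commit:", commit)
	}
	// Stop-word matching is case-insensitive and targets the secret itself.
	if ok, word := a.ContainsStopWord("an EXAMPLE secret"); ok {
		fmt.Println("ignored via stop word:", word)
	}
	fmt.Println(a.RegexAllowed("TEST_KEY=abc")) // true
}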

cli/detect/config/config.go (new normal file, 426 lines)
View File

@@ -0,0 +1,426 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"errors"
"fmt"
"sort"
"strings"
"github.com/spf13/viper"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
var (
//go:embed gitleaks.toml
DefaultConfig string
// used to keep track of how many configs we can extend
// yea I know, globals bad
extendDepth int
)
const maxExtendDepth = 2
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Title string
Description string
Extend Extend
Rules []struct {
ID string
Description string
Path string
Regex string
SecretGroup int
Entropy float64
Keywords []string
Tags []string
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperRuleAllowlist
Allowlists []*viperRuleAllowlist
}
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperGlobalAllowlist
Allowlists []*viperGlobalAllowlist
}
type viperRuleAllowlist struct {
Description string
Condition string
Commits []string
Paths []string
RegexTarget string
Regexes []string
StopWords []string
}
type viperGlobalAllowlist struct {
TargetRules []string
viperRuleAllowlist `mapstructure:",squash"`
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Title string
Extend Extend
Path string
Description string
Rules map[string]Rule
Keywords map[string]struct{}
// used to keep sarif results consistent
OrderedRules []string
Allowlists []*Allowlist
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
DisabledRules []string
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords = make(map[string]struct{})
orderedRules []string
rulesMap = make(map[string]Rule)
ruleAllowlists = make(map[string][]*Allowlist)
)
// Validate individual rules.
for _, vr := range vc.Rules {
var (
pathPat *regexp.Regexp
regexPat *regexp.Regexp
)
if vr.Path != "" {
pathPat = regexp.MustCompile(vr.Path)
}
if vr.Regex != "" {
regexPat = regexp.MustCompile(vr.Regex)
}
if vr.Keywords == nil {
vr.Keywords = []string{}
} else {
for i, k := range vr.Keywords {
keyword := strings.ToLower(k)
keywords[keyword] = struct{}{}
vr.Keywords[i] = keyword
}
}
if vr.Tags == nil {
vr.Tags = []string{}
}
cr := Rule{
RuleID: vr.ID,
Description: vr.Description,
Regex: regexPat,
SecretGroup: vr.SecretGroup,
Entropy: vr.Entropy,
Path: pathPat,
Keywords: vr.Keywords,
Tags: vr.Tags,
}
// Parse the rule allowlists, including the older format for backwards compatibility.
if vr.AllowList != nil {
// TODO: Remove this in v9.
if len(vr.Allowlists) > 0 {
return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
}
vr.Allowlists = append(vr.Allowlists, vr.AllowList)
}
for _, a := range vr.Allowlists {
allowlist, err := parseAllowlist(a)
if err != nil {
return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
}
cr.Allowlists = append(cr.Allowlists, allowlist)
}
orderedRules = append(orderedRules, cr.RuleID)
rulesMap[cr.RuleID] = cr
}
// Assemble the config.
c := Config{
Title: vc.Title,
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Keywords: keywords,
OrderedRules: orderedRules,
}
// Parse the config allowlists, including the older format for backwards compatibility.
if vc.AllowList != nil {
// TODO: Remove this in v9.
if len(vc.Allowlists) > 0 {
return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
}
vc.Allowlists = append(vc.Allowlists, vc.AllowList)
}
for _, a := range vc.Allowlists {
allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
if err != nil {
return Config{}, fmt.Errorf("[[allowlists]] %w", err)
}
// Allowlists with |targetRules| aren't added to the global list.
if len(a.TargetRules) > 0 {
for _, ruleID := range a.TargetRules {
// It's not possible to validate |ruleID| until after extend.
ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
}
} else {
c.Allowlists = append(c.Allowlists, allowlist)
}
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
if err := c.extendDefault(); err != nil {
return Config{}, err
}
} else if c.Extend.Path != "" {
if err := c.extendPath(); err != nil {
return Config{}, err
}
}
}
// Validate the rules after everything has been assembled (including extended configs).
if extendDepth == 0 {
for _, rule := range c.Rules {
if err := rule.Validate(); err != nil {
return Config{}, err
}
}
// Populate targeted configs.
for ruleID, allowlists := range ruleAllowlists {
rule, ok := c.Rules[ruleID]
if !ok {
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
}
rule.Allowlists = append(rule.Allowlists, allowlists...)
c.Rules[ruleID] = rule
}
}
return c, nil
}
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
var matchCondition AllowlistMatchCondition
switch strings.ToUpper(a.Condition) {
case "AND", "&&":
matchCondition = AllowlistMatchAnd
case "", "OR", "||":
matchCondition = AllowlistMatchOr
default:
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
}
// Validate the target.
regexTarget := a.RegexTarget
if regexTarget != "" {
switch regexTarget {
case "secret":
regexTarget = ""
case "match", "line":
// do nothing
default:
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
}
}
var allowlistRegexes []*regexp.Regexp
for _, a := range a.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range a.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
allowlist := &Allowlist{
Description: a.Description,
MatchCondition: matchCondition,
Commits: a.Commits,
Paths: allowlistPaths,
RegexTarget: regexTarget,
Regexes: allowlistRegexes,
StopWords: a.StopWords,
}
if err := allowlist.Validate(); err != nil {
return nil, err
}
return allowlist, nil
}
func (c *Config) GetOrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.OrderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() error {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
logging.Debug().Msg("extending config with default config")
c.extend(cfg)
return nil
}
func (c *Config) extendPath() error {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
return nil
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
// Get config name for helpful log messages.
var configName string
if c.Extend.Path != "" {
configName = c.Extend.Path
} else {
configName = "default"
}
// Convert |Config.DisabledRules| into a map for ease of access.
disabledRuleIDs := map[string]struct{}{}
for _, id := range c.Extend.DisabledRules {
if _, ok := extensionConfig.Rules[id]; !ok {
logging.Warn().
Str("rule-id", id).
Str("config", configName).
Msg("Disabled rule doesn't exist in extended config.")
}
disabledRuleIDs[id] = struct{}{}
}
for ruleID, baseRule := range extensionConfig.Rules {
// Skip the rule.
if _, ok := disabledRuleIDs[ruleID]; ok {
logging.Debug().
Str("rule-id", ruleID).
Str("config", configName).
Msg("Ignoring rule from extended config.")
continue
}
currentRule, ok := c.Rules[ruleID]
if !ok {
// Rule doesn't exist, add it to the config.
c.Rules[ruleID] = baseRule
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.OrderedRules = append(c.OrderedRules, ruleID)
} else {
// Rule exists, merge our changes into the base.
if currentRule.Description != "" {
baseRule.Description = currentRule.Description
}
if currentRule.Entropy != 0 {
baseRule.Entropy = currentRule.Entropy
}
if currentRule.SecretGroup != 0 {
baseRule.SecretGroup = currentRule.SecretGroup
}
if currentRule.Regex != nil {
baseRule.Regex = currentRule.Regex
}
if currentRule.Path != nil {
baseRule.Path = currentRule.Path
}
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
for _, a := range currentRule.Allowlists {
baseRule.Allowlists = append(baseRule.Allowlists, a)
}
// The keywords from the base rule and the extended rule must be merged into the global keywords list
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.Rules[ruleID] = baseRule
}
}
// append allowlists, not attempting to merge
for _, a := range extensionConfig.Allowlists {
c.Allowlists = append(c.Allowlists, a)
}
// sort to keep extended rules in order
sort.Strings(c.OrderedRules)
}
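To make the new [[allowlists]] targetRules flow concrete, a sketch that feeds a minimal TOML document (rule and path patterns invented) through Viper and Translate, mirroring the loading code above:

package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"

	"github.com/Infisical/infisical-merge/detect/config"
)

const cfgTOML = `
[[rules]]
id = "demo-key"
regex = '''demo_[a-z0-9]{16}'''
keywords = ["demo_"]

[[allowlists]]
targetRules = ["demo-key"]
paths = ['''testdata/.*''']
`

func main() {
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(cfgTOML)); err != nil {
		panic(err)
	}
	var vc config.ViperConfig
	if err := viper.Unmarshal(&vc); err != nil {
		panic(err)
	}
	// Translate validates the rule and attaches the targeted allowlist to "demo-key"
	// instead of the global list.
	cfg, err := vc.Translate()
	if err != nil {
		panic(err)
	}
	fmt.Println(len(cfg.Rules["demo-key"].Allowlists)) // 1
}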

File diff suppressed because it is too large

cli/detect/config/rule.go (new normal file, 114 lines)
View File

@@ -0,0 +1,114 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/detect/regexp"
)
// Rules contain information that define details on how to detect secrets
type Rule struct {
// RuleID is a unique identifier for this rule
RuleID string
// Description is the description of the rule.
Description string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
Allowlists []*Allowlist
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
if r.validated {
return nil
}
// Ensure |id| is present.
if strings.TrimSpace(r.RuleID) == "" {
// Try to provide helpful context, since |id| is empty.
var context string
if r.Regex != nil {
context = ", regex: " + r.Regex.String()
} else if r.Path != nil {
context = ", path: " + r.Path.String()
} else if r.Description != "" {
context = ", description: " + r.Description
}
return fmt.Errorf("rule |id| is missing or empty" + context)
}
// Ensure the rule actually matches something.
if r.Regex == nil && r.Path == nil {
return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
}
// Ensure |secretGroup| works.
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
}
for _, allowlist := range r.Allowlists {
// This will probably never happen.
if allowlist == nil {
continue
}
if err := allowlist.Validate(); err != nil {
return fmt.Errorf("%s: %w", r.RuleID, err)
}
}
r.validated = true
return nil
}
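A quick sketch of the secretGroup guard in Validate (rule contents invented):

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/config"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	r := config.Rule{
		RuleID:      "demo",
		Regex:       regexp.MustCompile(`(demo)_[a-z]{4}`), // one capture group
		SecretGroup: 3,                                     // exceeds NumSubexp()
	}
	// Prints: demo: invalid regex secret group 3, max regex secret group 1
	fmt.Println(r.Validate())
}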

View File

@@ -20,35 +20,27 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
package config
import (
"os"
"strings"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const (
// https://cwe.mitre.org/data/definitions/798.html
CWE = "CWE-798"
CWE_DESCRIPTION = "Use of Hard-coded Credentials"
)
func Write(findings []Finding, cfg config.Config, ext string, reportPath string) error {
file, err := os.Create(reportPath)
if err != nil {
return err
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
ext = strings.ToLower(ext)
switch ext {
case ".json", "json":
err = writeJson(findings, file)
case ".csv", "csv":
err = writeCsv(findings, file)
case ".sarif", "sarif":
err = writeSarif(cfg, findings, file)
}
return err
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}

cli/detect/decoder.go (new normal file, 328 lines)
View File

@@ -0,0 +1,328 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"encoding/base64"
"fmt"
"regexp"
"unicode"
"github.com/Infisical/infisical-merge/detect/logging"
)
var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
base64.StdEncoding.DecodeString,
base64.RawURLEncoding.DecodeString,
}
func init() {
// Basically look for anything that isn't just letters
for _, c := range `0123456789+/-_` {
b64LikelyChars[c] = 1
}
}
// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
// The parent segment in a segment tree. If nil, it is a root segment
parent *EncodedSegment
// Relative start/end are the bounds of the encoded value in the current pass.
relativeStart int
relativeEnd int
// Absolute start/end refer to the bounds of the root segment in this segment
// tree
absoluteStart int
absoluteEnd int
// Decoded start/end refer to the bounds of the decoded value in the current
// pass. These can differ from relative values because decoding can shrink
// or grow the size of the segment.
decodedStart int
decodedEnd int
// This is the actual decoded content in the segment
decodedValue string
// This is the type of encoding
encoding string
}
// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}
// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
return start <= s.decodedEnd && end >= s.decodedStart
}
// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
// The match is within the bounds of the segment so we just return
// the absolute start and end of the root segment.
if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
return []int{
s.absoluteStart,
s.absoluteEnd,
}
}
// Since it overlaps one side and/or the other, we're going to have to adjust
// and climb parents until we're either at the root or we've determined
// we're fully inside one of the parent segments.
adjustedMatchIndex := make([]int, 2)
if matchIndex[0] < s.decodedStart {
// It starts before the encoded segment so adjust the start to match
// the location before it was decoded
matchStartDelta := s.decodedStart - matchIndex[0]
adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
} else {
// It starts within the encoded segment so set the bound to the
// relative start
adjustedMatchIndex[0] = s.relativeStart
}
if matchIndex[1] > s.decodedEnd {
// It ends after the encoded segment so adjust the end to match
// the location before it was decoded
matchEndDelta := matchIndex[1] - s.decodedEnd
adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
} else {
// It ends within the encoded segment so set the bound to the relative end
adjustedMatchIndex[1] = s.relativeEnd
}
// We're still not at a root segment so we'll need to keep on adjusting
if s.parent != nil {
return s.parent.adjustMatchIndex(adjustedMatchIndex)
}
return adjustedMatchIndex
}
// depth reports how many levels of decoding were needed (default is 1)
func (s EncodedSegment) depth() int {
depth := 1
// Climb the tree and increment the depth
for current := &s; current.parent != nil; current = current.parent {
depth++
}
return depth
}
// tags returns additional meta data tags related to the types of segments
func (s EncodedSegment) tags() []string {
return []string{
fmt.Sprintf("decoded:%s", s.encoding),
fmt.Sprintf("decode-depth:%d", s.depth()),
}
}
// Decoder decodes various types of data in place
type Decoder struct {
decodedMap map[string]string
}
// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
return &Decoder{
decodedMap: make(map[string]string),
}
}
// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
segments := d.findEncodedSegments(data, parentSegments)
if len(segments) > 0 {
result := bytes.NewBuffer(make([]byte, 0, len(data)))
relativeStart := 0
for _, segment := range segments {
result.WriteString(data[relativeStart:segment.relativeStart])
result.WriteString(segment.decodedValue)
relativeStart = segment.relativeEnd
}
result.WriteString(data[relativeStart:])
return result.String(), segments
}
return data, segments
}
// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
if len(data) == 0 {
return []EncodedSegment{}
}
matchIndices := b64Regexp.FindAllStringIndex(data, -1)
if matchIndices == nil {
return []EncodedSegment{}
}
segments := make([]EncodedSegment, 0, len(matchIndices))
// Keeps up with offsets from the text changing size as things are decoded
decodedShift := 0
for _, matchIndex := range matchIndices {
encodedValue := data[matchIndex[0]:matchIndex[1]]
if !isLikelyB64(encodedValue) {
d.decodedMap[encodedValue] = ""
continue
}
decodedValue, alreadyDecoded := d.decodedMap[encodedValue]
// We haven't decoded this yet, so go ahead and decode it
if !alreadyDecoded {
decodedValue = decodeValue(encodedValue)
d.decodedMap[encodedValue] = decodedValue
}
// Skip this segment because there was nothing to check
if len(decodedValue) == 0 {
continue
}
// Create a segment for the encoded data
segment := EncodedSegment{
relativeStart: matchIndex[0],
relativeEnd: matchIndex[1],
absoluteStart: matchIndex[0],
absoluteEnd: matchIndex[1],
decodedStart: matchIndex[0] + decodedShift,
decodedEnd: matchIndex[0] + decodedShift + len(decodedValue),
decodedValue: decodedValue,
encoding: "base64",
}
// Shift decoded start and ends based on size changes
decodedShift += len(decodedValue) - len(encodedValue)
// Adjust the absolute position of segments contained in parent segments
for _, parentSegment := range parentSegments {
if segment.isChildOf(parentSegment) {
segment.absoluteStart = parentSegment.absoluteStart
segment.absoluteEnd = parentSegment.absoluteEnd
segment.parent = &parentSegment
break
}
}
logging.Debug().Msgf("segment found: %#v", segment)
segments = append(segments, segment)
}
return segments
}
// decodeValue tries each decoder in turn and returns the first successful result
func decodeValue(encodedValue string) string {
for _, decoder := range decoders {
decodedValue, err := decoder(encodedValue)
if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
return string(decodedValue)
}
}
return ""
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII || b[i] < '\t' {
return false
}
}
return true
}
// isLikelyB64 quickly rejects strings such as method signatures, at the
// risk of missing roughly 1% of genuine base64 values
func isLikelyB64(s string) bool {
for _, c := range s {
if b64LikelyChars[c] != 0 {
return true
}
}
return false
}
// segmentWithDecodedOverlap finds a segment whose decoded bounds overlap the given range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
for _, segment := range encodedSegments {
if segment.decodedOverlaps(start, end) {
return &segment
}
}
return nil
}
func (s EncodedSegment) currentLine(currentRaw string) string {
start := 0
end := len(currentRaw)
// Find the start of the range
for i := s.decodedStart; i > -1; i-- {
c := currentRaw[i]
if c == '\n' {
start = i
break
}
}
// Find the end of the range
for i := s.decodedEnd; i < end; i++ {
c := currentRaw[i]
if c == '\n' {
end = i
break
}
}
return currentRaw[start:end]
}
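Both scans stop on the newline character itself, so the returned line keeps a leading '\n'; the expected Line values in the tests below (for example "\n awsToken := ...") reflect exactly this. A small package-internal sketch:
// currentRaw := "first\nsecond SECRET\nthird"
// For a segment whose decoded bounds sit inside the middle line,
// currentLine(currentRaw) returns "\nsecond SECRET".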

File diff suppressed because it is too large

@@ -1,754 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/report"
)
const configPath = "../testdata/config/"
const repoBasePath = "../testdata/repos/"
func TestDetect(t *testing.T) {
tests := []struct {
cfgName string
baselinePath string
fragment Fragment
// NOTE: for expected findings, all line numbers will be 0
// because line deltas are added _after_ the finding is created.
// I.e., if the finding is from a --no-git file, the line number will be
// increased by 1 in DetectFromFiles(). If the finding is from git,
// the line number will be increased by the patch delta.
expectedFindings []report.Finding
wantError error
}{
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \
\"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\"
// infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OKIA",
Match: "AKIALALEMEL33243OKIA",
File: "tmp.go",
Line: `awsToken := \"AKIALALEMEL33243OKIA\"`,
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.1464393,
},
},
},
{
cfgName: "escaped_character_group",
fragment: Fragment{
Raw: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "PyPI upload token",
Secret: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Match: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Line: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
File: "tmp.go",
RuleID: "pypi-upload-token",
Tags: []string{"key", "pypi"},
StartLine: 0,
EndLine: 0,
StartColumn: 1,
EndColumn: 86,
Entropy: 1.9606875,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: `awsToken := \"AKIALALEMEL33243OLIA\"`,
File: "tmp.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.0841837,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
Secret: "cafebabe:deadbeef",
Line: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
File: "tmp.sh",
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 60,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
Secret: "cafebabe:deadbeef",
File: "tmp.sh",
Line: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 21,
EndColumn: 74,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Sensitive URL",
Match: "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
Secret: "cafeb4b3:d3adb33f",
File: "tmp.sh",
Line: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
RuleID: "sidekiq-sensitive-url",
Tags: []string{},
Entropy: 2.984234,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 58,
},
},
},
{
cfgName: "allow_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_path",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_commit",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
CommitSHA: "allowthiscommit",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "Discord API key",
Match: "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.go",
RuleID: "discord-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 7,
EndColumn: 93,
},
},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Generic API Key",
Match: "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.py",
RuleID: "generic-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 22,
EndColumn: 93,
},
},
},
{
cfgName: "path_only",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Python Files",
Match: "file detected: tmp.py",
File: "tmp.py",
RuleID: "python-files-only",
Tags: []string{},
},
},
},
{
cfgName: "bad_entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: filepath.Join(configPath, "simple.toml"),
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_global_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "path_only",
baselinePath: ".baseline.json",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: ".baseline.json",
},
expectedFindings: []report.Finding{},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
cfg.Path = filepath.Join(configPath, tt.cfgName+".toml")
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
d := NewDetector(cfg)
d.baselinePath = tt.baselinePath
findings := d.Detect(tt.fragment)
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromGit tests the FromGit function
func TestFromGit(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "small"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 19,
EndColumn: 38,
Line: "\n awsToken := \"AKIALALEMEL33243OLIA\"",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
File: "main.go",
Date: "2021-11-02T23:37:53Z",
Commit: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587",
Author: "Zachary Rice",
Email: "zricer@protonmail.com",
Message: "Accidentally add a secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587:main.go:aws-access-key:20",
},
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
File: "foo/foo.go",
Date: "2021-11-02T23:48:06Z",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
{
source: filepath.Join(repoBasePath, "small"),
logOpts: "--all foo...",
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
Match: "AKIALALEMEL33243OLIA",
Date: "2021-11-02T23:48:06Z",
File: "foo/foo.go",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
findings, err := detector.DetectGit(tt.source, tt.logOpts, DetectType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestFromGitStaged(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "staged"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 7,
EndLine: 7,
StartColumn: 18,
EndColumn: 37,
Line: "\n\taws_token2 := \"AKIALALEMEL33243OLIA\" // this one is not",
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
File: "api/api.go",
SymlinkFile: "",
Commit: "",
Entropy: 3.0841837,
Author: "",
Email: "",
Date: "0001-01-01T00:00:00Z",
Message: "",
Tags: []string{
"key",
"AWS",
},
RuleID: "aws-access-key",
Fingerprint: "api/api.go:aws-access-key:7",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
detector.AddGitleaksIgnore(filepath.Join(tt.source, ".gitleaksignore"))
findings, err := detector.DetectGit(tt.source, tt.logOpts, ProtectStagedType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromFiles tests the FromFiles function
func TestFromFiles(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "nogit"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
SymlinkFile: "",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
{
source: filepath.Join(repoBasePath, "nogit", "main.go"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestDetectWithSymlinks(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "symlinks/file_symlink"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "Asymmetric Private Key",
StartLine: 1,
EndLine: 1,
StartColumn: 1,
EndColumn: 35,
Match: "-----BEGIN OPENSSH PRIVATE KEY-----",
Secret: "-----BEGIN OPENSSH PRIVATE KEY-----",
Line: "-----BEGIN OPENSSH PRIVATE KEY-----",
File: "../testdata/repos/symlinks/source_file/id_ed25519",
SymlinkFile: "../testdata/repos/symlinks/file_symlink/symlinked_id_ed25519",
RuleID: "apkey",
Tags: []string{"key", "AsymmetricPrivateKey"},
Entropy: 3.587164,
Fingerprint: "../testdata/repos/symlinks/source_file/id_ed25519:apkey:1",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
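// moveDotGit swaps each test repo's "dotGit" directory with a real ".git"
// directory (and back) around a test run; the fixtures are stored as
// "dotGit" because a nested .git directory cannot be committed to the
// outer repository.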
func moveDotGit(from, to string) error {
repoDirs, err := os.ReadDir("../testdata/repos")
if err != nil {
return err
}
for _, dir := range repoDirs {
if to == ".git" {
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
if os.IsNotExist(err) {
// don't want to delete the only copy of .git accidentally
continue
}
os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
}
if !dir.IsDir() {
continue
}
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
if os.IsNotExist(err) {
continue
}
err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
if err != nil {
return err
}
}
return nil
}

cli/detect/directory.go

@@ -0,0 +1,225 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/h2non/filetype"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
const maxPeekSize = 25 * 1_000 // 25kb
func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
for pa := range paths {
d.Sema.Go(func() error {
logger := logging.With().Str("path", pa.Path).Logger()
logger.Trace().Msg("Scanning path")
f, err := os.Open(pa.Path)
if err != nil {
if os.IsPermission(err) {
logger.Warn().Msg("Skipping file: permission denied")
return nil
}
return err
}
defer func() {
_ = f.Close()
}()
// Get file size
fileInfo, err := f.Stat()
if err != nil {
return err
}
fileSize := fileInfo.Size()
if d.MaxTargetMegaBytes > 0 {
rawLength := fileSize / 1000000
if rawLength > int64(d.MaxTargetMegaBytes) {
logger.Debug().
Int64("size", rawLength).
Msg("Skipping file: exceeds --max-target-megabytes")
return nil
}
}
var (
// Buffer to hold file chunks
reader = bufio.NewReaderSize(f, chunkSize)
buf = make([]byte, chunkSize)
totalLines = 0
)
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Only check the filetype at the start of file.
if totalLines == 0 {
// TODO: could other optimizations be introduced here?
if mimetype, err := filetype.Match(buf[:n]); err != nil {
return nil
} else if mimetype.MIME.Type == "application" {
return nil // skip binary files
}
}
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return readErr
}
// Count the number of newlines in this chunk
chunk := peekBuf.String()
linesInChunk := strings.Count(chunk, "\n")
totalLines += linesInChunk
fragment := Fragment{
Raw: chunk,
Bytes: peekBuf.Bytes(),
}
if pa.Symlink != "" {
fragment.SymlinkFile = pa.Symlink
}
if isWindows {
fragment.FilePath = filepath.ToSlash(pa.Path)
fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
fragment.WindowsFilePath = pa.Path
} else {
fragment.FilePath = pa.Path
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logger.Debug().Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
// need to add 1 since line counting starts at 1
finding.StartLine += (totalLines - linesInChunk) + 1
finding.EndLine += (totalLines - linesInChunk) + 1
d.AddFinding(finding)
}
if timer != nil {
timer.Stop()
timer = nil
}
}
if err != nil {
if err == io.EOF {
return nil
}
return err
}
}
})
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
return d.findings, nil
}
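A minimal caller sketch (hypothetical; it assumes a *Detector named d, and constructs ScanTarget with the exported Path field that the loop above reads as pa.Path):
paths := make(chan sources.ScanTarget, 1)
paths <- sources.ScanTarget{Path: "main.go"}
close(paths)
findings, err := d.DetectFiles(paths)
if err != nil {
logging.Err(err).Msg("scan failed")
}
logging.Info().Msgf("%d findings", len(findings))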
// readUntilSafeBoundary consumes |r| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
// This hopefully avoids splitting a match across chunk boundaries. (https://github.com/gitleaks/gitleaks/issues/1651)
func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
if peekBuf.Len() == 0 {
return nil
}
// Does the buffer end in consecutive newlines?
var (
data = peekBuf.Bytes()
lastChar = data[len(data)-1]
newlineCount = 0 // Tracks consecutive newlines
)
if isWhitespace(lastChar) {
for i := len(data) - 1; i >= 0; i-- {
lastChar = data[i]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
return nil
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
break
}
}
}
// If not, read ahead until we (hopefully) find some.
newlineCount = 0
for {
data = peekBuf.Bytes()
// Check if the last character is a newline.
lastChar = data[len(data)-1]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
break
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
newlineCount = 0 // Reset if a non-newline character is found
}
// Stop growing the buffer if it reaches maxPeekSize
if (peekBuf.Len() - n) >= maxPeekSize {
break
}
// Read additional data into a temporary buffer
b, err := r.ReadByte()
if err != nil {
if err == io.EOF {
break
}
return err
}
peekBuf.WriteByte(b)
}
return nil
}
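A package-internal sketch of the boundary behavior (bufio, bytes, and strings are already imported above):
r := bufio.NewReader(strings.NewReader("-of-secret\n\nnext chunk"))
peekBuf := bytes.NewBufferString("token = start") // simulated chunk tail
_ = readUntilSafeBoundary(r, peekBuf.Len(), 1024, peekBuf)
// peekBuf now holds "token = start-of-secret\n\n": the read stopped at the
// blank line, so the secret is not split across two fragments.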

cli/detect/git.go

@@ -0,0 +1,214 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"errors"
"fmt"
"net/url"
"os/exec"
"regexp"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.Finding, error) {
defer cmd.Wait()
var (
diffFilesCh = cmd.DiffFilesCh()
errCh = cmd.ErrCh()
)
// loop to range over both DiffFiles (stdout) and ErrCh (stderr)
for diffFilesCh != nil || errCh != nil {
select {
case gitdiffFile, open := <-diffFilesCh:
if !open {
diffFilesCh = nil
break
}
// skip binary files
if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
continue
}
// Check if commit is allowed
commitSHA := ""
if gitdiffFile.PatchHeader != nil {
commitSHA = gitdiffFile.PatchHeader.SHA
for _, a := range d.Config.Allowlists {
if ok, c := a.CommitAllowed(gitdiffFile.PatchHeader.SHA); ok {
logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
continue
}
}
}
d.addCommit(commitSHA)
d.Sema.Go(func() error {
for _, textFragment := range gitdiffFile.TextFragments {
if textFragment == nil {
return nil
}
fragment := Fragment{
Raw: textFragment.Raw(gitdiff.OpAdd),
CommitSHA: commitSHA,
FilePath: gitdiffFile.NewName,
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logging.Debug().
Str("commit", commitSHA[:7]).
Str("path", fragment.FilePath).
Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
d.AddFinding(augmentGitFinding(remote, finding, textFragment, gitdiffFile))
}
if timer != nil {
timer.Stop()
timer = nil
}
}
return nil
})
case err, open := <-errCh:
if !open {
errCh = nil
break
}
return d.findings, err
}
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
logging.Info().Msgf("%d commits scanned.", len(d.commitMap))
logging.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
return d.findings, nil
}
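Hypothetical wiring for a caller (the constructor name NewGitLogCmd is assumed, so check the sources package for the actual API), given a repo path source and a *Detector d:
gitCmd, err := sources.NewGitLogCmd(source, "")
if err != nil {
logging.Fatal().Err(err).Msg("could not start git log")
}
remote := NewRemoteInfo(scm.UnknownPlatform, source)
findings, err := d.DetectGit(gitCmd, remote)
if err == nil {
logging.Info().Msgf("%d findings", len(findings))
}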
type RemoteInfo struct {
Platform scm.Platform
Url string
}
func NewRemoteInfo(platform scm.Platform, source string) *RemoteInfo {
if platform == scm.NoPlatform {
return &RemoteInfo{Platform: platform}
}
remoteUrl, err := getRemoteUrl(source)
if err != nil {
if strings.Contains(err.Error(), "No remote configured") {
logging.Debug().Msg("skipping finding links: repository has no configured remote.")
platform = scm.NoPlatform
} else {
logging.Error().Err(err).Msg("skipping finding links: unable to parse remote URL")
}
goto End
}
if platform == scm.UnknownPlatform {
platform = platformFromHost(remoteUrl)
if platform == scm.UnknownPlatform {
logging.Info().
Str("host", remoteUrl.Hostname()).
Msg("Unknown SCM platform. Use --platform to include links in findings.")
} else {
logging.Debug().
Str("host", remoteUrl.Hostname()).
Str("platform", platform.String()).
Msg("SCM platform parsed from host")
}
}
End:
var rUrl string
if remoteUrl != nil {
rUrl = remoteUrl.String()
}
return &RemoteInfo{
Platform: platform,
Url: rUrl,
}
}
var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)
func getRemoteUrl(source string) (*url.URL, error) {
// This will return the first remote — typically, "origin".
cmd := exec.Command("git", "ls-remote", "--quiet", "--get-url")
if source != "." {
cmd.Dir = source
}
stdout, err := cmd.Output()
if err != nil {
var exitError *exec.ExitError
if errors.As(err, &exitError) {
return nil, fmt.Errorf("command failed (%d): %w, stderr: %s", exitError.ExitCode(), err, string(bytes.TrimSpace(exitError.Stderr)))
}
return nil, err
}
remoteUrl := string(bytes.TrimSpace(stdout))
if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
}
remoteUrl = strings.TrimSuffix(remoteUrl, ".git")
parsedUrl, err := url.Parse(remoteUrl)
if err != nil {
return nil, fmt.Errorf("unable to parse remote URL: %w", err)
}
// Remove any user info.
parsedUrl.User = nil
return parsedUrl, nil
}
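// Examples of getRemoteUrl's normalization:
//   git@github.com:Infisical/infisical.git -> https://github.com/Infisical/infisical
//   https://user@github.com/org/repo.git   -> https://github.com/org/repo (user info stripped)
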
func platformFromHost(u *url.URL) scm.Platform {
switch strings.ToLower(u.Hostname()) {
case "github.com":
return scm.GitHubPlatform
case "gitlab.com":
return scm.GitLabPlatform
case "dev.azure.com", "visualstudio.com":
return scm.AzureDevOpsPlatform
default:
return scm.UnknownPlatform
}
}

@@ -1,143 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package git
import (
"bufio"
"io"
"os/exec"
"path/filepath"
"strings"
"time"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/rs/zerolog/log"
)
var ErrEncountered bool
// GitLog returns a channel of gitdiff.File objects from the
// git log -p command for the given source.
func GitLog(source string, logOpts string) (<-chan *gitdiff.File, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
if logOpts != "" {
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
args = append(args, strings.Split(logOpts, " ")...)
cmd = exec.Command("git", args...)
} else {
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
"--full-history", "--all")
}
log.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
go listenForStdErr(stderr)
if err := cmd.Start(); err != nil {
return nil, err
}
// HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
time.Sleep(50 * time.Millisecond)
return gitdiff.Parse(cmd, stdout)
}
// GitDiff returns a channel of gitdiff.File objects from
// the git diff command for the given source.
func GitDiff(source string, staged bool) (<-chan *gitdiff.File, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", ".")
if staged {
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0",
"--staged", ".")
}
log.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
go listenForStdErr(stderr)
if err := cmd.Start(); err != nil {
return nil, err
}
// HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
time.Sleep(50 * time.Millisecond)
return gitdiff.Parse(cmd, stdout)
}
// listenForStdErr listens for stderr output from git and prints it to stdout
// then exits with exit code 1
func listenForStdErr(stderr io.ReadCloser) {
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
// if git throws one of the following errors:
//
// exhaustive rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// inexact rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// we skip exiting the program as git log -p/git diff will continue
// to send data to stdout and finish executing. This next bit of
// code prevents gitleaks from stopping mid scan if this error is
// encountered
if strings.Contains(scanner.Text(),
"exhaustive rename detection was skipped") ||
strings.Contains(scanner.Text(),
"inexact rename detection was skipped") ||
strings.Contains(scanner.Text(),
"you may want to set your diff.renameLimit") {
log.Warn().Msg(scanner.Text())
} else {
log.Error().Msgf("[git] %s", scanner.Text())
// asynchronously set this error flag to true so that we can
// capture a log message and exit with a non-zero exit code
// This value should get set before the `git` command exits so it's
// safe-ish, although I know I know, bad practice.
ErrEncountered = true
}
}
}

@@ -1,158 +0,0 @@
package git_test
// TODO: commenting out this test for now because it's flaky. Alternatives to consider to get this working:
// -- use `git stash` instead of `restore()`
// const repoBasePath = "../../testdata/repos/"
// const expectPath = "../../testdata/expected/"
// func TestGitLog(t *testing.T) {
// tests := []struct {
// source string
// logOpts string
// expected string
// }{
// {
// source: filepath.Join(repoBasePath, "small"),
// expected: filepath.Join(expectPath, "git", "small.txt"),
// },
// {
// source: filepath.Join(repoBasePath, "small"),
// expected: filepath.Join(expectPath, "git", "small-branch-foo.txt"),
// logOpts: "--all foo...",
// },
// }
// err := moveDotGit("dotGit", ".git")
// if err != nil {
// t.Fatal(err)
// }
// defer func() {
// if err = moveDotGit(".git", "dotGit"); err != nil {
// t.Fatal(err)
// }
// }()
// for _, tt := range tests {
// files, err := git.GitLog(tt.source, tt.logOpts)
// if err != nil {
// t.Error(err)
// }
// var diffSb strings.Builder
// for f := range files {
// for _, tf := range f.TextFragments {
// diffSb.WriteString(tf.Raw(gitdiff.OpAdd))
// }
// }
// expectedBytes, err := os.ReadFile(tt.expected)
// if err != nil {
// t.Error(err)
// }
// expected := string(expectedBytes)
// if expected != diffSb.String() {
// // write string builder to .got file using os.Create
// err = os.WriteFile(strings.Replace(tt.expected, ".txt", ".got.txt", 1), []byte(diffSb.String()), 0644)
// if err != nil {
// t.Error(err)
// }
// t.Error("expected: ", expected, "got: ", diffSb.String())
// }
// }
// }
// func TestGitDiff(t *testing.T) {
// tests := []struct {
// source string
// expected string
// additions string
// target string
// }{
// {
// source: filepath.Join(repoBasePath, "small"),
// expected: "this line is added\nand another one",
// additions: "this line is added\nand another one",
// target: filepath.Join(repoBasePath, "small", "main.go"),
// },
// }
// err := moveDotGit("dotGit", ".git")
// if err != nil {
// t.Fatal(err)
// }
// defer func() {
// if err = moveDotGit(".git", "dotGit"); err != nil {
// t.Fatal(err)
// }
// }()
// for _, tt := range tests {
// noChanges, err := os.ReadFile(tt.target)
// if err != nil {
// t.Error(err)
// }
// err = os.WriteFile(tt.target, []byte(tt.additions), 0644)
// if err != nil {
// restore(tt.target, noChanges, t)
// t.Error(err)
// }
// files, err := git.GitDiff(tt.source, false)
// if err != nil {
// restore(tt.target, noChanges, t)
// t.Error(err)
// }
// for f := range files {
// sb := strings.Builder{}
// for _, tf := range f.TextFragments {
// sb.WriteString(tf.Raw(gitdiff.OpAdd))
// }
// if sb.String() != tt.expected {
// restore(tt.target, noChanges, t)
// t.Error("expected: ", tt.expected, "got: ", sb.String())
// }
// }
// restore(tt.target, noChanges, t)
// }
// }
// func restore(path string, data []byte, t *testing.T) {
// err := os.WriteFile(path, data, 0644)
// if err != nil {
// t.Fatal(err)
// }
// }
// func moveDotGit(from, to string) error {
// repoDirs, err := os.ReadDir("../../testdata/repos")
// if err != nil {
// return err
// }
// for _, dir := range repoDirs {
// if to == ".git" {
// _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
// if os.IsNotExist(err) {
// // dont want to delete the only copy of .git accidentally
// continue
// }
// os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
// }
// if !dir.IsDir() {
// continue
// }
// _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
// if os.IsNotExist(err) {
// continue
// }
// err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
// fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
// if err != nil {
// return err
// }
// }
// return nil
// }

@@ -72,6 +72,7 @@ func location(fragment Fragment, matchIndex []int) Location {
location.endColumn = (end - prevNewLine)
location.endLineIndex = newLineByteIndex
}
prevNewLine = pair[0]
}

@@ -1,82 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"testing"
)
// TestGetLocation tests the getLocation function.
func TestGetLocation(t *testing.T) {
tests := []struct {
linePairs [][]int
start int
end int
wantLocation Location
}{
{
linePairs: [][]int{
{0, 39},
{40, 55},
{56, 57},
},
start: 35,
end: 38,
wantLocation: Location{
startLine: 1,
startColumn: 36,
endLine: 1,
endColumn: 38,
startLineIndex: 0,
endLineIndex: 40,
},
},
{
linePairs: [][]int{
{0, 39},
{40, 55},
{56, 57},
},
start: 40,
end: 44,
wantLocation: Location{
startLine: 2,
startColumn: 1,
endLine: 2,
endColumn: 4,
startLineIndex: 40,
endLineIndex: 56,
},
},
}
for _, test := range tests {
loc := location(Fragment{newlineIndices: test.linePairs}, []int{test.start, test.end})
if loc != test.wantLocation {
t.Errorf("\nstartLine %d\nstartColumn: %d\nendLine: %d\nendColumn: %d\nstartLineIndex: %d\nendlineIndex %d",
loc.startLine, loc.startColumn, loc.endLine, loc.endColumn, loc.startLineIndex, loc.endLineIndex)
t.Error("got", loc, "want", test.wantLocation)
}
}
}

cli/detect/logging/log.go

@@ -0,0 +1,72 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package logging
import (
"os"
"github.com/rs/zerolog"
)
var Logger zerolog.Logger
func init() {
// send all logs to stderr
Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).
Level(zerolog.InfoLevel).
With().Timestamp().Logger()
}
func With() zerolog.Context {
return Logger.With()
}
func Trace() *zerolog.Event {
return Logger.Trace()
}
func Debug() *zerolog.Event {
return Logger.Debug()
}
func Info() *zerolog.Event {
return Logger.Info()
}
func Warn() *zerolog.Event {
return Logger.Warn()
}
func Error() *zerolog.Event {
return Logger.Error()
}
func Err(err error) *zerolog.Event {
return Logger.Err(err)
}
func Fatal() *zerolog.Event {
return Logger.Fatal()
}
func Panic() *zerolog.Event {
return Logger.Panic()
}

cli/detect/reader.go

@@ -0,0 +1,149 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"errors"
"io"
"github.com/Infisical/infisical-merge/detect/report"
)
// DetectReader accepts an io.Reader and a buffer size for the reader in KB
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
findings := []report.Finding{}
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return findings, readErr
}
fragment := Fragment{
Raw: peekBuf.String(),
}
for _, finding := range d.Detect(fragment) {
findings = append(findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if err == io.EOF {
break
}
return findings, err
}
}
return findings, nil
}
// StreamDetectReader streams the detection results from the provided io.Reader.
// It reads data using the specified buffer size (in KB) and processes each chunk through
// the existing detection logic. Findings are sent down the returned findings channel as soon as
// they are detected, while a separate error channel signals a terminal error (or nil upon successful completion).
// The function returns two channels:
// - findingsCh: a receive-only channel that emits report.Finding objects as they are found.
// - errCh: a receive-only channel that emits a single final error (or nil if no error occurred)
// once the stream ends.
//
// Recommended Usage:
//
// Since there will only ever be a single value on the errCh, it is recommended to consume the findingsCh
// first. Once findingsCh is closed, the consumer should then read from errCh to determine
// if the stream completed successfully or if an error occurred.
//
// This design avoids the need for a select loop, keeping client code simple.
//
// Example:
//
// // Assume detector is an instance of *Detector and myReader implements io.Reader.
// findingsCh, errCh := detector.StreamDetectReader(myReader, 64) // using 64 KB buffer size
//
// // Process findings as they arrive.
// for finding := range findingsCh {
// fmt.Printf("Found secret: %+v\n", finding)
// }
//
// // After the findings channel is closed, check the final error.
// if err := <-errCh; err != nil {
// log.Fatalf("StreamDetectReader encountered an error: %v", err)
// } else {
// fmt.Println("Scanning completed successfully.")
// }
func (d *Detector) StreamDetectReader(r io.Reader, bufSize int) (<-chan report.Finding, <-chan error) {
findingsCh := make(chan report.Finding, 1)
errCh := make(chan error, 1)
go func() {
defer close(findingsCh)
defer close(errCh)
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
for {
n, err := reader.Read(buf)
if n > 0 {
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
errCh <- readErr
return
}
fragment := Fragment{Raw: peekBuf.String()}
for _, finding := range d.Detect(fragment) {
findingsCh <- finding
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if errors.Is(err, io.EOF) {
errCh <- nil
return
}
errCh <- err
return
}
}
}()
return findingsCh, errCh
}

@@ -0,0 +1,37 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build !gore2regex
package regexp
import (
re "regexp"
)
const Version = "stdlib"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}

@@ -0,0 +1,37 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build gore2regex
package regexp
import (
re "github.com/wasilibs/go-re2"
)
const Version = "github.com/wasilibs/go-re2"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}
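These two build-tagged files select the regexp engine at compile time: the stdlib wrapper is the default, and building with -tags gore2regex swaps in go-re2. A sketch of how a caller could confirm which engine was compiled in (assuming the wrapper package is imported under the name regexp):
fmt.Println("regexp engine:", regexp.Version) // prints "stdlib" or "github.com/wasilibs/go-re2"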

@@ -19,6 +19,7 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
const version = "v8.0.0"

@@ -26,16 +26,24 @@ import (
"encoding/csv"
"io"
"strconv"
"strings"
)
// writeCsv writes the list of findings to a writeCloser.
func writeCsv(f []Finding, w io.WriteCloser) error {
if len(f) == 0 {
type CsvReporter struct {
}
var _ Reporter = (*CsvReporter)(nil)
func (r *CsvReporter) Write(w io.WriteCloser, findings []Finding) error {
if len(findings) == 0 {
return nil
}
defer w.Close()
cw := csv.NewWriter(w)
err := cw.Write([]string{"RuleID",
var (
cw = csv.NewWriter(w)
err error
)
columns := []string{"RuleID",
"Commit",
"File",
"SymlinkFile",
@@ -50,12 +58,18 @@ func writeCsv(f []Finding, w io.WriteCloser) error {
"Date",
"Email",
"Fingerprint",
})
if err != nil {
"Tags",
}
// A miserable attempt at "omitempty" so tests don't yell at me.
if findings[0].Link != "" {
columns = append(columns, "Link")
}
if err = cw.Write(columns); err != nil {
return err
}
for _, f := range f {
err = cw.Write([]string{f.RuleID,
for _, f := range findings {
row := []string{f.RuleID,
f.Commit,
f.File,
f.SymlinkFile,
@@ -70,8 +84,13 @@ func writeCsv(f []Finding, w io.WriteCloser) error {
f.Date,
f.Email,
f.Fingerprint,
})
if err != nil {
strings.Join(f.Tags, " "),
}
if findings[0].Link != "" {
row = append(row, f.Link)
}
if err = cw.Write(row); err != nil {
return err
}
}

@@ -23,13 +23,17 @@
package report
import (
"math"
"strings"
)
// Finding contains information about strings that
// have been matched by a detection rule.
type Finding struct {
// Rule is the name of the rule that was matched
RuleID string
Description string
StartLine int
EndLine int
StartColumn int
@@ -47,6 +51,7 @@ type Finding struct {
File string
SymlinkFile string
Commit string
Link string `json:",omitempty"`
// Entropy is the shannon entropy of Value
Entropy float32
@@ -57,16 +62,31 @@ type Finding struct {
Message string
Tags []string
// Rule is the name of the rule that was matched
RuleID string
// unique identifer
// unique identifier
Fingerprint string
}
// Redact removes sensitive information from a finding.
func (f *Finding) Redact() {
f.Line = strings.Replace(f.Line, f.Secret, "REDACTED", -1)
f.Match = strings.Replace(f.Match, f.Secret, "REDACTED", -1)
f.Secret = "REDACTED"
func (f *Finding) Redact(percent uint) {
secret := maskSecret(f.Secret, percent)
if percent >= 100 {
secret = "REDACTED"
}
f.Line = strings.Replace(f.Line, f.Secret, secret, -1)
f.Match = strings.Replace(f.Match, f.Secret, secret, -1)
f.Secret = secret
}
func maskSecret(secret string, percent uint) string {
if percent > 100 {
percent = 100
}
length := float64(len(secret)) // avoid shadowing the builtin len
if length <= 0 {
return secret
}
prc := float64(100 - percent)
lth := int64(math.RoundToEven(length * prc / float64(100)))
return secret[:lth] + "..."
}
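// Worked example: Redact(75) on a 32-character secret keeps the first
// round(32 * 25 / 100) = 8 characters before the ellipsis:
//   "0123456789abcdef0123456789abcdef" -> "01234567..."
// Redact(100) (or anything higher) yields "REDACTED" instead.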

@@ -27,10 +27,12 @@ import (
"io"
)
func writeJson(findings []Finding, w io.WriteCloser) error {
if len(findings) == 0 {
findings = []Finding{}
}
type JsonReporter struct {
}
var _ Reporter = (*JsonReporter)(nil)
func (t *JsonReporter) Write(w io.WriteCloser, findings []Finding) error {
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(findings)

cli/detect/report/junit.go

@@ -0,0 +1,129 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"encoding/xml"
"fmt"
"io"
"strconv"
)
type JunitReporter struct {
}
var _ Reporter = (*JunitReporter)(nil)
func (r *JunitReporter) Write(w io.WriteCloser, findings []Finding) error {
testSuites := TestSuites{
TestSuites: getTestSuites(findings),
}
io.WriteString(w, xml.Header)
encoder := xml.NewEncoder(w)
encoder.Indent("", "\t")
return encoder.Encode(testSuites)
}
func getTestSuites(findings []Finding) []TestSuite {
return []TestSuite{
{
Failures: strconv.Itoa(len(findings)),
Name: "gitleaks",
Tests: strconv.Itoa(len(findings)),
TestCases: getTestCases(findings),
Time: "",
},
}
}
func getTestCases(findings []Finding) []TestCase {
testCases := []TestCase{}
for _, f := range findings {
testCase := TestCase{
Classname: f.Description,
Failure: getFailure(f),
File: f.File,
Name: getMessage(f),
Time: "",
}
testCases = append(testCases, testCase)
}
return testCases
}
func getFailure(f Finding) Failure {
return Failure{
Data: getData(f),
Message: getMessage(f),
Type: f.Description,
}
}
func getData(f Finding) string {
data, err := json.MarshalIndent(f, "", "\t")
if err != nil {
fmt.Println(err)
return ""
}
return string(data)
}
func getMessage(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected a secret in file %s, line %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine))
}
return fmt.Sprintf("%s has detected a secret in file %s, line %s, at commit %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine), f.Commit)
}
type TestSuites struct {
XMLName xml.Name `xml:"testsuites"`
TestSuites []TestSuite
}
type TestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Failures string `xml:"failures,attr"`
Name string `xml:"name,attr"`
Tests string `xml:"tests,attr"`
TestCases []TestCase `xml:"testcase"`
Time string `xml:"time,attr"`
}
type TestCase struct {
XMLName xml.Name `xml:"testcase"`
Classname string `xml:"classname,attr"`
Failure Failure `xml:"failure"`
File string `xml:"file,attr"`
Name string `xml:"name,attr"`
Time string `xml:"time,attr"`
}
type Failure struct {
XMLName xml.Name `xml:"failure"`
Data string `xml:",chardata"`
Message string `xml:"message,attr"`
Type string `xml:"type,attr"`
}
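For reference, a single finding with no commit information renders roughly as the following XML (values hypothetical; tab indentation per encoder.Indent above):

<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
	<testsuite failures="1" name="gitleaks" tests="1" time="">
		<testcase classname="AWS Access Key" file="auth.py" name="aws-access-key has detected a secret in file auth.py, line 3." time="">
			<failure message="aws-access-key has detected a secret in file auth.py, line 3." type="AWS Access Key">(the finding serialized as JSON by getData)</failure>
		</testcase>
	</testsuite>
</testsuites>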


@@ -19,30 +19,20 @@
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import "testing"
import (
"io"
)
func TestRedact(t *testing.T) {
tests := []struct {
findings []Finding
redact bool
}{
{
redact: true,
findings: []Finding{
{
Secret: "line containing secret",
Match: "secret",
},
}},
}
for _, test := range tests {
for _, f := range test.findings {
f.Redact()
if f.Secret != "REDACTED" {
t.Error("redact not redacting: ", f.Secret)
}
}
}
}
const (
// https://cwe.mitre.org/data/definitions/798.html
CWE = "CWE-798"
CWE_DESCRIPTION = "Use of Hard-coded Credentials"
StdoutReportPath = "-"
)
type Reporter interface {
Write(w io.WriteCloser, findings []Finding) error
}
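Any output format can now be plugged in by satisfying this single-method interface. A hypothetical reporter, sketched inside the report package (CountReporter is not part of this diff; assumes "fmt" is added to the imports):

// CountReporter is a hypothetical Reporter that writes only the finding count.
type CountReporter struct{}

var _ Reporter = (*CountReporter)(nil)

func (c *CountReporter) Write(w io.WriteCloser, findings []Finding) error {
    _, err := fmt.Fprintf(w, "findings: %d\n", len(findings))
    return err
}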


@@ -27,14 +27,20 @@ import (
"fmt"
"io"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/detect/config"
)
func writeSarif(cfg config.Config, findings []Finding, w io.WriteCloser) error {
type SarifReporter struct {
OrderedRules []config.Rule
}
var _ Reporter = (*SarifReporter)(nil)
func (r *SarifReporter) Write(w io.WriteCloser, findings []Finding) error {
sarif := Sarif{
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
Version: "2.1.0",
Runs: getRuns(cfg, findings),
Runs: r.getRuns(findings),
}
encoder := json.NewEncoder(w)
@@ -42,22 +48,22 @@ func writeSarif(cfg config.Config, findings []Finding, w io.WriteCloser) error {
return encoder.Encode(sarif)
}
func getRuns(cfg config.Config, findings []Finding) []Runs {
func (r *SarifReporter) getRuns(findings []Finding) []Runs {
return []Runs{
{
Tool: getTool(cfg),
Tool: r.getTool(),
Results: getResults(findings),
},
}
}
func getTool(cfg config.Config) Tool {
func (r *SarifReporter) getTool() Tool {
tool := Tool{
Driver: Driver{
Name: driver,
SemanticVersion: version,
InformationUri: "https://github.com/Infisical/infisical",
Rules: getRules(cfg),
InformationUri: "https://github.com/gitleaks/gitleaks",
Rules: r.getRules(),
},
}
@@ -73,26 +79,15 @@ func hasEmptyRules(tool Tool) bool {
return len(tool.Driver.Rules) == 0
}
func getRules(cfg config.Config) []Rules {
func (r *SarifReporter) getRules() []Rules {
// TODO for _, rule := range cfg.Rules {
var rules []Rules
for _, rule := range cfg.OrderedRules() {
shortDescription := ShortDescription{
Text: rule.Description,
}
if rule.Regex != nil {
shortDescription = ShortDescription{
Text: rule.Regex.String(),
}
} else if rule.Path != nil {
shortDescription = ShortDescription{
Text: rule.Path.String(),
}
}
for _, rule := range r.OrderedRules {
rules = append(rules, Rules{
ID: rule.RuleID,
Name: rule.Description,
Description: shortDescription,
ID: rule.RuleID,
Description: ShortDescription{
Text: rule.Description,
},
})
}
return rules
@@ -125,6 +120,9 @@ func getResults(findings []Finding) []Results {
Date: f.Date,
Author: f.Author,
},
Properties: Properties{
Tags: f.Tags,
},
}
results = append(results, r)
}
@@ -180,7 +178,6 @@ type FullDescription struct {
type Rules struct {
ID string `json:"id"`
Name string `json:"name"`
Description ShortDescription `json:"shortDescription"`
}
@@ -224,11 +221,16 @@ type Locations struct {
PhysicalLocation PhysicalLocation `json:"physicalLocation"`
}
type Properties struct {
Tags []string `json:"tags"`
}
type Results struct {
Message Message `json:"message"`
RuleId string `json:"ruleId"`
Locations []Locations `json:"locations"`
PartialFingerPrints `json:"partialFingerprints"`
Properties Properties `json:"properties"`
}
type Runs struct {
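With the new Properties field, every SARIF result now carries the finding's tags, e.g. "properties": {"tags": ["key", "AWS"]} (tag values hypothetical), so downstream tooling can filter results by tag without re-parsing rule metadata.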


@@ -0,0 +1,68 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"fmt"
"io"
"os"
"text/template"
"github.com/Masterminds/sprig/v3"
)
type TemplateReporter struct {
template *template.Template
}
var _ Reporter = (*TemplateReporter)(nil)
func NewTemplateReporter(templatePath string) (*TemplateReporter, error) {
if templatePath == "" {
return nil, fmt.Errorf("template path cannot be empty")
}
file, err := os.ReadFile(templatePath)
if err != nil {
return nil, fmt.Errorf("error reading file: %w", err)
}
templateText := string(file)
// TODO: Add helper functions like escaping for JSON, XML, etc.
t := template.New("custom")
t = t.Funcs(sprig.TxtFuncMap())
t, err = t.Parse(templateText)
if err != nil {
return nil, fmt.Errorf("error parsing file: %w", err)
}
return &TemplateReporter{template: t}, nil
}
// Write renders the findings using the user-provided template.
// https://www.digitalocean.com/community/tutorials/how-to-use-templates-in-go
func (t *TemplateReporter) Write(w io.WriteCloser, findings []Finding) error {
if err := t.template.Execute(w, findings); err != nil {
return err
}
return nil
}
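A short usage sketch (the template file name and contents are hypothetical; text functions such as upper come from sprig's TxtFuncMap, and findings is assumed to be a []report.Finding in scope at a call site like the scan command):

// findings.tmpl:
//   {{ range . }}{{ .RuleID | upper }} {{ .File }}:{{ .StartLine }}
//   {{ end }}
r, err := report.NewTemplateReporter("findings.tmpl")
if err != nil {
    logging.Fatal().Err(err).Send()
}
if err := r.Write(os.Stdout, findings); err != nil {
    logging.Fatal().Err(err).Send()
}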


@@ -0,0 +1,127 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"io/fs"
"os"
"path/filepath"
"runtime"
"github.com/fatih/semgroup"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
)
type ScanTarget struct {
Path string
Symlink string
}
var isWindows = runtime.GOOS == "windows"
func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, allowlists []*config.Allowlist) (<-chan ScanTarget, error) {
paths := make(chan ScanTarget)
s.Go(func() error {
defer close(paths)
return filepath.Walk(source,
func(path string, fInfo os.FileInfo, err error) error {
logger := logging.With().Str("path", path).Logger()
if err != nil {
if os.IsPermission(err) {
// This seems to only fail on directories at this stage.
logger.Warn().Msg("Skipping directory: permission denied")
return filepath.SkipDir
}
return err
}
// Empty; nothing to do here.
if fInfo.Size() == 0 {
return nil
}
// Unwrap symlinks, if |followSymlinks| is set.
scanTarget := ScanTarget{
Path: path,
}
if fInfo.Mode().Type() == fs.ModeSymlink {
if !followSymlinks {
logger.Debug().Msg("Skipping symlink")
return nil
}
realPath, err := filepath.EvalSymlinks(path)
if err != nil {
return err
}
realPathFileInfo, _ := os.Stat(realPath)
if realPathFileInfo.IsDir() {
logger.Warn().Str("target", realPath).Msg("Skipping symlinked directory")
return nil
}
scanTarget.Path = realPath
scanTarget.Symlink = path
}
// TODO: Also run this check against the resolved symlink?
var skip bool
for _, a := range allowlists {
skip = a.PathAllowed(path) ||
// TODO: Remove this in v9.
// This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
(isWindows && a.PathAllowed(filepath.ToSlash(path)))
if skip {
break
}
}
if fInfo.IsDir() {
// Directory
if skip {
logger.Debug().Msg("Skipping directory due to global allowlist")
return filepath.SkipDir
}
if fInfo.Name() == ".git" {
// Don't scan .git directories.
// TODO: Add this to the config allowlist, instead of hard-coding it.
return filepath.SkipDir
}
} else {
// File
if skip {
logger.Debug().Msg("Skipping file due to global allowlist")
return nil
}
paths <- scanTarget
}
return nil
})
})
return paths, nil
}
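A minimal, self-contained sketch of driving DirectoryTargets (the concurrency limit is arbitrary; nil allowlists means nothing is skipped beyond the hard-coded .git rule):

package main

import (
    "context"
    "fmt"

    "github.com/fatih/semgroup"

    "github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
    s := semgroup.NewGroup(context.Background(), 4)
    paths, err := sources.DirectoryTargets(".", s, false, nil)
    if err != nil {
        panic(err)
    }
    for target := range paths {
        fmt.Println(target.Path)
    }
    // Wait surfaces any error returned by the walk goroutine.
    if err := s.Wait(); err != nil {
        panic(err)
    }
}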

cli/detect/sources/git.go (new file, 211 lines)

@@ -0,0 +1,211 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"bufio"
"errors"
"io"
"os/exec"
"path/filepath"
"regexp"
"strings"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
)
var quotedOptPattern = regexp.MustCompile(`^(?:"[^"]+"|'[^']+')$`)
// GitCmd helps to work with Git's output.
type GitCmd struct {
cmd *exec.Cmd
diffFilesCh <-chan *gitdiff.File
errCh <-chan error
}
// NewGitLogCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from the channels until they are closed, then call
// `(*GitCmd).Wait()` to release resources.
func NewGitLogCmd(source string, logOpts string) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
if logOpts != "" {
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
// Ensure that the user-provided |logOpts| aren't wrapped in quotes.
// https://github.com/gitleaks/gitleaks/issues/1153
userArgs := strings.Split(logOpts, " ")
var quotedOpts []string
for _, element := range userArgs {
if quotedOptPattern.MatchString(element) {
quotedOpts = append(quotedOpts, element)
}
}
if len(quotedOpts) > 0 {
logging.Warn().Msgf("the following `--log-opts` values may not work as expected: %v\n\tsee https://github.com/gitleaks/gitleaks/issues/1153 for more information", quotedOpts)
}
args = append(args, userArgs...)
cmd = exec.Command("git", args...)
} else {
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
"--full-history", "--all")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// NewGitDiffCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from the channels until they are closed, then call
// `(*GitCmd).Wait()` to release resources.
func NewGitDiffCmd(source string, staged bool) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff", ".")
if staged {
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff",
"--staged", ".")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// DiffFilesCh returns a channel with *gitdiff.File.
func (c *GitCmd) DiffFilesCh() <-chan *gitdiff.File {
return c.diffFilesCh
}
// ErrCh returns a channel that could produce an error if there is something in stderr.
func (c *GitCmd) ErrCh() <-chan error {
return c.errCh
}
// Wait waits for the command to exit and waits for any copying to
// stdin or copying from stdout or stderr to complete.
//
// Wait also closes underlying stdout and stderr.
func (c *GitCmd) Wait() (err error) {
return c.cmd.Wait()
}
// listenForStdErr listens for stderr output from git, logs each line, and,
// if a genuine error was encountered, sends an error to errCh before closing it.
func listenForStdErr(stderr io.ReadCloser, errCh chan<- error) {
defer close(errCh)
var errEncountered bool
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
// if git throws one of the following errors:
//
// exhaustive rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// inexact rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// Auto packing the repository in background for optimum performance.
// See "git help gc" for manual housekeeping.
//
// we skip exiting the program as git log -p/git diff will continue
// to send data to stdout and finish executing. This next bit of
// code prevents gitleaks from stopping mid scan if this error is
// encountered
if strings.Contains(scanner.Text(),
"exhaustive rename detection was skipped") ||
strings.Contains(scanner.Text(),
"inexact rename detection was skipped") ||
strings.Contains(scanner.Text(),
"you may want to set your diff.renameLimit") ||
strings.Contains(scanner.Text(),
"See \"git help gc\" for manual housekeeping") ||
strings.Contains(scanner.Text(),
"Auto packing the repository in background for optimum performance") {
logging.Warn().Msg(scanner.Text())
} else {
logging.Error().Msgf("[git] %s", scanner.Text())
errEncountered = true
}
}
if errEncountered {
errCh <- errors.New("stderr is not empty")
return
}
}
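Per the doc comments above, callers must drain both channels before calling Wait; a self-contained sketch (the repository path is hypothetical):

package main

import (
    "fmt"

    "github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
    gitCmd, err := sources.NewGitLogCmd(".", "")
    if err != nil {
        panic(err)
    }
    for file := range gitCmd.DiffFilesCh() {
        if file.PatchHeader != nil {
            fmt.Println("commit:", file.PatchHeader.SHA, "file:", file.NewName)
        }
    }
    for err := range gitCmd.ErrCh() {
        fmt.Println("git stderr:", err)
    }
    if err := gitCmd.Wait(); err != nil {
        fmt.Println("git exited with error:", err)
    }
}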


@@ -26,20 +26,21 @@ import (
// "encoding/json"
"fmt"
"math"
"path/filepath"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/charmbracelet/lipgloss"
"github.com/Infisical/infisical-merge/report"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/rs/zerolog/log"
)
// augmentGitFinding updates the start and end line numbers of a finding to include the
// delta from the git diff
func augmentGitFinding(finding report.Finding, textFragment *gitdiff.TextFragment, f *gitdiff.File) report.Finding {
func augmentGitFinding(remote *RemoteInfo, finding report.Finding, textFragment *gitdiff.TextFragment, f *gitdiff.File) report.Finding {
if !strings.HasPrefix(finding.Match, "file detected") {
finding.StartLine += int(textFragment.NewPosition)
finding.EndLine += int(textFragment.NewPosition)
@@ -47,16 +48,76 @@ func augmentGitFinding(finding report.Finding, textFragment *gitdiff.TextFragmen
if f.PatchHeader != nil {
finding.Commit = f.PatchHeader.SHA
finding.Message = f.PatchHeader.Message()
if f.PatchHeader.Author != nil {
finding.Author = f.PatchHeader.Author.Name
finding.Email = f.PatchHeader.Author.Email
}
finding.Date = f.PatchHeader.AuthorDate.UTC().Format(time.RFC3339)
finding.Message = f.PatchHeader.Message()
// Results from `git diff` shouldn't have a link.
if finding.Commit != "" {
finding.Link = createScmLink(remote.Platform, remote.Url, finding)
}
}
return finding
}
var linkCleaner = strings.NewReplacer(
" ", "%20",
"%", "%25",
)
func createScmLink(scmPlatform scm.Platform, remoteUrl string, finding report.Finding) string {
if scmPlatform == scm.UnknownPlatform || scmPlatform == scm.NoPlatform {
return ""
}
// Clean the path.
var (
filePath = linkCleaner.Replace(finding.File)
ext = strings.ToLower(filepath.Ext(filePath))
)
switch scmPlatform {
case scm.GitHubPlatform:
link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
if ext == ".ipynb" || ext == ".md" {
link += "?plain=1"
}
if finding.StartLine != 0 {
link += fmt.Sprintf("#L%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("-L%d", finding.EndLine)
}
return link
case scm.GitLabPlatform:
link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
if finding.StartLine != 0 {
link += fmt.Sprintf("#L%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("-%d", finding.EndLine)
}
return link
case scm.AzureDevOpsPlatform:
link := fmt.Sprintf("%s/commit/%s?path=/%s", remoteUrl, finding.Commit, filePath)
// Add line information if applicable
if finding.StartLine != 0 {
link += fmt.Sprintf("&line=%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("&lineEnd=%d", finding.EndLine)
}
// This is a bit dirty, but Azure DevOps does not highlight the line when the lineStartColumn and lineEndColumn are not provided
link += "&lineStartColumn=1&lineEndColumn=10000000&type=2&lineStyle=plain&_a=files"
return link
default:
// This should never happen.
return ""
}
}
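As a worked example (remote URL and finding values hypothetical): for a GitHub remote https://github.com/acme/repo, a finding in auth.py at lines 3-5 of commit abc123 yields https://github.com/acme/repo/blob/abc123/auth.py#L3-L5; for .md and .ipynb files, ?plain=1 is inserted before the line fragment so the link points at the raw source rather than the rendered view.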
// shannonEntropy calculates the entropy of data using the formula defined here:
// https://en.wiktionary.org/wiki/Shannon_entropy
// Another way to think about what this is doing is calculating the number of bits
@@ -82,7 +143,7 @@ func shannonEntropy(data string) (entropy float64) {
}
// filter will dedupe and redact findings
func filter(findings []report.Finding, redact bool) []report.Finding {
func filter(findings []report.Finding, redact uint) []report.Finding {
var retFindings []report.Finding
for _, f := range findings {
include := true
@@ -96,15 +157,15 @@ func filter(findings []report.Finding, redact bool) []report.Finding {
genericMatch := strings.Replace(f.Match, f.Secret, "REDACTED", -1)
betterMatch := strings.Replace(fPrime.Match, fPrime.Secret, "REDACTED", -1)
log.Trace().Msgf("skipping %s finding (%s), %s rule takes precendence (%s)", f.RuleID, genericMatch, fPrime.RuleID, betterMatch)
logging.Trace().Msgf("skipping %s finding (%s), %s rule takes precedence (%s)", f.RuleID, genericMatch, fPrime.RuleID, betterMatch)
include = false
break
}
}
}
if redact {
f.Redact()
if redact > 0 {
f.Redact(redact)
}
if include {
retFindings = append(retFindings, f)
@@ -152,7 +213,7 @@ func printFinding(f report.Finding, noColor bool) {
lineEndIdx := matchInLineIDX + len(f.Match)
if len(f.Line)-1 <= lineEndIdx {
lineEndIdx = len(f.Line) - 1
lineEndIdx = len(f.Line)
}
lineEnd := f.Line[lineEndIdx:]
@@ -184,6 +245,9 @@ func printFinding(f report.Finding, noColor bool) {
fmt.Println("")
return
}
if len(f.Tags) > 0 {
fmt.Printf("%-12s %s\n", "Tags:", f.Tags)
}
fmt.Printf("%-12s %s\n", "File:", f.File)
fmt.Printf("%-12s %d\n", "Line:", f.StartLine)
if f.Commit == "" {
@@ -196,16 +260,12 @@ func printFinding(f report.Finding, noColor bool) {
fmt.Printf("%-12s %s\n", "Email:", f.Email)
fmt.Printf("%-12s %s\n", "Date:", f.Date)
fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
if f.Link != "" {
fmt.Printf("%-12s %s\n", "Link:", f.Link)
}
fmt.Println("")
}
func containsDigit(s string) bool {
for _, c := range s {
switch c {
case '1', '2', '3', '4', '5', '6', '7', '8', '9':
return true
}
}
return false
}
func isWhitespace(ch byte) bool {
return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
}
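For intuition on the shannonEntropy helper above: it computes H = -Σ p(c) log2 p(c) over the character frequencies of the input, i.e. the average number of bits per character. "aaaa" scores 0, "abab" scores exactly 1 bit per character, and a token drawn uniformly from 64 distinct characters approaches 6, which is why high entropy is a useful signal for candidate secrets.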


@@ -10,7 +10,7 @@ require (
github.com/creack/pty v1.1.21
github.com/denisbrodbeck/machineid v1.0.1
github.com/fatih/semgroup v1.2.0
github.com/gitleaks/go-gitdiff v0.8.0
github.com/gitleaks/go-gitdiff v0.9.1
github.com/h2non/filetype v1.1.3
github.com/infisical/go-sdk v0.5.92
github.com/infisical/infisical-kmip v0.3.5
@@ -42,6 +42,11 @@ require (
cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect
cloud.google.com/go/compute/metadata v0.4.0 // indirect
cloud.google.com/go/iam v1.1.11 // indirect
dario.cat/mergo v1.0.1 // indirect
github.com/BobuSumisu/aho-corasick v1.0.3 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.3.0 // indirect
github.com/Masterminds/sprig/v3 v3.3.0 // indirect
github.com/alessio/shellescape v1.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
github.com/aws/aws-sdk-go-v2 v1.27.2 // indirect
@@ -74,17 +79,21 @@ require (
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
github.com/googleapis/gax-go/v2 v2.12.5 // indirect
github.com/gosimple/slug v1.15.0 // indirect
github.com/gosimple/unidecode v1.0.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/huandu/xstrings v1.5.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/magiconair/properties v1.8.5 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/mapstructure v1.4.1 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/mtibben/percent v0.2.1 // indirect
github.com/muesli/mango v0.1.0 // indirect
github.com/muesli/mango-pflag v0.1.0 // indirect
@@ -98,8 +107,9 @@ require (
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/spf13/afero v1.6.0 // indirect
github.com/spf13/cast v1.3.1 // indirect
github.com/spf13/cast v1.7.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/subosito/gotenv v1.2.0 // indirect
github.com/wlynxg/anet v0.0.5 // indirect


@@ -44,13 +44,23 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BobuSumisu/aho-corasick v1.0.3 h1:uuf+JHwU9CHP2Vx+wAy6jcksJThhJS9ehR8a+4nPE9g=
github.com/BobuSumisu/aho-corasick v1.0.3/go.mod h1:hm4jLcvZKI2vRF2WDU1N4p/jpWtpOzp3nLmi9AzX/XE=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Infisical/go-keyring v1.0.2 h1:dWOkI/pB/7RocfSJgGXbXxLDcVYsdslgjEPmVhb+nl8=
github.com/Infisical/go-keyring v1.0.2/go.mod h1:LWOnn/sw9FxDW/0VY+jHFAfOFEe03xmwBVSfJnBowto=
github.com/Infisical/turn/v4 v4.0.1 h1:omdelNsnFfzS5cu86W5OBR68by68a8sva4ogR0lQQnw=
github.com/Infisical/turn/v4 v4.0.1/go.mod h1:pMMKP/ieNAG/fN5cZiN4SDuyKsXtNTr0ccN7IToA1zs=
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs=
github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0=
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
@@ -142,6 +152,8 @@ github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gitleaks/go-gitdiff v0.8.0 h1:7aExTZm+K/M/EQKOyYcub8rIAdWK6ONxPGuRzxmWW+0=
github.com/gitleaks/go-gitdiff v0.8.0/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF1uXV9ZyNvrA=
github.com/gitleaks/go-gitdiff v0.9.1 h1:ni6z6/3i9ODT685OLCTf+s/ERlWUNWQF4x1pvoNICw0=
github.com/gitleaks/go-gitdiff v0.9.1/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF1uXV9ZyNvrA=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@@ -273,6 +285,8 @@ github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
@@ -315,6 +329,8 @@ github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZ
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
@@ -324,6 +340,8 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh
github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
@@ -393,6 +411,8 @@ github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
@@ -402,6 +422,8 @@ github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=


@@ -32,10 +32,13 @@ import (
"strings"
"time"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/detect"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/Infisical/infisical-merge/report"
"github.com/manifoldco/promptui"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
@@ -240,9 +243,17 @@ var scanCmd = &cobra.Command{
log.Fatal().Err(err).Msg("")
}
// set redact flag
if detector.Redact, err = cmd.Flags().GetBool("redact"); err != nil {
redactFlag, err := cmd.Flags().GetBool("redact")
if err != nil {
log.Fatal().Err(err).Msg("")
}
if redactFlag {
detector.Redact = 100
} else {
detector.Redact = 0
}
if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
log.Fatal().Err(err).Msg("")
}
@@ -293,31 +304,49 @@ var scanCmd = &cobra.Command{
// start the detector scan
if noGit {
findings, err = detector.DetectFiles(source)
if err != nil {
// don't exit on error, just log it
log.Error().Err(err).Msg("")
}
paths, err := sources.DirectoryTargets(
source,
detector.Sema,
detector.FollowSymlinks,
detector.Config.Allowlists,
)
if err != nil {
logging.Fatal().Err(err).Send()
}
if findings, err = detector.DetectFiles(paths); err != nil {
// don't exit on error, just log it
logging.Error().Err(err).Msg("failed scan directory")
}
} else if fromPipe {
findings, err = detector.DetectReader(os.Stdin, 10)
if err != nil {
// log fatal to exit, no need to continue since a report
// will not be generated when scanning from a pipe...for now
log.Fatal().Err(err).Msg("")
}
if findings, err = detector.DetectReader(os.Stdin, 10); err != nil {
// log fatal to exit, no need to continue since a report
// will not be generated when scanning from a pipe...for now
logging.Fatal().Err(err).Msg("failed scan input from stdin")
}
} else {
var (
gitCmd *sources.GitCmd
scmPlatform scm.Platform
remote *detect.RemoteInfo
)
var logOpts string
logOpts, err = cmd.Flags().GetString("log-opts")
if err != nil {
log.Fatal().Err(err).Msg("")
}
findings, err = detector.DetectGit(source, logOpts, detect.DetectType)
if err != nil {
// don't exit on error, just log it
log.Error().Err(err).Msg("")
}
if gitCmd, err = sources.NewGitLogCmd(source, logOpts); err != nil {
logging.Fatal().Err(err).Msg("could not create Git cmd")
}
if scmPlatform, err = scm.PlatformFromString("github"); err != nil {
logging.Fatal().Err(err).Send()
}
remote = detect.NewRemoteInfo(scmPlatform, source)
if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
// don't exit on error, just log it
logging.Error().Err(err).Msg("failed to scan Git repository")
}
}
// log info about the scan
if err == nil {
log.Info().Msgf("scan completed in %s", FormatDuration(time.Since(start)))
@@ -341,9 +370,7 @@ var scanCmd = &cobra.Command{
reportPath, _ := cmd.Flags().GetString("report-path")
ext, _ := cmd.Flags().GetString("report-format")
if reportPath != "" {
if err := report.Write(findings, cfg, ext, reportPath); err != nil {
log.Fatal().Err(err).Msg("could not write")
}
reportFindings(findings, reportPath, ext, &cfg)
}
if err != nil {
@@ -375,7 +402,6 @@ var scanGitChangesCmd = &cobra.Command{
cfg.Path, _ = cmd.Flags().GetString("config")
exitCode, _ := cmd.Flags().GetInt("exit-code")
staged, _ := cmd.Flags().GetBool("staged")
start := time.Now()
// Setup detector
detector := detect.NewDetector(cfg)
@@ -397,9 +423,17 @@ var scanGitChangesCmd = &cobra.Command{
log.Fatal().Err(err).Msg("")
}
// set redact flag
if detector.Redact, err = cmd.Flags().GetBool("redact"); err != nil {
redactFlag, err := cmd.Flags().GetBool("redact")
if err != nil {
log.Fatal().Err(err).Msg("")
}
if redactFlag {
detector.Redact = 100
} else {
detector.Redact = 0
}
if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
log.Fatal().Err(err).Msg("")
}
@@ -414,32 +448,22 @@ var scanGitChangesCmd = &cobra.Command{
}
}
// get log options for git scan
logOpts, err := cmd.Flags().GetString("log-opts")
if err != nil {
log.Fatal().Err(err).Msg("")
}
log.Info().Msgf("scanning for exposed secrets...")
// start git scan
var findings []report.Finding
if staged {
findings, err = detector.DetectGit(source, logOpts, detect.ProtectStagedType)
} else {
findings, err = detector.DetectGit(source, logOpts, detect.ProtectType)
}
if err != nil {
// don't exit on error, just log it
log.Error().Err(err).Msg("")
}
// log info about the scan
log.Info().Msgf("scan completed in %s", FormatDuration(time.Since(start)))
if len(findings) != 0 {
log.Warn().Msgf("leaks found: %d", len(findings))
} else {
log.Info().Msg("no leaks found")
}
var (
findings []report.Finding
gitCmd *sources.GitCmd
remote *detect.RemoteInfo
)
if gitCmd, err = sources.NewGitDiffCmd(source, staged); err != nil {
logging.Fatal().Err(err).Msg("could not create Git diff cmd")
}
remote = &detect.RemoteInfo{Platform: scm.NoPlatform}
if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
// don't exit on error, just log it
logging.Error().Err(err).Msg("failed to scan Git repository")
}
Telemetry.CaptureEvent("cli-command:scan git-changes", posthog.NewProperties().Set("risks", len(findings)).Set("version", util.CLI_VERSION))
@@ -447,9 +471,7 @@ var scanGitChangesCmd = &cobra.Command{
reportPath, _ := cmd.Flags().GetString("report-path")
ext, _ := cmd.Flags().GetString("report-format")
if reportPath != "" {
if err = report.Write(findings, cfg, ext, reportPath); err != nil {
log.Fatal().Err(err).Msg("")
}
reportFindings(findings, reportPath, ext, &cfg)
}
if len(findings) != 0 {
os.Exit(exitCode)
@@ -457,6 +479,36 @@ var scanGitChangesCmd = &cobra.Command{
},
}
func reportFindings(findings []report.Finding, reportPath string, ext string, cfg *config.Config) {
var reporter report.Reporter
switch ext {
case "csv":
reporter = &report.CsvReporter{}
case "json":
reporter = &report.JsonReporter{}
case "junit":
reporter = &report.JunitReporter{}
case "sarif":
reporter = &report.SarifReporter{
OrderedRules: cfg.GetOrderedRules(),
}
default:
logging.Fatal().Msgf("unknown report format %s", ext)
}
file, err := os.Create(reportPath)
if err != nil {
log.Fatal().Err(err).Msg("could not create file")
}
if err := reporter.Write(file, findings); err != nil {
log.Fatal().Err(err).Msg("could not write")
}
}
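A call site for this helper, as used by both scan commands above, looks like reportFindings(findings, "leaks.sarif", "sarif", &cfg); the accepted format strings are exactly the switch cases: csv, json, junit, and sarif. Any unknown format is fatal rather than silently ignored.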
func fileExists(fileName string) bool {
// check for a .infisicalignore file
info, err := os.Stat(fileName)


@@ -1,108 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"os"
"path/filepath"
"strings"
"testing"
)
func TestWriteCSV(t *testing.T) {
tests := []struct {
findings []Finding
testReportName string
expected string
wantEmpty bool
}{
{
testReportName: "simple",
expected: filepath.Join(expectPath, "report", "csv_simple.csv"),
findings: []Finding{
{
RuleID: "test-rule",
Match: "line containing secret",
Secret: "a secret",
StartLine: 1,
EndLine: 2,
StartColumn: 1,
EndColumn: 2,
Message: "opps",
File: "auth.py",
SymlinkFile: "",
Commit: "0000000000000000",
Author: "John Doe",
Email: "johndoe@gmail.com",
Date: "10-19-2003",
Fingerprint: "fingerprint",
},
}},
{
wantEmpty: true,
testReportName: "empty",
expected: filepath.Join(expectPath, "report", "this_should_not_exist.csv"),
findings: []Finding{}},
}
for _, test := range tests {
tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".csv"))
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
err = writeCsv(test.findings, tmpfile)
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
got, err := os.ReadFile(tmpfile.Name())
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
if test.wantEmpty {
if len(got) > 0 {
t.Errorf("Expected empty file, got %s", got)
}
os.Remove(tmpfile.Name())
continue
}
want, err := os.ReadFile(test.expected)
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
if string(got) != string(want) {
err = os.WriteFile(strings.Replace(test.expected, ".csv", ".got.csv", 1), got, 0644)
if err != nil {
t.Error(err)
}
t.Errorf("got %s, want %s", string(got), string(want))
}
os.Remove(tmpfile.Name())
}
}


@@ -1,111 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"os"
"path/filepath"
"strings"
"testing"
)
func TestWriteJSON(t *testing.T) {
tests := []struct {
findings []Finding
testReportName string
expected string
wantEmpty bool
}{
{
testReportName: "simple",
expected: filepath.Join(expectPath, "report", "json_simple.json"),
findings: []Finding{
{
Description: "",
RuleID: "test-rule",
Match: "line containing secret",
Secret: "a secret",
StartLine: 1,
EndLine: 2,
StartColumn: 1,
EndColumn: 2,
Message: "opps",
File: "auth.py",
SymlinkFile: "",
Commit: "0000000000000000",
Author: "John Doe",
Email: "johndoe@gmail.com",
Date: "10-19-2003",
Tags: []string{},
},
}},
{
testReportName: "empty",
expected: filepath.Join(expectPath, "report", "empty.json"),
findings: []Finding{}},
}
for _, test := range tests {
// create tmp file using os.TempDir()
tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".json"))
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
err = writeJson(test.findings, tmpfile)
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
got, err := os.ReadFile(tmpfile.Name())
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
if test.wantEmpty {
if len(got) > 0 {
os.Remove(tmpfile.Name())
t.Errorf("Expected empty file, got %s", got)
}
os.Remove(tmpfile.Name())
continue
}
want, err := os.ReadFile(test.expected)
if err != nil {
os.Remove(tmpfile.Name())
t.Error(err)
}
if string(got) != string(want) {
err = os.WriteFile(strings.Replace(test.expected, ".json", ".got.json", 1), got, 0644)
if err != nil {
t.Error(err)
}
t.Errorf("got %s, want %s", string(got), string(want))
}
os.Remove(tmpfile.Name())
}
}

Some files were not shown because too many files have changed in this diff.