Compare commits

...

114 Commits

Author SHA1 Message Date
Scott Wilson
76864ababa fix: correct doc casing 2025-05-12 18:37:05 -07:00
Scott Wilson
d17d40ebd9 improvements: refactor org security settings tab to sso page and update doc images 2025-05-12 17:18:40 -07:00
Daniel Hougaard
07df6803a5 Merge pull request #3581 from Infisical/daniel/unblock-dev
fix: move cli install to aws
2025-05-12 18:54:55 +04:00
Daniel Hougaard
a09d0e8948 fix: move cli install to aws 2025-05-12 18:47:02 +04:00
Daniel Hougaard
ee598560ec Merge pull request #3572 from Infisical/daniel/fix-secret-scaninng-public-keys
fix: update secret scanner to latest version
2025-05-12 11:13:51 +04:00
carlosmonastyrski
c629705c9c Merge pull request #3535 from Infisical/feat/addGroupsToSshHosts
feat(ssh-hosts): Add groups to ssh hosts allowed principals
2025-05-09 22:52:35 -03:00
Daniel Hougaard
be10f6e52a Merge pull request #3579 from Infisical/daniel/horizontal-scaling-ms-teams
fix(workflow-integrations): microsoft teams scaling issues
2025-05-10 01:11:37 +04:00
Scott Wilson
40c5ff0ad6 Merge pull request #3578 from Infisical/project-template-improvements
improvement(project-templates): Project templates UI improvements
2025-05-09 13:50:50 -07:00
Scott Wilson
8ecb5ca7bc remove extra margin 2025-05-09 13:47:28 -07:00
Daniel Hougaard
ab6a2b7dbb fix(workflow-integrations): microsoft teams scaling issues 2025-05-10 00:47:22 +04:00
carlosmonastyrski
81bfc04e7c Trim hostname input on SSH Host permission form and fix getWorkspaceUsers key invalidation 2025-05-09 17:10:01 -03:00
x032205
a757fceaed Merge pull request #3577 from Infisical/feat/docs-support-openapi-titles
feat(docs): Support OpenAPI titles for Zod descriptions
2025-05-09 15:49:49 -04:00
Scott Wilson
ce8e18f620 improvement: address feedback 2025-05-09 12:40:07 -07:00
Scott Wilson
d09c964647 fix: use tanstack router link 2025-05-09 12:32:37 -07:00
Scott Wilson
eeddbde600 improvement: update org project templates relocation banner 2025-05-09 12:23:05 -07:00
Daniel Hougaard
859b643e43 Delete ssh 2025-05-09 22:49:39 +04:00
Daniel Hougaard
91f71e0ef6 feat(cli): upgrade secret scanner 2025-05-09 22:48:56 +04:00
x032205
4e9e31eeb7 added credit 2025-05-09 13:45:36 -04:00
x032205
f6bc99b964 support openapi titles for zod description 2025-05-09 13:40:15 -04:00
Scott Wilson
679eb9dffc fix: correct project templates empty table display if feature is disabled 2025-05-09 10:14:03 -07:00
x032205
0754ae3aaf Merge pull request #3576 from Infisical/ENG-2692
feat(api): Rate limit for all email-sending endpoints
2025-05-09 11:37:08 -04:00
x032205
519a0c1bdf Merge branch 'main' into ENG-2692 2025-05-09 11:31:05 -04:00
x032205
e9d8979cf4 add rate limit to all email-sending endpoints 2025-05-09 11:29:53 -04:00
Maidul Islam
486d975fa0 Merge pull request #3575 from akhilmhdh/fix/octokit
feat: resolved esm error in octokit
2025-05-09 10:50:25 -04:00
=
42c49949b4 feat: resolved esm error in octokit 2025-05-09 20:13:08 +05:30
carlosmonastyrski
aea44088db Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-09 09:21:29 -03:00
Daniel Hougaard
e584c9ea95 test 2025-05-09 09:04:30 +04:00
Maidul Islam
428c60880a Update jumpcloud.mdx 2025-05-09 00:28:20 -04:00
Maidul Islam
2179b9a4d7 Update general.mdx 2025-05-09 00:27:43 -04:00
Daniel Hougaard
1921763fa8 fix: update to upcoming version 2025-05-09 04:43:13 +04:00
Daniel Hougaard
5408859a18 fix: update gitleaks/go-diff to latest version 2025-05-09 04:40:09 +04:00
Daniel Hougaard
8dfc0cfbe0 Merge pull request #3571 from Infisical/daniel/identities-ldap-docs
docs(identities): ldap auth
2025-05-09 04:15:11 +04:00
Daniel Hougaard
060199e58c fix: machine identities -> identities 2025-05-09 04:13:11 +04:00
Daniel Hougaard
3b9b17f8d5 requested changes 2025-05-09 04:12:21 +04:00
Daniel Hougaard
6addde2650 docs(identities): ldap auth 2025-05-09 03:44:15 +04:00
Tuan Dang
a6b3be72a9 Make minor PR adjustments 2025-05-08 14:02:25 -07:00
Daniel Hougaard
394bd6755f Merge pull request #3566 from Infisical/daniel/identity-ldap-auth
feat(identities): ldap auth
2025-05-08 23:53:47 +04:00
Daniel Hougaard
c21873ac4b Update identity-ldap-auth-router.ts 2025-05-08 23:48:08 +04:00
Daniel Hougaard
64b8c1a2de added filter check 2025-05-08 23:44:30 +04:00
Daniel Hougaard
de443c5ea1 fix: requested changes 2025-05-08 23:20:18 +04:00
Daniel Hougaard
a3b7df4e6b fix: addressed requested changes 2025-05-08 23:13:46 +04:00
Sheen Capadngan
a4b648ad95 misc: addressed tooltip display issue 2025-05-08 21:24:26 +08:00
x032205
04a8931cf6 Merge pull request #3568 from Infisical/pki-merge-fix
small migration fix
2025-05-08 01:23:36 -04:00
x032205
ab0b8c0f10 migration tweak 2025-05-08 01:22:34 -04:00
x032205
258836a605 migration tweak 2025-05-08 01:17:47 -04:00
Daniel Hougaard
0b31d7f860 feat(identities): ldap auth, requested changes 2025-05-08 08:14:29 +04:00
Daniel Hougaard
5c91d380b8 feat(identities): ldap auth 2025-05-08 07:55:22 +04:00
Daniel Hougaard
b908893a68 feat(identities): ldap auth 2025-05-08 07:49:23 +04:00
Maidul Islam
4d0275e589 Merge pull request #3565 from Infisical/remove-migration-folder
Remove unused migration folder
2025-05-07 20:53:51 -04:00
Scott Wilson
befd77eec2 Merge pull request #3563 from Infisical/policy-selection-modal
improvement(project-roles): Add Policy Selection Modal
2025-05-07 16:49:05 -07:00
Daniel Hougaard
1d44774913 Merge pull request #3564 from Infisical/daniel/generator-doc-imp
docs(k8s/generators): improve documentation
2025-05-08 03:20:30 +04:00
Maidul Islam
984552eea9 rephrase generator overview 2025-05-07 19:18:45 -04:00
Scott Wilson
b6a957a30d fix: select all apply to filtered policies only, skip replacing existing policies 2025-05-07 15:34:34 -07:00
Daniel Hougaard
2f4efad8ae Update infisical-push-secret-crd.mdx 2025-05-08 01:47:00 +04:00
Scott Wilson
16c476d78c fix: correct policies typos 2025-05-07 14:09:32 -07:00
Scott Wilson
68c549f1c6 improvement: add select polices modal 2025-05-07 13:50:27 -07:00
Scott Wilson
0610416677 Merge pull request #3550 from Infisical/project-specific-default-roles
Improvements: Refactor Project Templates and Project Type Policy Filtering/Specific Roles
2025-05-07 12:50:01 -07:00
Daniel Hougaard
4a37dc9cb7 Merge pull request #3561 from Infisical/helm-update-v0.9.2
Update Helm chart to version v0.9.2
2025-05-07 22:37:58 +04:00
DanielHougaard
7e432a4297 Update Helm chart to version v0.9.2 2025-05-07 18:27:13 +00:00
Scott Wilson
794fc9c2a2 improvements: address feedback 2025-05-07 11:23:51 -07:00
Daniel Hougaard
d4e5d2c7ed Merge pull request #3540 from Infisical/daniel/generators
feat(k8s): generator support
2025-05-07 22:10:22 +04:00
Sheen
0c2e0bb0f9 Merge pull request #3560 from Infisical/misc/add-default-old-space-config
misc: add default old space config
2025-05-08 01:46:46 +08:00
Sheen Capadngan
e2a414ffff misc: add default old space config 2025-05-08 01:39:56 +08:00
=
0ca3c2bb68 feat: added password generator crd to samples 2025-05-07 22:50:49 +05:30
Daniel Hougaard
083581b51a Merge pull request #3554 from Infisical/feat/new-project-properties-for-tf-management
feat: adjustments to properties and validation
2025-05-07 20:22:23 +04:00
x032205
40e976133c Merge pull request #3528 from Infisical/ENG-2647
feat(admin): Invalidate Cache
2025-05-07 11:50:57 -04:00
x032205
ad2f002822 Merge pull request #3558 from Infisical/pki-docs-patch
docs fix
2025-05-07 11:06:24 -04:00
x032205
8842dfe5d1 docs fix 2025-05-07 11:01:19 -04:00
Sheen
b1eea4ae9c Merge pull request #3556 from Infisical/misc/remove-unnecessary-key-encryption-for-service-token
misc: removed unnecessary key encryption for service token
2025-05-07 16:41:51 +08:00
Sheen Capadngan
a8e0a8aca3 misc: removed unnecessary key encryption for service token 2025-05-07 16:36:10 +08:00
=
b37058d0e2 feat: switched to is fetching 2025-05-07 11:30:31 +05:30
x032205
334a05d5f1 fix lint 2025-05-06 18:08:08 -04:00
x032205
12c813928c fix polling 2025-05-06 18:00:24 -04:00
x032205
521fef6fca Merge branch 'main' into ENG-2647 2025-05-06 17:00:40 -04:00
=
8f8236c445 feat: simplied the caching panel logic and fixed permission issue 2025-05-07 01:37:26 +05:30
x032205
3cf5c534ff Merge pull request #3553 from Infisical/pki-docs-patch
patch(docs): mint.json update
2025-05-06 15:54:31 -04:00
Sheen Capadngan
2b03c295f9 feat: adjustments to properties and validation 2025-05-07 03:51:22 +08:00
x032205
4fc7a52941 patch(docs): mint.json update 2025-05-06 15:38:10 -04:00
Scott Wilson
0ded2e51ba fix: filter project templates polices by type 2025-05-06 11:59:59 -07:00
Maidul Islam
0d2b3adec7 Merge pull request #3551 from Infisical/maidul98-patch-11
Add Conduct and Enforcement to bug bounty
2025-05-06 14:50:17 -04:00
Maidul Islam
e695203c05 Update docs/internals/bug-bounty.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-05-06 14:49:38 -04:00
Maidul Islam
f9d76aae5d Update bug-bounty.mdx 2025-05-06 14:46:42 -04:00
Daniel Hougaard
1c280759d1 Merge pull request #3548 from Infisical/daniel/self-hosted-secret-scanning
docs: secret scanning self hosted documentation
2025-05-06 22:27:00 +04:00
Scott Wilson
4562f57b54 improvements: refactor project templates, filter policies by project type, project type specific roles 2025-05-06 11:26:09 -07:00
Daniel Hougaard
6005dce44d fix: allow secret scanning from all self-hosted orgs 2025-05-06 22:16:29 +04:00
carlosmonastyrski
bf85df7e36 Fix SSH table UI user groups issues 2025-05-06 08:37:19 -03:00
Daniel Hougaard
f7f7d2d528 fix: typo 2025-05-06 08:24:59 +04:00
Daniel Hougaard
57342cf2a0 docs: secret scanning self hosted documentation 2025-05-06 08:14:05 +04:00
x032205
86bb2659b5 small ui tweaks 2025-05-05 16:07:04 -04:00
x032205
dc59f226b6 swapped polling to react query 2025-05-05 15:58:45 -04:00
x032205
9175c1dffa Merge branch 'main' into ENG-2647 2025-05-05 15:27:25 -04:00
carlosmonastyrski
b9070a8fa3 Merge branch 'main' into feat/addGroupsToSshHosts 2025-05-05 14:51:01 -03:00
Daniel Hougaard
1e4dfd0c7c fix(k8s/generators): update base crds 2025-05-05 02:35:57 +04:00
Daniel Hougaard
34b7d28e2f requested changes 2025-05-05 02:30:59 +04:00
Daniel Hougaard
245a348517 Update generators.go 2025-05-05 02:13:12 +04:00
Daniel Hougaard
e0fc582e2e docs(k8s/generators): docs and minor fix 2025-05-05 02:09:21 +04:00
Daniel Hougaard
68ef897b6a fix: logs and rbac 2025-05-05 01:39:30 +04:00
Daniel Hougaard
1b060e76de Update kustomization.yaml 2025-05-05 01:08:22 +04:00
Daniel Hougaard
9f7599b2a1 feat(k8s): generators 2025-05-05 00:59:11 +04:00
x
9cbe70a6f3 lint fixes 2025-05-02 20:10:30 -04:00
x
f49fb534ab review fixes 2025-05-02 19:50:55 -04:00
x
6eea4c8364 frontend tweaks 2025-05-02 19:20:02 -04:00
x
1e206ee441 Merge branch 'main' into ENG-2647 2025-05-02 19:03:08 -04:00
x
85c1a1081e checkpoint 2025-05-02 18:43:07 -04:00
x
877485b45a queue job 2025-05-02 15:23:35 -04:00
x
d13e685a81 emphasize that secrets cache is encrypted in frontend 2025-05-02 13:04:22 -04:00
x
9849a5f136 switched to applyJitter functions 2025-05-02 13:00:37 -04:00
x
26773a1444 merge 2025-05-02 12:57:28 -04:00
carlosmonastyrski
3ea450e94a Add groups to ssh hosts allowed principals fix delete principal row issue 2025-05-02 13:41:53 -03:00
carlosmonastyrski
7d0574087c Add groups to ssh hosts allowed principals bot improvements 2025-05-02 13:36:05 -03:00
carlosmonastyrski
36916704be Add groups to ssh hosts allowed principals 2025-05-02 11:14:43 -03:00
x
a6f280197b spelling fix 2025-05-01 17:37:54 -04:00
x
346d2f213e improvements + review fixes 2025-05-01 17:33:24 -04:00
x
9f1ac77afa invalidate cache 2025-05-01 16:34:29 -04:00
369 changed files with 13743 additions and 7751 deletions

View File

@@ -133,8 +133,8 @@ RUN apt-get update && apt-get install -y \
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-&& apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
@@ -171,6 +171,7 @@ ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
+ENV NODE_OPTIONS="--max-old-space-size=1024"
WORKDIR /backend

View File

@@ -127,8 +127,8 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
-&& apt-get update && apt-get install -y infisical=0.31.1 \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
+&& apt-get update && apt-get install -y infisical=0.41.2 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
@@ -168,6 +168,7 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
+ENV NODE_OPTIONS="--max-old-space-size=1024"
WORKDIR /backend

View File

@@ -54,8 +54,8 @@ COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
-curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
-apt-get update && apt-get install -y infisical=0.8.1 git
+curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
+apt-get update && apt-get install -y infisical=0.41.2 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js

View File

@@ -55,9 +55,9 @@ RUN mkdir -p /etc/softhsm2/tokens && \
# ? App setup
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
-apt-get install -y infisical=0.8.1
+apt-get install -y infisical=0.41.2
WORKDIR /app

View File

@@ -64,9 +64,9 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# ? App setup
# Install Infisical CLI
-RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
+RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
-apt-get install -y infisical=0.8.1
+apt-get install -y infisical=0.41.2
WORKDIR /app

View File

@@ -1,4 +1,8 @@
+import RE2 from "re2";
import { TKeyStoreFactory } from "@app/keystore/keystore";
+import { applyJitter } from "@app/lib/dates";
+import { delay as delayMs } from "@app/lib/delay";
import { Lock } from "@app/lib/red-lock";
export const mockKeyStore = (): TKeyStoreFactory => {
@@ -18,6 +22,27 @@ export const mockKeyStore = (): TKeyStoreFactory => {
delete store[key];
return 1;
},
+deleteItems: async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }) => {
+const regex = new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
+let totalDeleted = 0;
+const keys = Object.keys(store);
+for (let i = 0; i < keys.length; i += batchSize) {
+const batch = keys.slice(i, i + batchSize);
+for (const key of batch) {
+if (regex.test(key)) {
+delete store[key];
+totalDeleted += 1;
+}
+}
+// eslint-disable-next-line no-await-in-loop
+await delayMs(Math.max(0, applyJitter(delay, jitter)));
+}
+return totalDeleted;
+},
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {

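Side note: a minimal standalone sketch of the glob-to-RE2 translation deleteItems relies on above — regex metacharacters are escaped first, then each * wildcard becomes .* (the helper name is ours, not from the diff):

import RE2 from "re2";

// Hypothetical helper mirroring the escaping inside deleteItems.
const globToRegex = (pattern: string) =>
  new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);

const re = globToRegex("secret-manager:*:cache");
console.log(re.test("secret-manager:project-1:cache")); // true
console.log(re.test("secret-manager:project-1:extra")); // false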
View File

@@ -33,7 +33,8 @@
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-paginate-graphql": "^5.2.4",
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
@@ -121,7 +122,7 @@
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
"zod-to-json-schema": "^3.24.5"
},
"bin": {
"backend": "dist/main.js"
@@ -7805,119 +7806,38 @@
}
},
"node_modules/@octokit/core": {
"version": "6.1.5",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.5.tgz",
"integrity": "sha512-vvmsN0r7rguA+FySiCsbaTTobSftpIDIpPW81trAmsv9TGxg3YCujAxRYp/Uy8xmDgYCzzgulG62H7KYUFmeIg==",
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
"integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/auth-token": "^5.0.0",
"@octokit/graphql": "^8.2.2",
"@octokit/request": "^9.2.3",
"@octokit/request-error": "^6.1.8",
"@octokit/types": "^14.0.0",
"before-after-hook": "^3.0.2",
"universal-user-agent": "^7.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/core/node_modules/@octokit/auth-token": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.2.tgz",
"integrity": "sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/core/node_modules/@octokit/endpoint": {
"version": "10.1.4",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
"integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/types": "^14.0.0",
"universal-user-agent": "^7.0.2"
"@octokit/auth-token": "^4.0.0",
"@octokit/graphql": "^7.1.0",
"@octokit/request": "^8.4.1",
"@octokit/request-error": "^5.1.1",
"@octokit/types": "^13.0.0",
"before-after-hook": "^2.2.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/core/node_modules/@octokit/openapi-types": {
"version": "25.0.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.0.0.tgz",
"integrity": "sha512-FZvktFu7HfOIJf2BScLKIEYjDsw6RKc7rBJCdvCTfKsVnx2GEB/Nbzjr29DUdb7vQhlzS/j8qDzdditP0OC6aw==",
"license": "MIT",
"peer": true
},
"node_modules/@octokit/core/node_modules/@octokit/request": {
"version": "9.2.3",
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.3.tgz",
"integrity": "sha512-Ma+pZU8PXLOEYzsWf0cn/gY+ME57Wq8f49WTXA8FMHp2Ps9djKw//xYJ1je8Hm0pR2lU9FUGeJRWOtxq6olt4w==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/endpoint": "^10.1.4",
"@octokit/request-error": "^6.1.8",
"@octokit/types": "^14.0.0",
"fast-content-type-parse": "^2.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/core/node_modules/@octokit/request-error": {
"version": "6.1.8",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz",
"integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/types": "^14.0.0"
},
"engines": {
"node": ">= 18"
}
"version": "24.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
"license": "MIT"
},
"node_modules/@octokit/core/node_modules/@octokit/types": {
"version": "14.0.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.0.0.tgz",
"integrity": "sha512-VVmZP0lEhbo2O1pdq63gZFiGCKkm8PPp8AUOijlwPO6hojEVjspA0MWKP7E4hbvGxzFKNqKr6p0IYtOH/Wf/zA==",
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^25.0.0"
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/@octokit/core/node_modules/fast-content-type-parse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz",
"integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"license": "MIT",
"peer": true
},
"node_modules/@octokit/core/node_modules/universal-user-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==",
"license": "ISC",
"peer": true
},
"node_modules/@octokit/endpoint": {
"version": "9.0.6",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
@@ -7947,105 +7867,34 @@
}
},
"node_modules/@octokit/graphql": {
"version": "8.2.2",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.2.2.tgz",
"integrity": "sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
"integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/request": "^9.2.3",
"@octokit/types": "^14.0.0",
"universal-user-agent": "^7.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/graphql/node_modules/@octokit/endpoint": {
"version": "10.1.4",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
"integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/types": "^14.0.0",
"universal-user-agent": "^7.0.2"
"@octokit/request": "^8.4.1",
"@octokit/types": "^13.0.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/graphql/node_modules/@octokit/openapi-types": {
"version": "25.0.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.0.0.tgz",
"integrity": "sha512-FZvktFu7HfOIJf2BScLKIEYjDsw6RKc7rBJCdvCTfKsVnx2GEB/Nbzjr29DUdb7vQhlzS/j8qDzdditP0OC6aw==",
"license": "MIT",
"peer": true
},
"node_modules/@octokit/graphql/node_modules/@octokit/request": {
"version": "9.2.3",
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.3.tgz",
"integrity": "sha512-Ma+pZU8PXLOEYzsWf0cn/gY+ME57Wq8f49WTXA8FMHp2Ps9djKw//xYJ1je8Hm0pR2lU9FUGeJRWOtxq6olt4w==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/endpoint": "^10.1.4",
"@octokit/request-error": "^6.1.8",
"@octokit/types": "^14.0.0",
"fast-content-type-parse": "^2.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/graphql/node_modules/@octokit/request-error": {
"version": "6.1.8",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz",
"integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/types": "^14.0.0"
},
"engines": {
"node": ">= 18"
}
"version": "24.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
"license": "MIT"
},
"node_modules/@octokit/graphql/node_modules/@octokit/types": {
"version": "14.0.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.0.0.tgz",
"integrity": "sha512-VVmZP0lEhbo2O1pdq63gZFiGCKkm8PPp8AUOijlwPO6hojEVjspA0MWKP7E4hbvGxzFKNqKr6p0IYtOH/Wf/zA==",
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^25.0.0"
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/@octokit/graphql/node_modules/fast-content-type-parse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz",
"integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"license": "MIT",
"peer": true
},
"node_modules/@octokit/graphql/node_modules/universal-user-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==",
"license": "ISC",
"peer": true
},
"node_modules/@octokit/oauth-authorization-url": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz",
@@ -8141,15 +7990,15 @@
}
},
"node_modules/@octokit/plugin-paginate-graphql": {
"version": "5.2.4",
"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-5.2.4.tgz",
"integrity": "sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA==",
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.1.tgz",
"integrity": "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A==",
"license": "MIT",
"engines": {
"node": ">= 18"
},
"peerDependencies": {
"@octokit/core": ">=6"
"@octokit/core": ">=5"
}
},
"node_modules/@octokit/plugin-paginate-rest": {
@@ -8302,59 +8151,6 @@
"node": ">= 18"
}
},
"node_modules/@octokit/rest/node_modules/@octokit/core": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
"integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
"license": "MIT",
"dependencies": {
"@octokit/auth-token": "^4.0.0",
"@octokit/graphql": "^7.1.0",
"@octokit/request": "^8.4.1",
"@octokit/request-error": "^5.1.1",
"@octokit/types": "^13.0.0",
"before-after-hook": "^2.2.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/rest/node_modules/@octokit/graphql": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
"integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
"license": "MIT",
"dependencies": {
"@octokit/request": "^8.4.1",
"@octokit/types": "^13.0.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/rest/node_modules/@octokit/openapi-types": {
"version": "24.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
"license": "MIT"
},
"node_modules/@octokit/rest/node_modules/@octokit/types": {
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
"dependencies": {
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/@octokit/rest/node_modules/before-after-hook": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
"license": "Apache-2.0"
},
"node_modules/@octokit/types": {
"version": "12.4.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.4.0.tgz",
@@ -12799,11 +12595,10 @@
"integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ=="
},
"node_modules/before-after-hook": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz",
"integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==",
"license": "Apache-2.0",
"peer": true
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
"license": "Apache-2.0"
},
"node_modules/big-integer": {
"version": "1.6.52",
@@ -21602,62 +21397,6 @@
"node": ">=18"
}
},
"node_modules/probot/node_modules/@octokit/core": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz",
"integrity": "sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==",
"license": "MIT",
"dependencies": {
"@octokit/auth-token": "^4.0.0",
"@octokit/graphql": "^7.1.0",
"@octokit/request": "^8.4.1",
"@octokit/request-error": "^5.1.1",
"@octokit/types": "^13.0.0",
"before-after-hook": "^2.2.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/probot/node_modules/@octokit/core/node_modules/@octokit/types": {
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
"dependencies": {
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/probot/node_modules/@octokit/graphql": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
"integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
"license": "MIT",
"dependencies": {
"@octokit/request": "^8.4.1",
"@octokit/types": "^13.0.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/probot/node_modules/@octokit/graphql/node_modules/@octokit/types": {
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
"dependencies": {
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/probot/node_modules/@octokit/openapi-types": {
"version": "24.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
"license": "MIT"
},
"node_modules/probot/node_modules/@octokit/plugin-retry": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz",
@@ -21690,12 +21429,6 @@
"@octokit/core": "^5.0.0"
}
},
"node_modules/probot/node_modules/before-after-hook": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
"license": "Apache-2.0"
},
"node_modules/probot/node_modules/commander": {
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz",
@@ -27709,11 +27442,12 @@
}
},
"node_modules/zod-to-json-schema": {
"version": "3.22.4",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.22.4.tgz",
"integrity": "sha512-2Ed5dJ+n/O3cU383xSY28cuVi0BCQhF8nYqWU5paEpl7fVdqdAmiLdqLyfblbNdfOFwFfi/mqU4O1pwc60iBhQ==",
"version": "3.24.5",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz",
"integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==",
"license": "ISC",
"peerDependencies": {
"zod": "^3.22.4"
"zod": "^3.24.1"
}
}
}

View File

@@ -152,7 +152,8 @@
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-paginate-graphql": "^5.2.4",
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
@@ -240,6 +241,6 @@
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
"zod-to-json-schema": "^3.24.5"
}
}

View File

@@ -66,6 +66,8 @@ import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-a
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
+import { TIdentityLdapAuthServiceFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-service";
+import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
@@ -146,6 +148,13 @@ declare module "fastify" {
providerAuthToken: string;
externalProviderAccessToken?: string;
};
+passportMachineIdentity: {
+identityId: string;
+user: {
+uid: string;
+mail?: string;
+};
+};
kmipUser: {
projectId: string;
clientId: string;
@@ -153,7 +162,9 @@ declare module "fastify" {
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
-ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
+ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>> & {
+allowedFields?: TAllowedFields[];
+};
}
interface FastifyInstance {
@@ -199,6 +210,7 @@ declare module "fastify" {
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
identityJwtAuth: TIdentityJwtAuthServiceFactory;
identityLdapAuth: TIdentityLdapAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;

View File

@@ -432,6 +432,11 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
+import {
+TIdentityLdapAuths,
+TIdentityLdapAuthsInsert,
+TIdentityLdapAuthsUpdate
+} from "@app/db/schemas/identity-ldap-auths";
import {
TMicrosoftTeamsIntegrations,
TMicrosoftTeamsIntegrationsInsert,
@@ -735,6 +740,11 @@ declare module "knex/types/tables" {
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate
>;
+[TableName.IdentityLdapAuth]: KnexOriginal.CompositeTableType<
+TIdentityLdapAuths,
+TIdentityLdapAuthsInsert,
+TIdentityLdapAuthsUpdate
+>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,

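For context, a sketch of what this CompositeTableType registration buys — knex queries against the new table come back typed (connection wiring below is illustrative; import path assumed from the hunk above):

import knex from "knex";
import { TableName } from "@app/db/schemas";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
// With the Tables augmentation in scope, the row type is inferred per table:
const ldapAuth = await db(TableName.IdentityLdapAuth)
  .where({ identityId: "00000000-0000-0000-0000-000000000000" })
  .first(); // typed as TIdentityLdapAuths | undefined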
View File

@@ -3,7 +3,7 @@ import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
-if (await knex.schema.hasTable(TableName.CertificateBody)) {
+if (!(await knex.schema.hasColumn(TableName.CertificateBody, "encryptedCertificateChain"))) {
await knex.schema.alterTable(TableName.CertificateBody, (t) => {
t.binary("encryptedCertificateChain").nullable();
});
@@ -25,7 +25,7 @@ export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTable(TableName.CertificateSecret);
}
-if (await knex.schema.hasTable(TableName.CertificateBody)) {
+if (await knex.schema.hasColumn(TableName.CertificateBody, "encryptedCertificateChain")) {
await knex.schema.alterTable(TableName.CertificateBody, (t) => {
t.dropColumn("encryptedCertificateChain");
});

View File

@@ -0,0 +1,22 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId"))) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.uuid("groupId").nullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.unique(["sshHostLoginUserId", "groupId"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SshHostLoginUserMapping, "groupId")) {
await knex.schema.alterTable(TableName.SshHostLoginUserMapping, (t) => {
t.dropUnique(["sshHostLoginUserId", "groupId"]);
t.dropColumn("groupId");
});
}
}
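Note the hasColumn guards: they make both directions idempotent, so re-running up after a partial deploy is a no-op instead of a duplicate-column error. A minimal sketch of driving such migrations with knex (env var name and directory are assumptions):

import knex from "knex";

const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI,
  migrations: { directory: "./migrations" }
});

await db.migrate.latest(); // guarded migrations simply no-op if already applied
await db.destroy();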

View File

@@ -0,0 +1,22 @@
import { Knex } from "knex";
import { ProjectType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.ProjectTemplates, "type"))) {
await knex.schema.alterTable(TableName.ProjectTemplates, (t) => {
// defaulting to sm for migration to set existing, new ones will always be specified on creation
t.string("type").defaultTo(ProjectType.SecretManager).notNullable();
t.jsonb("environments").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.ProjectTemplates, "type")) {
await knex.schema.alterTable(TableName.ProjectTemplates, (t) => {
t.dropColumn("type");
// not reverting nullable environments
});
}
}

View File

@@ -0,0 +1,39 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityLdapAuth))) {
await knex.schema.createTable(TableName.IdentityLdapAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.binary("encryptedBindDN").notNullable();
t.binary("encryptedBindPass").notNullable();
t.binary("encryptedLdapCaCertificate").nullable();
t.string("url").notNullable();
t.string("searchBase").notNullable();
t.string("searchFilter").notNullable();
t.jsonb("allowedFields").nullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.IdentityLdapAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityLdapAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityLdapAuth);
}

View File

@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityLdapAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
encryptedBindDN: zodBuffer,
encryptedBindPass: zodBuffer,
encryptedLdapCaCertificate: zodBuffer.nullable().optional(),
url: z.string(),
searchBase: z.string(),
searchFilter: z.string(),
allowedFields: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityLdapAuths = z.infer<typeof IdentityLdapAuthsSchema>;
export type TIdentityLdapAuthsInsert = Omit<z.input<typeof IdentityLdapAuthsSchema>, TImmutableDBKeys>;
export type TIdentityLdapAuthsUpdate = Partial<Omit<z.input<typeof IdentityLdapAuthsSchema>, TImmutableDBKeys>>;
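A small illustration of the z.coerce.number() pattern used for the token fields above — string values coming back from the driver are coerced, and the declared default applies when the field is absent:

import { z } from "zod";

const accessTokenTTL = z.coerce.number().default(7200);
console.log(accessTokenTTL.parse("3600")); // 3600 (number, coerced from string)
console.log(accessTokenTTL.parse(undefined)); // 7200 (default applied)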

View File

@@ -80,6 +80,7 @@ export enum TableName {
IdentityAwsAuth = "identity_aws_auths",
IdentityOidcAuth = "identity_oidc_auths",
IdentityJwtAuth = "identity_jwt_auths",
+IdentityLdapAuth = "identity_ldap_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
@@ -185,11 +186,16 @@ export enum OrgMembershipStatus {
}
export enum ProjectMembershipRole {
+// general
Admin = "admin",
Member = "member",
Custom = "custom",
Viewer = "viewer",
-NoAccess = "no-access"
+NoAccess = "no-access",
+// ssh
+SshHostBootstrapper = "ssh-host-bootstrapper",
+// kms
+KmsCryptographicOperator = "cryptographic-operator"
}
export enum SecretEncryptionAlgo {
@@ -227,7 +233,8 @@ export enum IdentityAuthMethod {
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
OIDC_AUTH = "oidc-auth",
-JWT_AUTH = "jwt-auth"
+JWT_AUTH = "jwt-auth",
+LDAP_AUTH = "ldap-auth"
}
export enum ProjectType {

View File

@@ -12,10 +12,11 @@ export const ProjectTemplatesSchema = z.object({
name: z.string(),
description: z.string().nullable().optional(),
roles: z.unknown(),
-environments: z.unknown(),
+environments: z.unknown().nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
-updatedAt: z.date()
+updatedAt: z.date(),
+type: z.string().default("secret-manager")
});
export type TProjectTemplates = z.infer<typeof ProjectTemplatesSchema>;

View File

@@ -12,7 +12,8 @@ export const SshHostLoginUserMappingsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
sshHostLoginUserId: z.string().uuid(),
-userId: z.string().uuid().nullable().optional()
+userId: z.string().uuid().nullable().optional(),
+groupId: z.string().uuid().nullable().optional()
});
export type TSshHostLoginUserMappings = z.infer<typeof SshHostLoginUserMappingsSchema>;

View File

@@ -2,6 +2,7 @@ import { z } from "zod";
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
+import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -18,6 +19,9 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
server.route({
url: "/",
method: "POST",
+config: {
+rateLimit: writeLimit
+},
schema: {
body: z.object({
permissions: z.any().array(),

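For context on the config.rateLimit additions in this and the following routers: with @fastify/rate-limit registered, a per-route config.rateLimit applies that limiter to the route. A minimal standalone sketch with illustrative limits (the real writeLimit lives in @app/server/config/rateLimiter):

import Fastify from "fastify";
import rateLimit from "@fastify/rate-limit";

const writeLimit = { max: 50, timeWindow: "1 minute" }; // illustrative values

const server = Fastify();
await server.register(rateLimit, { global: false }); // only routes that opt in are limited

server.route({
  url: "/",
  method: "POST",
  config: { rateLimit: writeLimit }, // per-route limit, as in the hunk above
  handler: async () => ({ ok: true })
});

await server.listen({ port: 3000 });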
View File

@@ -98,6 +98,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/login",
method: "POST",
+config: {
+rateLimit: writeLimit
+},
schema: {
body: z.object({
organizationSlug: z.string().trim()

View File

@@ -1,9 +1,8 @@
import { z } from "zod";
-import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
+import { ProjectMembershipRole, ProjectTemplatesSchema, ProjectType } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
-import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import { ApiDocsTags, ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -35,6 +34,7 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
position: z.number().min(1)
})
.array()
+.nullable()
});
const ProjectTemplateRolesSchema = z
@@ -104,6 +104,9 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
hide: false,
tags: [ApiDocsTags.ProjectTemplates],
description: "List project templates for the current organization.",
+querystring: z.object({
+type: z.nativeEnum(ProjectType).optional().describe(ProjectTemplates.LIST.type)
+}),
response: {
200: z.object({
projectTemplates: SanitizedProjectTemplateSchema.array()
@@ -112,7 +115,8 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
-const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission);
+const { type } = req.query;
+const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission, type);
const auditTemplates = projectTemplates.filter((template) => !isInfisicalProjectTemplate(template.name));
@@ -184,6 +188,7 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
tags: [ApiDocsTags.ProjectTemplates],
description: "Create a project template.",
body: z.object({
+type: z.nativeEnum(ProjectType).describe(ProjectTemplates.CREATE.type),
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
@@ -191,9 +196,7 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
.describe(ProjectTemplates.CREATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
-environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
-ProjectTemplates.CREATE.environments
-)
+environments: ProjectTemplateEnvironmentsSchema.describe(ProjectTemplates.CREATE.environments).optional()
}),
response: {
200: z.object({

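Exercised from a client, the new optional type filter looks like this (base URL and path prefix are assumptions, not taken from the diff):

const accessToken = process.env.INFISICAL_TOKEN ?? "";
const res = await fetch("https://app.infisical.com/api/v1/project-templates?type=secret-manager", {
  headers: { Authorization: `Bearer ${accessToken}` }
});
const { projectTemplates } = (await res.json()) as { projectTemplates: unknown[] };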
View File

@@ -166,6 +166,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/redirect/saml2/organizations/:orgSlug",
method: "GET",
+config: {
+rateLimit: readLimit
+},
schema: {
params: z.object({
orgSlug: z.string().trim()
@@ -192,6 +195,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/redirect/saml2/:samlConfigId",
method: "GET",
+config: {
+rateLimit: readLimit
+},
schema: {
params: z.object({
samlConfigId: z.string().trim()
@@ -218,6 +224,9 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/saml2/:samlConfigId",
method: "POST",
+config: {
+rateLimit: writeLimit
+},
schema: {
params: z.object({
samlConfigId: z.string().trim()

View File

@@ -196,6 +196,9 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/Users",
method: "POST",
+config: {
+rateLimit: writeLimit
+},
schema: {
body: z.object({
schemas: z.array(z.string()),

View File

@@ -1,11 +1,11 @@
import { z } from "zod";
import { GitAppOrgSchema, SecretScanningGitRisksSchema } from "@app/db/schemas";
+import { canUseSecretScanning } from "@app/ee/services/secret-scanning/secret-scanning-fns";
import {
SecretScanningResolvedStatus,
SecretScanningRiskStatus
} from "@app/ee/services/secret-scanning/secret-scanning-types";
-import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -23,14 +23,14 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
body: z.object({ organizationId: z.string().trim() }),
response: {
200: z.object({
-sessionId: z.string()
+sessionId: z.string(),
+gitAppSlug: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
-const appCfg = getConfig();
-if (!appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(req.auth.orgId)) {
+if (!canUseSecretScanning(req.auth.orgId)) {
throw new BadRequestError({
message: "Secret scanning is temporarily unavailable."
});
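canUseSecretScanning itself is not shown in this diff. Based on the commit "fix: allow secret scanning from all self-hosted orgs", a plausible reconstruction is that self-hosted deployments bypass the whitelist entirely (the isCloud flag below is a hypothetical stand-in; the real implementation lives in secret-scanning-fns.ts and may differ):

import { getConfig } from "@app/lib/config/env";

// Hypothetical sketch only, not the confirmed implementation.
export const canUseSecretScanning = (orgId: string) => {
  const appCfg = getConfig();
  if (!appCfg.isCloud) return true; // self-hosted: always allowed
  return Boolean(appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(orgId));
};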

View File

@@ -22,6 +22,7 @@ import { ActorType } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
+import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
import { SecretSync, SecretSyncImportBehavior } from "@app/services/secret-sync/secret-sync-enums";
import {
@@ -119,44 +120,60 @@ export enum EventType {
CREATE_TOKEN_IDENTITY_TOKEN_AUTH = "create-token-identity-token-auth",
UPDATE_TOKEN_IDENTITY_TOKEN_AUTH = "update-token-identity-token-auth",
GET_TOKENS_IDENTITY_TOKEN_AUTH = "get-tokens-identity-token-auth",
ADD_IDENTITY_TOKEN_AUTH = "add-identity-token-auth",
UPDATE_IDENTITY_TOKEN_AUTH = "update-identity-token-auth",
GET_IDENTITY_TOKEN_AUTH = "get-identity-token-auth",
REVOKE_IDENTITY_TOKEN_AUTH = "revoke-identity-token-auth",
LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
LOGIN_IDENTITY_OIDC_AUTH = "login-identity-oidc-auth",
ADD_IDENTITY_OIDC_AUTH = "add-identity-oidc-auth",
UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
LOGIN_IDENTITY_JWT_AUTH = "login-identity-jwt-auth",
ADD_IDENTITY_JWT_AUTH = "add-identity-jwt-auth",
UPDATE_IDENTITY_JWT_AUTH = "update-identity-jwt-auth",
GET_IDENTITY_JWT_AUTH = "get-identity-jwt-auth",
REVOKE_IDENTITY_JWT_AUTH = "revoke-identity-jwt-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth",
GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth",
+LOGIN_IDENTITY_LDAP_AUTH = "login-identity-ldap-auth",
+ADD_IDENTITY_LDAP_AUTH = "add-identity-ldap-auth",
+UPDATE_IDENTITY_LDAP_AUTH = "update-identity-ldap-auth",
+GET_IDENTITY_LDAP_AUTH = "get-identity-ldap-auth",
+REVOKE_IDENTITY_LDAP_AUTH = "revoke-identity-ldap-auth",
CREATE_ENVIRONMENT = "create-environment",
UPDATE_ENVIRONMENT = "update-environment",
DELETE_ENVIRONMENT = "delete-environment",
@@ -1034,6 +1051,55 @@ interface GetIdentityAzureAuthEvent {
};
}
+interface LoginIdentityLdapAuthEvent {
+type: EventType.LOGIN_IDENTITY_LDAP_AUTH;
+metadata: {
+identityId: string;
+ldapUsername: string;
+ldapEmail?: string;
+};
+}
+interface AddIdentityLdapAuthEvent {
+type: EventType.ADD_IDENTITY_LDAP_AUTH;
+metadata: {
+identityId: string;
+accessTokenTTL?: number;
+accessTokenMaxTTL?: number;
+accessTokenNumUsesLimit?: number;
+accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
+allowedFields?: TAllowedFields[];
+url: string;
+};
+}
+interface UpdateIdentityLdapAuthEvent {
+type: EventType.UPDATE_IDENTITY_LDAP_AUTH;
+metadata: {
+identityId: string;
+accessTokenTTL?: number;
+accessTokenMaxTTL?: number;
+accessTokenNumUsesLimit?: number;
+accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
+allowedFields?: TAllowedFields[];
+url?: string;
+};
+}
+interface GetIdentityLdapAuthEvent {
+type: EventType.GET_IDENTITY_LDAP_AUTH;
+metadata: {
+identityId: string;
+};
+}
+interface RevokeIdentityLdapAuthEvent {
+type: EventType.REVOKE_IDENTITY_LDAP_AUTH;
+metadata: {
+identityId: string;
+};
+}
interface LoginIdentityOidcAuthEvent {
type: EventType.LOGIN_IDENTITY_OIDC_AUTH;
metadata: {
@@ -2785,6 +2851,11 @@ export type Event =
| UpdateIdentityJwtAuthEvent
| GetIdentityJwtAuthEvent
| DeleteIdentityJwtAuthEvent
+| LoginIdentityLdapAuthEvent
+| AddIdentityLdapAuthEvent
+| UpdateIdentityLdapAuthEvent
+| GetIdentityLdapAuthEvent
+| RevokeIdentityLdapAuthEvent
| CreateEnvironmentEvent
| GetEnvironmentEvent
| UpdateEnvironmentEvent

View File

@@ -1,6 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import { Octokit } from "@octokit/core";
-import { paginateGraphQL } from "@octokit/plugin-paginate-graphql";
+import { paginateGraphql } from "@octokit/plugin-paginate-graphql";
import { Octokit as OctokitRest } from "@octokit/rest";
import { OrgMembershipRole } from "@app/db/schemas";
@@ -18,7 +18,7 @@ import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGithubOrgSyncDALFactory } from "./github-org-sync-dal";
import { TCreateGithubOrgSyncDTO, TDeleteGithubOrgSyncDTO, TUpdateGithubOrgSyncDTO } from "./github-org-sync-types";
-const OctokitWithPlugin = Octokit.plugin(paginateGraphQL);
+const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
type TGithubOrgSyncServiceFactoryDep = {
githubOrgSyncDAL: TGithubOrgSyncDALFactory;

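The rename tracks the plugin downgrade: v4 of @octokit/plugin-paginate-graphql (pinned by this change) exports paginateGraphql, whereas the v5 line used paginateGraphQL — hence the import rename above. Minimal usage under v4:

import { Octokit } from "@octokit/core";
import { paginateGraphql } from "@octokit/plugin-paginate-graphql";

const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
const octokit = new OctokitWithPlugin({ auth: process.env.GITHUB_TOKEN });
// octokit.graphql.paginate(...) is now available for cursor-based pagination.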
View File

@@ -157,10 +157,23 @@ export const groupDALFactory = (db: TDbClient) => {
}
};
+const findGroupsByProjectId = async (projectId: string, tx?: Knex) => {
+try {
+const docs = await (tx || db.replicaNode())(TableName.Groups)
+.join(TableName.GroupProjectMembership, `${TableName.Groups}.id`, `${TableName.GroupProjectMembership}.groupId`)
+.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
+.select(selectAllTableCols(TableName.Groups));
+return docs;
+} catch (error) {
+throw new DatabaseError({ error, name: "Find groups by project id" });
+}
+};
return {
findGroups,
findByOrgId,
findAllGroupPossibleMembers,
+findGroupsByProjectId,
...groupOrm
};
};
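Illustrative call sites for the new DAL method — the declare stubs stand in for wiring done elsewhere in the codebase:

import knex, { Knex } from "knex";

declare const groupDAL: {
  findGroupsByProjectId: (projectId: string, tx?: Knex) => Promise<unknown[]>;
};
declare const projectId: string;

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
const groups = await groupDAL.findGroupsByProjectId(projectId);
await db.transaction(async (tx) => {
  // Passing the caller's transaction keeps the read consistent with its writes.
  const txGroups = await groupDAL.findGroupsByProjectId(projectId, tx);
});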

View File

@@ -176,7 +176,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.Groups).as("groupName"),
db.ref("id").withSchema(TableName.OrgMembership).as("orgMembershipId"),
db.ref("firstName").withSchema(TableName.Users).as("firstName"),
db.ref("lastName").withSchema(TableName.Users).as("lastName")
db.ref("lastName").withSchema(TableName.Users).as("lastName"),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
return docs;

View File

@@ -14,6 +14,11 @@ export type TLDAPConfig = {
caCert: string;
};
+export type TTestLDAPConfigDTO = Omit<
+TLDAPConfig,
+"organization" | "id" | "groupSearchBase" | "groupSearchFilter" | "isActive" | "uniqueUserAttribute" | "searchBase"
+>;
export type TCreateLdapCfgDTO = {
orgId: string;
isActive: boolean;

View File

@@ -2,15 +2,14 @@ import ldapjs from "ldapjs";
import { logger } from "@app/lib/logger";
-import { TLDAPConfig } from "./ldap-config-types";
+import { TLDAPConfig, TTestLDAPConfigDTO } from "./ldap-config-types";
export const isValidLdapFilter = (filter: string) => {
try {
ldapjs.parseFilter(filter);
return true;
} catch (error) {
logger.error("Invalid LDAP filter");
logger.error(error);
logger.error(error, "Invalid LDAP filter");
return false;
}
};
@@ -20,7 +19,7 @@ export const isValidLdapFilter = (filter: string) => {
* @param ldapConfig - The LDAP configuration to test
* @returns {Boolean} isConnected - Whether or not the connection was successful
*/
-export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean> => {
+export const testLDAPConfig = async (ldapConfig: TTestLDAPConfigDTO): Promise<boolean> => {
return new Promise((resolve) => {
const ldapClient = ldapjs.createClient({
url: ldapConfig.url,

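The narrowed parameter type is plain TypeScript Omit; a reduced illustration of the same pattern, using only fields visible in this diff:

type TLDAPConfigExample = {
  id: string;
  organization: string;
  isActive: boolean;
  url: string;
  caCert: string;
};

// Drop persistence/org fields so a config can be tested before it is saved:
type TTestDTO = Omit<TLDAPConfigExample, "organization" | "id" | "isActive">;

const candidate: TTestDTO = { url: "ldaps://ldap.example.com:636", caCert: "" };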
View File

@@ -0,0 +1,448 @@
import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability";
import {
ProjectPermissionActions,
ProjectPermissionCertificateActions,
ProjectPermissionCmekActions,
ProjectPermissionDynamicSecretActions,
ProjectPermissionGroupActions,
ProjectPermissionIdentityActions,
ProjectPermissionKmipActions,
ProjectPermissionMemberActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretSyncActions,
ProjectPermissionSet,
ProjectPermissionSshHostActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
const buildAdminPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
// Admins get full access to everything
[
ProjectPermissionSub.SecretFolders,
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretApproval,
ProjectPermissionSub.Role,
ProjectPermissionSub.Integrations,
ProjectPermissionSub.Webhooks,
ProjectPermissionSub.ServiceTokens,
ProjectPermissionSub.Settings,
ProjectPermissionSub.Environments,
ProjectPermissionSub.Tags,
ProjectPermissionSub.AuditLogs,
ProjectPermissionSub.IpAllowList,
ProjectPermissionSub.CertificateAuthorities,
ProjectPermissionSub.CertificateTemplates,
ProjectPermissionSub.PkiAlerts,
ProjectPermissionSub.PkiCollections,
ProjectPermissionSub.SshCertificateAuthorities,
ProjectPermissionSub.SshCertificates,
ProjectPermissionSub.SshCertificateTemplates,
ProjectPermissionSub.SshHostGroups
].forEach((el) => {
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
el
);
});
can(
[
ProjectPermissionCertificateActions.Read,
ProjectPermissionCertificateActions.Edit,
ProjectPermissionCertificateActions.Create,
ProjectPermissionCertificateActions.Delete,
ProjectPermissionCertificateActions.ReadPrivateKey
],
ProjectPermissionSub.Certificates
);
can(
[
ProjectPermissionSshHostActions.Edit,
ProjectPermissionSshHostActions.Read,
ProjectPermissionSshHostActions.Create,
ProjectPermissionSshHostActions.Delete,
ProjectPermissionSshHostActions.IssueHostCert
],
ProjectPermissionSub.SshHosts
);
can(
[
ProjectPermissionMemberActions.Create,
ProjectPermissionMemberActions.Edit,
ProjectPermissionMemberActions.Delete,
ProjectPermissionMemberActions.Read,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionMemberActions.AssumePrivileges
],
ProjectPermissionSub.Member
);
can(
[
ProjectPermissionGroupActions.Create,
ProjectPermissionGroupActions.Edit,
ProjectPermissionGroupActions.Delete,
ProjectPermissionGroupActions.Read,
ProjectPermissionGroupActions.GrantPrivileges
],
ProjectPermissionSub.Groups
);
can(
[
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Delete,
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionIdentityActions.AssumePrivileges
],
ProjectPermissionSub.Identity
);
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
can(
[
ProjectPermissionDynamicSecretActions.ReadRootCredential,
ProjectPermissionDynamicSecretActions.EditRootCredential,
ProjectPermissionDynamicSecretActions.CreateRootCredential,
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
ProjectPermissionDynamicSecretActions.Lease
],
ProjectPermissionSub.DynamicSecrets
);
can([ProjectPermissionActions.Edit, ProjectPermissionActions.Delete], ProjectPermissionSub.Project);
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
can([ProjectPermissionActions.Edit], ProjectPermissionSub.Kms);
can(
[
ProjectPermissionCmekActions.Create,
ProjectPermissionCmekActions.Edit,
ProjectPermissionCmekActions.Delete,
ProjectPermissionCmekActions.Read,
ProjectPermissionCmekActions.Encrypt,
ProjectPermissionCmekActions.Decrypt,
ProjectPermissionCmekActions.Sign,
ProjectPermissionCmekActions.Verify
],
ProjectPermissionSub.Cmek
);
can(
[
ProjectPermissionSecretSyncActions.Create,
ProjectPermissionSecretSyncActions.Edit,
ProjectPermissionSecretSyncActions.Delete,
ProjectPermissionSecretSyncActions.Read,
ProjectPermissionSecretSyncActions.SyncSecrets,
ProjectPermissionSecretSyncActions.ImportSecrets,
ProjectPermissionSecretSyncActions.RemoveSecrets
],
ProjectPermissionSub.SecretSyncs
);
can(
[
ProjectPermissionKmipActions.CreateClients,
ProjectPermissionKmipActions.UpdateClients,
ProjectPermissionKmipActions.DeleteClients,
ProjectPermissionKmipActions.ReadClients,
ProjectPermissionKmipActions.GenerateClientCertificates
],
ProjectPermissionSub.Kmip
);
can(
[
ProjectPermissionSecretRotationActions.Create,
ProjectPermissionSecretRotationActions.Edit,
ProjectPermissionSecretRotationActions.Delete,
ProjectPermissionSecretRotationActions.Read,
ProjectPermissionSecretRotationActions.ReadGeneratedCredentials,
ProjectPermissionSecretRotationActions.RotateSecrets
],
ProjectPermissionSub.SecretRotation
);
return rules;
};
const buildMemberPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.SecretFolders
);
can(
[
ProjectPermissionDynamicSecretActions.ReadRootCredential,
ProjectPermissionDynamicSecretActions.EditRootCredential,
ProjectPermissionDynamicSecretActions.CreateRootCredential,
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
ProjectPermissionDynamicSecretActions.Lease
],
ProjectPermissionSub.DynamicSecrets
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.SecretImports
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
can([ProjectPermissionSecretRotationActions.Read], ProjectPermissionSub.SecretRotation);
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
can([ProjectPermissionMemberActions.Read, ProjectPermissionMemberActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionGroupActions.Read], ProjectPermissionSub.Groups);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Integrations
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Webhooks
);
can(
[
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Delete
],
ProjectPermissionSub.Identity
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.ServiceTokens
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Settings
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Environments
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Tags
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.Role);
can([ProjectPermissionActions.Read], ProjectPermissionSub.AuditLogs);
can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList);
// TODO: double-check whether all CRUD actions are needed for CAs and Certificates
can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateAuthorities);
can(
[
ProjectPermissionCertificateActions.Read,
ProjectPermissionCertificateActions.Edit,
ProjectPermissionCertificateActions.Create,
ProjectPermissionCertificateActions.Delete
],
ProjectPermissionSub.Certificates
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateTemplates);
can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiAlerts);
can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiCollections);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificates);
can([ProjectPermissionActions.Create], ProjectPermissionSub.SshCertificates);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificateTemplates);
can([ProjectPermissionSshHostActions.Read], ProjectPermissionSub.SshHosts);
can(
[
ProjectPermissionCmekActions.Create,
ProjectPermissionCmekActions.Edit,
ProjectPermissionCmekActions.Delete,
ProjectPermissionCmekActions.Read,
ProjectPermissionCmekActions.Encrypt,
ProjectPermissionCmekActions.Decrypt,
ProjectPermissionCmekActions.Sign,
ProjectPermissionCmekActions.Verify
],
ProjectPermissionSub.Cmek
);
can(
[
ProjectPermissionSecretSyncActions.Create,
ProjectPermissionSecretSyncActions.Edit,
ProjectPermissionSecretSyncActions.Delete,
ProjectPermissionSecretSyncActions.Read,
ProjectPermissionSecretSyncActions.SyncSecrets,
ProjectPermissionSecretSyncActions.ImportSecrets,
ProjectPermissionSecretSyncActions.RemoveSecrets
],
ProjectPermissionSub.SecretSyncs
);
return rules;
};
const buildViewerPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(ProjectPermissionSecretActions.DescribeAndReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.ReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
can(ProjectPermissionSecretRotationActions.Read, ProjectPermissionSub.SecretRotation);
can(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
can(ProjectPermissionIdentityActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates);
can(ProjectPermissionCmekActions.Read, ProjectPermissionSub.Cmek);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
return rules;
};
const buildNoAccessProjectPermission = () => {
const { rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
return rules;
};
const buildSshHostBootstrapPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(
[ProjectPermissionSshHostActions.Create, ProjectPermissionSshHostActions.IssueHostCert],
ProjectPermissionSub.SshHosts
);
return rules;
};
const buildCryptographicOperatorPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(
[
ProjectPermissionCmekActions.Encrypt,
ProjectPermissionCmekActions.Decrypt,
ProjectPermissionCmekActions.Sign,
ProjectPermissionCmekActions.Verify
],
ProjectPermissionSub.Cmek
);
return rules;
};
// General
export const projectAdminPermissions = buildAdminPermissionRules();
export const projectMemberPermissions = buildMemberPermissionRules();
export const projectViewerPermission = buildViewerPermissionRules();
export const projectNoAccessPermissions = buildNoAccessProjectPermission();
// SSH
export const sshHostBootstrapPermissions = buildSshHostBootstrapPermissionRules();
// KMS
export const cryptographicOperatorPermissions = buildCryptographicOperatorPermissionRules();
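
These exported rule sets plug straight into CASL; a minimal sketch of checking a member's access against the rules built above:

const memberAbility = createMongoAbility<ProjectPermissionSet>(projectMemberPermissions);
memberAbility.can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs); // true: members are granted reads above
memberAbility.can(ProjectPermissionActions.Edit, ProjectPermissionSub.AuditLogs); // false: only Read is granted on audit logs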

View File

@@ -132,7 +132,7 @@ export const permissionDALFactory = (db: TDbClient) => {
}
};
const getProjectGroupPermissions = async (projectId: string) => {
const getProjectGroupPermissions = async (projectId: string, filterGroupId?: string) => {
try {
const docs = await db
.replicaNode()(TableName.GroupProjectMembership)
@@ -148,6 +148,11 @@ export const permissionDALFactory = (db: TDbClient) => {
`groupCustomRoles.id`
)
.where(`${TableName.GroupProjectMembership}.projectId`, "=", projectId)
.where((bd) => {
if (filterGroupId) {
void bd.where(`${TableName.GroupProjectMembership}.groupId`, "=", filterGroupId);
}
})
.select(
db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
db.ref("id").withSchema(TableName.Groups).as("groupId"),

View File

@@ -12,6 +12,14 @@ import {
TIdentityProjectMemberships,
TProjectMemberships
} from "@app/db/schemas";
import {
cryptographicOperatorPermissions,
projectAdminPermissions,
projectMemberPermissions,
projectNoAccessPermissions,
projectViewerPermission,
sshHostBootstrapPermissions
} from "@app/ee/services/permission/default-roles";
import { conditionsMatcher } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { objectify } from "@app/lib/fn";
@@ -32,14 +40,7 @@ import {
TGetServiceTokenProjectPermissionArg,
TGetUserProjectPermissionArg
} from "./permission-service-types";
import {
buildServiceTokenProjectPermission,
projectAdminPermissions,
projectMemberPermissions,
projectNoAccessPermissions,
ProjectPermissionSet,
projectViewerPermission
} from "./project-permission";
import { buildServiceTokenProjectPermission, ProjectPermissionSet } from "./project-permission";
type TPermissionServiceFactoryDep = {
orgRoleDAL: Pick<TOrgRoleDALFactory, "findOne">;
@@ -95,6 +96,10 @@ export const permissionServiceFactory = ({
return projectViewerPermission;
case ProjectMembershipRole.NoAccess:
return projectNoAccessPermissions;
case ProjectMembershipRole.SshHostBootstrapper:
return sshHostBootstrapPermissions;
case ProjectMembershipRole.KmsCryptographicOperator:
return cryptographicOperatorPermissions;
case ProjectMembershipRole.Custom: {
return unpackRules<RawRuleOf<MongoAbility<ProjectPermissionSet>>>(
permissions as PackRule<RawRuleOf<MongoAbility<ProjectPermissionSet>>>[]
@@ -625,6 +630,34 @@ export const permissionServiceFactory = ({
return { permission };
};
const checkGroupProjectPermission = async ({
groupId,
projectId,
checkPermissions
}: {
groupId: string;
projectId: string;
checkPermissions: ProjectPermissionSet;
}) => {
const rawGroupProjectPermissions = await permissionDAL.getProjectGroupPermissions(projectId, groupId);
const groupPermissions = rawGroupProjectPermissions.map((groupProjectPermission) => {
const rolePermissions =
groupProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || [];
const rules = buildProjectPermissionRules(rolePermissions);
const permission = createMongoAbility<ProjectPermissionSet>(rules, {
conditionsMatcher
});
return {
permission,
id: groupProjectPermission.groupId,
name: groupProjectPermission.username,
membershipId: groupProjectPermission.id
};
});
return groupPermissions.some((groupPermission) => groupPermission.permission.can(...checkPermissions));
};
return {
getUserOrgPermission,
getOrgPermission,
@@ -634,6 +667,7 @@ export const permissionServiceFactory = ({
getOrgPermissionByRole,
getProjectPermissionByRole,
buildOrgPermission,
buildProjectPermissionRules
buildProjectPermissionRules,
checkGroupProjectPermission
};
};
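
A usage sketch mirroring the SSH host flow further down; checkPermissions is an [action, subject] tuple forwarded to CASL's can() (IDs hypothetical):

const canReadHosts = await permissionService.checkGroupProjectPermission({
  groupId: "group_456",
  projectId: "proj_123",
  checkPermissions: [ProjectPermissionSshHostActions.Read, ProjectPermissionSub.SshHosts]
});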

View File

@@ -678,403 +678,6 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
export type TProjectPermissionV2Schema = z.infer<typeof ProjectPermissionV2Schema>;
const buildAdminPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
// Admins get full access to everything
[
ProjectPermissionSub.SecretFolders,
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretApproval,
ProjectPermissionSub.Role,
ProjectPermissionSub.Integrations,
ProjectPermissionSub.Webhooks,
ProjectPermissionSub.ServiceTokens,
ProjectPermissionSub.Settings,
ProjectPermissionSub.Environments,
ProjectPermissionSub.Tags,
ProjectPermissionSub.AuditLogs,
ProjectPermissionSub.IpAllowList,
ProjectPermissionSub.CertificateAuthorities,
ProjectPermissionSub.CertificateTemplates,
ProjectPermissionSub.PkiAlerts,
ProjectPermissionSub.PkiCollections,
ProjectPermissionSub.SshCertificateAuthorities,
ProjectPermissionSub.SshCertificates,
ProjectPermissionSub.SshCertificateTemplates,
ProjectPermissionSub.SshHostGroups
].forEach((el) => {
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
el
);
});
can(
[
ProjectPermissionCertificateActions.Read,
ProjectPermissionCertificateActions.Edit,
ProjectPermissionCertificateActions.Create,
ProjectPermissionCertificateActions.Delete,
ProjectPermissionCertificateActions.ReadPrivateKey
],
ProjectPermissionSub.Certificates
);
can(
[
ProjectPermissionSshHostActions.Edit,
ProjectPermissionSshHostActions.Read,
ProjectPermissionSshHostActions.Create,
ProjectPermissionSshHostActions.Delete,
ProjectPermissionSshHostActions.IssueHostCert
],
ProjectPermissionSub.SshHosts
);
can(
[
ProjectPermissionMemberActions.Create,
ProjectPermissionMemberActions.Edit,
ProjectPermissionMemberActions.Delete,
ProjectPermissionMemberActions.Read,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionMemberActions.AssumePrivileges
],
ProjectPermissionSub.Member
);
can(
[
ProjectPermissionGroupActions.Create,
ProjectPermissionGroupActions.Edit,
ProjectPermissionGroupActions.Delete,
ProjectPermissionGroupActions.Read,
ProjectPermissionGroupActions.GrantPrivileges
],
ProjectPermissionSub.Groups
);
can(
[
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Delete,
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionIdentityActions.AssumePrivileges
],
ProjectPermissionSub.Identity
);
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
can(
[
ProjectPermissionDynamicSecretActions.ReadRootCredential,
ProjectPermissionDynamicSecretActions.EditRootCredential,
ProjectPermissionDynamicSecretActions.CreateRootCredential,
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
ProjectPermissionDynamicSecretActions.Lease
],
ProjectPermissionSub.DynamicSecrets
);
can([ProjectPermissionActions.Edit, ProjectPermissionActions.Delete], ProjectPermissionSub.Project);
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
can([ProjectPermissionActions.Edit], ProjectPermissionSub.Kms);
can(
[
ProjectPermissionCmekActions.Create,
ProjectPermissionCmekActions.Edit,
ProjectPermissionCmekActions.Delete,
ProjectPermissionCmekActions.Read,
ProjectPermissionCmekActions.Encrypt,
ProjectPermissionCmekActions.Decrypt,
ProjectPermissionCmekActions.Sign,
ProjectPermissionCmekActions.Verify
],
ProjectPermissionSub.Cmek
);
can(
[
ProjectPermissionSecretSyncActions.Create,
ProjectPermissionSecretSyncActions.Edit,
ProjectPermissionSecretSyncActions.Delete,
ProjectPermissionSecretSyncActions.Read,
ProjectPermissionSecretSyncActions.SyncSecrets,
ProjectPermissionSecretSyncActions.ImportSecrets,
ProjectPermissionSecretSyncActions.RemoveSecrets
],
ProjectPermissionSub.SecretSyncs
);
can(
[
ProjectPermissionKmipActions.CreateClients,
ProjectPermissionKmipActions.UpdateClients,
ProjectPermissionKmipActions.DeleteClients,
ProjectPermissionKmipActions.ReadClients,
ProjectPermissionKmipActions.GenerateClientCertificates
],
ProjectPermissionSub.Kmip
);
can(
[
ProjectPermissionSecretRotationActions.Create,
ProjectPermissionSecretRotationActions.Edit,
ProjectPermissionSecretRotationActions.Delete,
ProjectPermissionSecretRotationActions.Read,
ProjectPermissionSecretRotationActions.ReadGeneratedCredentials,
ProjectPermissionSecretRotationActions.RotateSecrets
],
ProjectPermissionSub.SecretRotation
);
return rules;
};
export const projectAdminPermissions = buildAdminPermissionRules();
const buildMemberPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.SecretFolders
);
can(
[
ProjectPermissionDynamicSecretActions.ReadRootCredential,
ProjectPermissionDynamicSecretActions.EditRootCredential,
ProjectPermissionDynamicSecretActions.CreateRootCredential,
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
ProjectPermissionDynamicSecretActions.Lease
],
ProjectPermissionSub.DynamicSecrets
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.SecretImports
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
can([ProjectPermissionSecretRotationActions.Read], ProjectPermissionSub.SecretRotation);
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
can([ProjectPermissionMemberActions.Read, ProjectPermissionMemberActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionGroupActions.Read], ProjectPermissionSub.Groups);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Integrations
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Webhooks
);
can(
[
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Delete
],
ProjectPermissionSub.Identity
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.ServiceTokens
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Settings
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Environments
);
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
ProjectPermissionSub.Tags
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.Role);
can([ProjectPermissionActions.Read], ProjectPermissionSub.AuditLogs);
can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateAuthorities);
can(
[
ProjectPermissionCertificateActions.Read,
ProjectPermissionCertificateActions.Edit,
ProjectPermissionCertificateActions.Create,
ProjectPermissionCertificateActions.Delete
],
ProjectPermissionSub.Certificates
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateTemplates);
can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiAlerts);
can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiCollections);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificates);
can([ProjectPermissionActions.Create], ProjectPermissionSub.SshCertificates);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificateTemplates);
can([ProjectPermissionSshHostActions.Read], ProjectPermissionSub.SshHosts);
can(
[
ProjectPermissionCmekActions.Create,
ProjectPermissionCmekActions.Edit,
ProjectPermissionCmekActions.Delete,
ProjectPermissionCmekActions.Read,
ProjectPermissionCmekActions.Encrypt,
ProjectPermissionCmekActions.Decrypt,
ProjectPermissionCmekActions.Sign,
ProjectPermissionCmekActions.Verify
],
ProjectPermissionSub.Cmek
);
can(
[
ProjectPermissionSecretSyncActions.Create,
ProjectPermissionSecretSyncActions.Edit,
ProjectPermissionSecretSyncActions.Delete,
ProjectPermissionSecretSyncActions.Read,
ProjectPermissionSecretSyncActions.SyncSecrets,
ProjectPermissionSecretSyncActions.ImportSecrets,
ProjectPermissionSecretSyncActions.RemoveSecrets
],
ProjectPermissionSub.SecretSyncs
);
return rules;
};
export const projectMemberPermissions = buildMemberPermissionRules();
const buildViewerPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(ProjectPermissionSecretActions.DescribeAndReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.ReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
can(ProjectPermissionSecretRotationActions.Read, ProjectPermissionSub.SecretRotation);
can(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
can(ProjectPermissionIdentityActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates);
can(ProjectPermissionCmekActions.Read, ProjectPermissionSub.Cmek);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
return rules;
};
export const projectViewerPermission = buildViewerPermissionRules();
const buildNoAccessProjectPermission = () => {
const { rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
return rules;
};
export const buildServiceTokenProjectPermission = (
scopes: Array<{ secretPath: string; environment: string }>,
permission: string[]
@@ -1116,8 +719,6 @@ export const buildServiceTokenProjectPermission = (
return build({ conditionsMatcher });
};
export const projectNoAccessPermissions = buildNoAccessProjectPermission();
/* eslint-disable */
/**

View File

@@ -1,22 +1,27 @@
import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
import { ProjectType } from "@app/db/schemas";
import {
InfisicalProjectTemplate,
TUnpackedPermission
} from "@app/ee/services/project-template/project-template-types";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";
export const getDefaultProjectTemplate = (orgId: string) => ({
import { ProjectTemplateDefaultEnvironments } from "./project-template-constants";
export const getDefaultProjectTemplate = (orgId: string, type: ProjectType) => ({
id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // random ID to appease zod
type,
name: InfisicalProjectTemplate.Default,
createdAt: new Date(),
updatedAt: new Date(),
description: "Infisical's default project template",
environments: ProjectTemplateDefaultEnvironments,
roles: [...getPredefinedRoles("project-template")].map(({ name, slug, permissions }) => ({
name,
slug,
permissions: permissions as TUnpackedPermission[]
})),
description: `Infisical's ${type} default project template`,
environments: type === ProjectType.SecretManager ? ProjectTemplateDefaultEnvironments : null,
roles: [...getPredefinedRoles({ projectId: "project-template", projectType: type })].map(
({ name, slug, permissions }) => ({
name,
slug,
permissions: permissions as TUnpackedPermission[]
})
),
orgId
});
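
With the new type parameter, callers build one default template per project type, and non-SecretManager templates carry null environments. A sketch matching the service change below (orgId hypothetical):

const defaults = Object.values(ProjectType).map((projectType) =>
  getDefaultProjectTemplate("org_123", projectType)
);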

View File

@@ -1,10 +1,11 @@
import { ForbiddenError } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import { TProjectTemplates } from "@app/db/schemas";
import { ProjectType, TProjectTemplates } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
import { getDefaultProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import {
TCreateProjectTemplateDTO,
@@ -32,11 +33,13 @@ const $unpackProjectTemplate = ({ roles, environments, ...rest }: TProjectTempla
...rest,
environments: environments as TProjectTemplateEnvironment[],
roles: [
...getPredefinedRoles("project-template").map(({ name, slug, permissions }) => ({
name,
slug,
permissions: permissions as TUnpackedPermission[]
})),
...getPredefinedRoles({ projectId: "project-template", projectType: rest.type as ProjectType }).map(
({ name, slug, permissions }) => ({
name,
slug,
permissions: permissions as TUnpackedPermission[]
})
),
...(roles as TProjectTemplateRole[]).map((role) => ({
...role,
permissions: unpackPermissions(role.permissions)
@@ -49,7 +52,7 @@ export const projectTemplateServiceFactory = ({
permissionService,
projectTemplateDAL
}: TProjectTemplatesServiceFactoryDep) => {
const listProjectTemplatesByOrg = async (actor: OrgServiceActor) => {
const listProjectTemplatesByOrg = async (actor: OrgServiceActor, type?: ProjectType) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.projectTemplates)
@@ -68,11 +71,14 @@ export const projectTemplateServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
const projectTemplates = await projectTemplateDAL.find({
orgId: actor.orgId
orgId: actor.orgId,
...(type ? { type } : {})
});
return [
getDefaultProjectTemplate(actor.orgId),
...(type
? [getDefaultProjectTemplate(actor.orgId, type)]
: Object.values(ProjectType).map((projectType) => getDefaultProjectTemplate(actor.orgId, projectType))),
...projectTemplates.map((template) => $unpackProjectTemplate(template))
];
};
@@ -134,7 +140,7 @@ export const projectTemplateServiceFactory = ({
};
const createProjectTemplate = async (
{ roles, environments, ...params }: TCreateProjectTemplateDTO,
{ roles, environments, type, ...params }: TCreateProjectTemplateDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
@@ -154,6 +160,17 @@ export const projectTemplateServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
if (environments && type !== ProjectType.SecretManager) {
throw new BadRequestError({ message: "Cannot configure environments for non-SecretManager project templates" });
}
if (environments && plan.environmentLimit !== null && environments.length > plan.environmentLimit) {
throw new BadRequestError({
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
message: `Failed to create project template due to environment count exceeding your current limit of ${plan.environmentLimit}. Contact Infisical to increase limit.`
});
}
const isConflictingName = Boolean(
await projectTemplateDAL.findOne({
name: params.name,
@@ -169,8 +186,10 @@ export const projectTemplateServiceFactory = ({
const projectTemplate = await projectTemplateDAL.create({
...params,
roles: JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) }))),
environments: JSON.stringify(environments),
orgId: actor.orgId
environments:
type === ProjectType.SecretManager ? JSON.stringify(environments ?? ProjectTemplateDefaultEnvironments) : null,
orgId: actor.orgId,
type
});
return $unpackProjectTemplate(projectTemplate);
@@ -202,6 +221,19 @@ export const projectTemplateServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates);
if (projectTemplate.type !== ProjectType.SecretManager && environments)
throw new BadRequestError({ message: "Cannot configure environments for non-SecretManager project templates" });
if (projectTemplate.type === ProjectType.SecretManager && environments === null)
throw new BadRequestError({ message: "Environments cannot be removed for SecretManager project templates" });
if (environments && plan.environmentLimit !== null && environments.length > plan.environmentLimit) {
throw new BadRequestError({
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
message: `Failed to update project template due to environment count exceeding your current limit of ${plan.environmentLimit}. Contact Infisical to increase limit.`
});
}
if (params.name && projectTemplate.name !== params.name) {
const isConflictingName = Boolean(
await projectTemplateDAL.findOne({

View File

@@ -1,6 +1,6 @@
import { z } from "zod";
import { TProjectEnvironments } from "@app/db/schemas";
import { ProjectType, TProjectEnvironments } from "@app/db/schemas";
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
@@ -15,8 +15,9 @@ export type TProjectTemplateRole = {
export type TCreateProjectTemplateDTO = {
name: string;
description?: string;
type: ProjectType;
roles: TProjectTemplateRole[];
environments: TProjectTemplateEnvironment[];
environments?: TProjectTemplateEnvironment[] | null;
};
export type TUpdateProjectTemplateDTO = Partial<TCreateProjectTemplateDTO>;

View File

@@ -0,0 +1,11 @@
import { getConfig } from "@app/lib/config/env";
export const canUseSecretScanning = (orgId: string) => {
const appCfg = getConfig();
if (!appCfg.isCloud) {
return true;
}
return appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(orgId);
};
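
The helper reads as: self-hosted deployments can always use secret scanning, while cloud deployments must be on the org whitelist. A sketch of the gate (orgId hypothetical):

if (canUseSecretScanning("org_123")) {
  // safe to enqueue repository scans for this organization
}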

View File

@@ -12,6 +12,7 @@ import { NotFoundError } from "@app/lib/errors";
import { TGitAppDALFactory } from "./git-app-dal";
import { TGitAppInstallSessionDALFactory } from "./git-app-install-session-dal";
import { TSecretScanningDALFactory } from "./secret-scanning-dal";
import { canUseSecretScanning } from "./secret-scanning-fns";
import { TSecretScanningQueueFactory } from "./secret-scanning-queue";
import {
SecretScanningRiskStatus,
@@ -47,12 +48,14 @@ export const secretScanningServiceFactory = ({
actorAuthMethod,
actorOrgId
}: TInstallAppSessionDTO) => {
const appCfg = getConfig();
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.SecretScanning);
const sessionId = crypto.randomBytes(16).toString("hex");
await gitAppInstallSessionDAL.upsert({ orgId, sessionId, userId: actorId });
return { sessionId };
return { sessionId, gitAppSlug: appCfg.SECRET_SCANNING_GIT_APP_SLUG };
};
const linkInstallationToOrg = async ({
@@ -91,7 +94,8 @@ export const secretScanningServiceFactory = ({
const {
data: { repositories }
} = await octokit.apps.listReposAccessibleToInstallation();
if (appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(actorOrgId)) {
if (canUseSecretScanning(actorOrgId)) {
await Promise.all(
repositories.map(({ id, full_name }) =>
secretScanningQueue.startFullRepoScan({
@@ -102,6 +106,7 @@ export const secretScanningServiceFactory = ({
)
);
}
return { installatedApp };
};
@@ -164,7 +169,6 @@ export const secretScanningServiceFactory = ({
};
const handleRepoPushEvent = async (payload: WebhookEventMap["push"]) => {
const appCfg = getConfig();
const { commits, repository, installation, pusher } = payload;
if (!commits || !repository || !installation || !pusher) {
return;
@@ -175,7 +179,7 @@ export const secretScanningServiceFactory = ({
});
if (!installationLink) return;
if (appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(installationLink.orgId)) {
if (canUseSecretScanning(installationLink.orgId)) {
await secretScanningQueue.startPushEventScan({
commits,
pusher: { name: pusher.name, email: pusher.email },

View File

@@ -28,6 +28,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroup}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -35,7 +36,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.SshHostGroup),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
)
.orderBy(`${TableName.SshHostGroup}.updatedAt`, "desc");
@@ -69,7 +71,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
}
}));
return {
@@ -99,6 +102,7 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroup}.id`, sshHostGroupId)
.select(
db.ref("id").withSchema(TableName.SshHostGroup).as("sshHostGroupId"),
@@ -106,7 +110,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
db.ref("name").withSchema(TableName.SshHostGroup),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping)
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
if (rows.length === 0) return null;
@@ -121,7 +126,8 @@ export const sshHostGroupDALFactory = (db: TDbClient) => {
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
}
}));
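
After this change, each login mapping can carry both principal kinds; a sketch of the resulting shape (values hypothetical):

const loginMapping = {
  loginUser: "ec2-user",
  allowedPrincipals: {
    usernames: ["alice@example.com"],
    groups: ["devops"] // group slugs resolved via the new Groups join
  }
};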

View File

@@ -12,6 +12,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TGroupDALFactory } from "../group/group-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { createSshLoginMappings } from "../ssh-host/ssh-host-fns";
import {
@@ -43,8 +44,12 @@ type TSshHostGroupServiceFactoryDep = {
sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction" | "delete">;
sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
userDAL: Pick<TUserDALFactory, "find">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
permissionService: Pick<
TPermissionServiceFactory,
"getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
>;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
};
export type TSshHostGroupServiceFactory = ReturnType<typeof sshHostGroupServiceFactory>;
@@ -58,7 +63,8 @@ export const sshHostGroupServiceFactory = ({
sshHostLoginUserMappingDAL,
userDAL,
permissionService,
licenseService
licenseService,
groupDAL
}: TSshHostGroupServiceFactoryDep) => {
const createSshHostGroup = async ({
projectId,
@@ -127,6 +133,7 @@ export const sshHostGroupServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -194,6 +201,7 @@ export const sshHostGroupServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId: sshHostGroup.projectId,

View File

@@ -9,12 +9,7 @@ export type TCreateSshHostGroupDTO = {
export type TUpdateSshHostGroupDTO = {
sshHostGroupId: string;
name?: string;
loginMappings?: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
loginMappings?: TLoginMapping[];
} & Omit<TProjectPermission, "projectId">;
export type TGetSshHostGroupDTO = {

View File

@@ -31,8 +31,18 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SshHostLoginUserMapping}.userId`)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.UserGroupMembership}.groupId`,
`${TableName.SshHostLoginUserMapping}.groupId`
)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.andWhere((bd) => {
void bd
.where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.orWhere(`${TableName.UserGroupMembership}.userId`, userId);
})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -58,8 +68,17 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.join(TableName.SshHost, `${TableName.SshHostGroupMembership}.sshHostId`, `${TableName.SshHost}.id`)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.UserGroupMembership}.groupId`,
`${TableName.SshHostLoginUserMapping}.groupId`
)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.andWhere((bd) => {
void bd
.where(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.orWhere(`${TableName.UserGroupMembership}.userId`, userId);
})
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
@@ -133,6 +152,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -144,6 +164,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug"),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
@@ -163,10 +184,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.select(
db.ref("sshHostId").withSchema(TableName.SshHostGroupMembership),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
)
.whereIn(`${TableName.SshHostGroupMembership}.sshHostId`, hostIds);
@@ -185,7 +208,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -197,7 +221,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(inheritedGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));
@@ -229,6 +254,7 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHost}.id`, sshHostId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
@@ -241,7 +267,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
db.ref("hostSshCaId").withSchema(TableName.SshHost),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
if (rows.length === 0) return null;
@@ -257,7 +284,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const directMappings = Object.entries(directGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST
}));
@@ -275,10 +303,12 @@ export const sshHostDALFactory = (db: TDbClient) => {
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.Groups, `${TableName.SshHostLoginUserMapping}.groupId`, `${TableName.Groups}.id`)
.where(`${TableName.SshHostGroupMembership}.sshHostId`, sshHostId)
.select(
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users)
db.ref("username").withSchema(TableName.Users),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
const groupGrouped = groupBy(
@@ -289,7 +319,8 @@ export const sshHostDALFactory = (db: TDbClient) => {
const groupMappings = Object.entries(groupGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
usernames: unique(entries.map((e) => e.username)).filter(Boolean),
groups: unique(entries.map((e) => e.groupSlug)).filter(Boolean)
},
source: LoginMappingSource.HOST_GROUP
}));

View File

@@ -3,6 +3,7 @@ import { Knex } from "knex";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "../permission/project-permission";
import { TCreateSshLoginMappingsDTO } from "./ssh-host-types";
/**
@@ -15,6 +16,7 @@ export const createSshLoginMappings = async ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -35,7 +37,7 @@ export const createSshLoginMappings = async ({
tx
);
if (allowedPrincipals.usernames.length > 0) {
if (allowedPrincipals.usernames && allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
@@ -74,6 +76,41 @@ export const createSshLoginMappings = async ({
tx
);
}
if (allowedPrincipals.groups && allowedPrincipals.groups.length > 0) {
const projectGroups = await groupDAL.findGroupsByProjectId(projectId);
const groups = projectGroups.filter((g) => allowedPrincipals.groups?.includes(g.slug));
if (groups.length !== allowedPrincipals.groups?.length) {
throw new BadRequestError({
message: `Invalid group slugs: ${allowedPrincipals.groups
.filter((g) => !projectGroups.some((pg) => pg.slug === g))
.join(", ")}`
});
}
for await (const group of groups) {
// check that each group has access to the SSH project and has read access to hosts
const hasPermission = await permissionService.checkGroupProjectPermission({
groupId: group.id,
projectId,
checkPermissions: [ProjectPermissionSshHostActions.Read, ProjectPermissionSub.SshHosts]
});
if (!hasPermission) {
throw new BadRequestError({
message: `Group ${group.slug} does not have access to the SSH project`
});
}
}
await sshHostLoginUserMappingDAL.insertMany(
groups.map((group) => ({
sshHostLoginUserId: sshHostLoginUser.id,
groupId: group.id
})),
tx
);
}
}
};
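
The slug validation above can be read in isolation; a self-contained sketch of the same check (data hypothetical, plain Error standing in for BadRequestError):

const projectGroups = [{ id: "group_1", slug: "devops" }]; // stand-in for groupDAL.findGroupsByProjectId
const requested = ["devops", "missing-team"];
const unknown = requested.filter((slug) => !projectGroups.some((pg) => pg.slug === slug));
if (unknown.length > 0) {
  throw new Error(`Invalid group slugs: ${unknown.join(", ")}`);
}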

View File

@@ -15,7 +15,24 @@ export const sanitizedSshHost = SshHostsSchema.pick({
export const loginMappingSchema = z.object({
loginUser: z.string().trim(),
allowedPrincipals: z.object({
usernames: z.array(z.string().trim()).transform((usernames) => Array.from(new Set(usernames)))
})
allowedPrincipals: z
.object({
usernames: z
.array(z.string().trim())
.transform((usernames) => Array.from(new Set(usernames)))
.optional(),
groups: z
.array(z.string().trim())
.transform((groups) => Array.from(new Set(groups)))
.optional()
})
.refine(
(data) => {
return (data.usernames && data.usernames.length > 0) || (data.groups && data.groups.length > 0);
},
{
message: "At least one username or group must be provided",
path: ["allowedPrincipals"]
}
)
});
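
The refine enforces that at least one principal list is non-empty; a sketch of both outcomes:

loginMappingSchema.parse({
  loginUser: "deploy",
  allowedPrincipals: { groups: ["devops"] }
}); // passes: groups is non-empty

loginMappingSchema.parse({
  loginUser: "deploy",
  allowedPrincipals: {}
}); // throws: "At least one username or group must be provided"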

View File

@@ -1,6 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
@@ -19,6 +20,7 @@ import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
import {
convertActorToPrincipals,
createSshCert,
@@ -39,12 +41,14 @@ import {
type TSshHostServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findById" | "find">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "findGroupMembershipsByUserIdInOrg">;
sshHostDAL: Pick<
TSshHostDALFactory,
| "transaction"
@@ -58,7 +62,10 @@ type TSshHostServiceFactoryDep = {
>;
sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
permissionService: Pick<
TPermissionServiceFactory,
"getProjectPermission" | "getUserProjectPermission" | "checkGroupProjectPermission"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
@@ -66,6 +73,8 @@ export type TSshHostServiceFactory = ReturnType<typeof sshHostServiceFactory>;
export const sshHostServiceFactory = ({
userDAL,
userGroupMembershipDAL,
groupDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -208,6 +217,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId,
@@ -278,6 +288,7 @@ export const sshHostServiceFactory = ({
loginMappings,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
groupDAL,
userDAL,
permissionService,
projectId: host.projectId,
@@ -387,10 +398,14 @@ export const sshHostServiceFactory = ({
userDAL
});
const userGroups = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(actorId, actorOrgId);
const userGroupSlugs = userGroups.map((g) => g.groupSlug);
const mapping = host.loginMappings.find(
(m) =>
m.loginUser === loginUser &&
m.allowedPrincipals.usernames.some((allowed) => internalPrincipals.includes(allowed))
(m.allowedPrincipals.usernames?.some((allowed) => internalPrincipals.includes(allowed)) ||
m.allowedPrincipals.groups?.some((allowed) => userGroupSlugs.includes(allowed)))
);
if (!mapping) {

View File

@@ -7,12 +7,15 @@ import { TProjectPermission } from "@app/lib/types";
import { ActorAuthMethod } from "@app/services/auth/auth-type";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TGroupDALFactory } from "../group/group-dal";
export type TListSshHostsDTO = Omit<TProjectPermission, "projectId">;
export type TLoginMapping = {
loginUser: string;
allowedPrincipals: {
usernames: string[];
usernames?: string[];
groups?: string[];
};
};
@@ -63,7 +66,8 @@ type BaseCreateSshLoginMappingsDTO = {
sshHostLoginUserDAL: Pick<TSshHostLoginUserDALFactory, "create" | "transaction">;
sshHostLoginUserMappingDAL: Pick<TSshHostLoginUserMappingDALFactory, "insertMany">;
userDAL: Pick<TUserDALFactory, "find">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission">;
permissionService: Pick<TPermissionServiceFactory, "getUserProjectPermission" | "checkGroupProjectPermission">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectId: string;
actorAuthMethod: ActorAuthMethod;
actorOrgId: string;

View File

@@ -1,6 +1,8 @@
import { Redis } from "ioredis";
import { pgAdvisoryLockHashText } from "@app/lib/crypto/hashtext";
import { applyJitter } from "@app/lib/dates";
import { delay as delayMs } from "@app/lib/delay";
import { Redlock, Settings } from "@app/lib/red-lock";
export const PgSqlLock = {
@@ -48,6 +50,13 @@ export const KeyStoreTtls = {
AccessTokenStatusUpdateInSeconds: 120
};
type TDeleteItems = {
pattern: string;
batchSize?: number;
delay?: number;
jitter?: number;
};
type TWaitTillReady = {
key: string;
waitingCb?: () => void;
@@ -75,6 +84,35 @@ export const keyStoreFactory = (redisUrl: string) => {
const deleteItem = async (key: string) => redis.del(key);
const deleteItems = async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }: TDeleteItems) => {
let cursor = "0";
let totalDeleted = 0;
do {
// Await in loop is needed so that Redis is not overwhelmed
// eslint-disable-next-line no-await-in-loop
const [nextCursor, keys] = await redis.scan(cursor, "MATCH", pattern, "COUNT", 1000); // Count should be 1000 - 5000 for prod loads
cursor = nextCursor;
for (let i = 0; i < keys.length; i += batchSize) {
const batch = keys.slice(i, i + batchSize);
const pipeline = redis.pipeline();
for (const key of batch) {
pipeline.unlink(key);
}
// eslint-disable-next-line no-await-in-loop
await pipeline.exec();
totalDeleted += batch.length;
// eslint-disable-next-line no-await-in-loop
await delayMs(Math.max(0, applyJitter(delay, jitter)));
}
} while (cursor !== "0");
return totalDeleted;
};
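// Editor's sketch of the batching rationale above: SCAN pages keys without
// blocking Redis (unlike KEYS), UNLINK reclaims memory off-thread (unlike
// DEL), and the jittered pause between batches keeps multiple instances from
// deleting in lockstep. Hypothetical usage, assuming a "secret-manager:*"
// key namespace:
//   await keyStore.deleteItems({ pattern: "secret-manager:*" });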
const incrementBy = async (key: string, value: number) => redis.incrby(key, value);
const setExpiry = async (key: string, expiryInSeconds: number) => redis.expire(key, expiryInSeconds);
@@ -94,7 +132,7 @@ export const keyStoreFactory = (redisUrl: string) => {
// eslint-disable-next-line
await new Promise((resolve) => {
waitingCb?.();
setTimeout(resolve, Math.max(0, delay + Math.floor((Math.random() * 2 - 1) * jitter)));
setTimeout(resolve, Math.max(0, applyJitter(delay, jitter)));
});
attempts += 1;
// eslint-disable-next-line
@@ -108,6 +146,7 @@ export const keyStoreFactory = (redisUrl: string) => {
setExpiry,
setItemWithExpiry,
deleteItem,
deleteItems,
incrementBy,
acquireLock(resources: string[], duration: number, settings?: Partial<Settings>) {
return redisLock.acquire(resources, duration, settings);

View File

@@ -1,3 +1,7 @@
import RE2 from "re2";
import { applyJitter } from "@app/lib/dates";
import { delay as delayMs } from "@app/lib/delay";
import { Lock } from "@app/lib/red-lock";
import { TKeyStoreFactory } from "./keystore";
@@ -19,6 +23,27 @@ export const inMemoryKeyStore = (): TKeyStoreFactory => {
delete store[key];
return 1;
},
deleteItems: async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }) => {
const regex = new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
let totalDeleted = 0;
const keys = Object.keys(store);
for (let i = 0; i < keys.length; i += batchSize) {
const batch = keys.slice(i, i + batchSize);
for (const key of batch) {
if (regex.test(key)) {
delete store[key];
totalDeleted += 1;
}
}
// eslint-disable-next-line no-await-in-loop
await delayMs(Math.max(0, applyJitter(delay, jitter)));
}
return totalDeleted;
},
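// Editor's note on the conversion above: regex metacharacters in the glob
// pattern are escaped and "*" widens to ".*", so a pattern such as
// "wait_till_ready:*" compiles to the RE2 source ^wait_till_ready:.*$ and
// matches the same keys the Redis MATCH clause would.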
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {

View File

@@ -18,6 +18,7 @@ export enum ApiDocsTags {
KubernetesAuth = "Kubernetes Auth",
JwtAuth = "JWT Auth",
OidcAuth = "OIDC Auth",
LdapAuth = "LDAP Auth",
Groups = "Groups",
Organizations = "Organizations",
Projects = "Projects",
@@ -184,6 +185,49 @@ export const UNIVERSAL_AUTH = {
}
} as const;
export const LDAP_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
username: "The username of the LDAP user to login.",
password: "The password of the LDAP user to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
url: "The URL of the LDAP server.",
allowedFields:
"An array of key/value pairs of required fields that the LDAP entry must have in order to authenticate. Each value may be a comma-separated list of accepted values.",
searchBase: "The base DN to search for the LDAP user.",
searchFilter: "The filter to use to search for the LDAP user.",
bindDN: "The DN of the user to bind to the LDAP server.",
bindPass: "The password of the user to bind to the LDAP server.",
ldapCaCertificate: "The PEM-encoded CA certificate for the LDAP server.",
accessTokenTTL: "The lifetime for an access token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
},
UPDATE: {
identityId: "The ID of the identity to update the configuration for.",
url: "The new URL of the LDAP server.",
allowedFields: "The comma-separated list of allowed fields to return from the LDAP user.",
searchBase: "The new base DN to search for the LDAP user.",
searchFilter: "The new filter to use to search for the LDAP user.",
bindDN: "The new DN of the user to bind to the LDAP server.",
bindPass: "The new password of the user to bind to the LDAP server.",
ldapCaCertificate: "The new PEM-encoded CA certificate for the LDAP server.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the configuration for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the configuration for."
}
} as const;
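The login fields above map to a single unauthenticated call. A hedged client sketch, assuming the route is mounted under /api/v1/auth as registered later in this diff (host and credentials are placeholders):

const res = await fetch("https://app.infisical.com/api/v1/auth/ldap-auth/login", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
identityId: "<identity-id>",
username: "jsmith",
password: "<ldap-password>"
})
});
// On success: { accessToken, expiresIn, accessTokenMaxTTL, tokenType: "Bearer" }
const { accessToken } = await res.json();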
export const AWS_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
@@ -1434,7 +1478,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project.",
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project.",
userSshCaId:
"The ID of the SSH CA to use for user certificates. If not specified, the default user SSH CA will be used if it exists.",
hostSshCaId:
@@ -1449,7 +1493,7 @@ export const SSH_HOSTS = {
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project."
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users or groups slugs in the Infisical SSH project."
},
DELETE: {
sshHostId: "The ID of the SSH host to delete."
@@ -1822,8 +1866,12 @@ export const KMS = {
};
export const ProjectTemplates = {
LIST: {
type: "The type of project template to list."
},
CREATE: {
name: "The name of the project template to be created. Must be slug-friendly.",
type: "The type of project template to be created.",
description: "An optional description of the project template.",
roles: "The roles to be created when the template is applied to a project.",
environments: "The environments to be created when the template is applied to a project."

View File

@@ -146,6 +146,7 @@ const envSchema = z
SECRET_SCANNING_GIT_APP_ID: zpStr(z.string().optional()),
SECRET_SCANNING_PRIVATE_KEY: zpStr(z.string().optional()),
SECRET_SCANNING_ORG_WHITELIST: zpStr(z.string().optional()),
SECRET_SCANNING_GIT_APP_SLUG: zpStr(z.string().default("infisical-radar")),
// LICENSE
LICENSE_SERVER_URL: zpStr(z.string().optional().default("https://portal.infisical.com")),
LICENSE_SERVER_KEY: zpStr(z.string().optional()),

View File

@@ -0,0 +1,4 @@
export const delay = (ms: number) =>
new Promise<void>((resolve) => {
setTimeout(resolve, ms);
});
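The helper pairs with applyJitter throughout this change set. A minimal usage sketch, assuming applyJitter(base, jitter) returns base plus or minus up to jitter milliseconds:

import { applyJitter } from "@app/lib/dates";
import { delay } from "@app/lib/delay";

// sleep roughly 1.5s ± 200ms, clamped at zero, as the keystore batching does
await delay(Math.max(0, applyJitter(1500, 200)));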

View File

@@ -84,7 +84,9 @@ const redactedKeys = [
"secrets",
"key",
"password",
"config"
"config",
"bindPass",
"bindDN"
];
const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";

View File

@@ -25,6 +25,7 @@ import {
TQueueSecretSyncSyncSecretsByIdDTO,
TQueueSendSecretSyncActionFailedNotificationsDTO
} from "@app/services/secret-sync/secret-sync-types";
import { CacheType } from "@app/services/super-admin/super-admin-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
export enum QueueName {
@@ -49,7 +50,8 @@ export enum QueueName {
AccessTokenStatusUpdate = "access-token-status-update",
ImportSecretsFromExternalSource = "import-secrets-from-external-source",
AppConnectionSecretSync = "app-connection-secret-sync",
SecretRotationV2 = "secret-rotation-v2"
SecretRotationV2 = "secret-rotation-v2",
InvalidateCache = "invalidate-cache"
}
export enum QueueJobs {
@@ -81,7 +83,8 @@ export enum QueueJobs {
SecretSyncSendActionFailedNotifications = "secret-sync-send-action-failed-notifications",
SecretRotationV2QueueRotations = "secret-rotation-v2-queue-rotations",
SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification"
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
InvalidateCache = "invalidate-cache"
}
export type TQueueJobTypes = {
@@ -234,6 +237,14 @@ export type TQueueJobTypes = {
name: QueueJobs.SecretRotationV2SendNotification;
payload: TSecretRotationSendNotificationJobPayload;
};
[QueueName.InvalidateCache]: {
name: QueueJobs.InvalidateCache;
payload: {
data: {
type: CacheType;
};
};
};
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
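A hypothetical producer for the new job type, shown only to illustrate the payload shape; the queue(...) helper signature and the CacheType member are assumptions, not confirmed by this diff:

await queueService.queue(QueueName.InvalidateCache, QueueJobs.InvalidateCache, {
data: { type: CacheType.Secrets } // hypothetical enum member
});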

View File

@@ -100,3 +100,18 @@ export const publicSshCaLimit: RateLimitOptions = {
max: 30, // conservative default
keyGenerator: (req) => req.realIp
};
export const invalidateCacheLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 2,
keyGenerator: (req) => req.realIp
};
// Makes spamming "request access" harder, preventing email DDoS
export const requestAccessLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 10,
keyGenerator: (req) => req.realIp
};

View File

@@ -5,7 +5,7 @@
import type { FastifySchema, FastifySchemaCompiler, FastifyTypeProvider } from "fastify";
import type { FastifySerializerCompiler } from "fastify/types/schema";
import type { z, ZodAny, ZodTypeAny } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { PostProcessCallback, zodToJsonSchema } from "zod-to-json-schema";
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type FreeformRecord = Record<string, any>;
@@ -28,9 +28,25 @@ interface Schema extends FastifySchema {
hide?: boolean;
}
// Credit: https://github.com/StefanTerdell/zod-to-json-schema
const jsonDescription: PostProcessCallback = (jsonSchema, def) => {
if (def.description) {
try {
return {
...jsonSchema,
description: undefined,
...JSON.parse(def.description)
};
} catch {}
}
return jsonSchema;
};
const zodToJsonSchemaOptions = {
target: "openApi3",
$refStrategy: "none"
$refStrategy: "none",
postProcess: jsonDescription
} as const;
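The hook above lets a route schema carry extra OpenAPI fields through a Zod description by JSON-encoding them. A small sketch of the round trip (field values are illustrative):

import { z } from "zod";

const projectSlug = z
.string()
.describe(JSON.stringify({ title: "Project Slug", description: "The slug of the project." }));

// After postProcess, the emitted JSON schema is roughly:
// { type: "string", title: "Project Slug", description: "The slug of the project." }
// A plain, non-JSON description fails JSON.parse and passes through unchanged.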
// eslint-disable-next-line @typescript-eslint/no-explicit-any

View File

@@ -160,6 +160,8 @@ import { identityJwtAuthDALFactory } from "@app/services/identity-jwt-auth/ident
import { identityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { identityKubernetesAuthDALFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-dal";
import { identityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { identityLdapAuthDALFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-dal";
import { identityLdapAuthServiceFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-service";
import { identityOidcAuthDALFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-dal";
import { identityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { identityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@@ -242,6 +244,7 @@ import { projectSlackConfigDALFactory } from "@app/services/slack/project-slack-
import { slackIntegrationDALFactory } from "@app/services/slack/slack-integration-dal";
import { slackServiceFactory } from "@app/services/slack/slack-service";
import { TSmtpService } from "@app/services/smtp/smtp-service";
import { invalidateCacheQueueFactory } from "@app/services/super-admin/invalidate-cache-queue";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
@@ -353,6 +356,7 @@ export const registerRoutes = async (
const identityOidcAuthDAL = identityOidcAuthDALFactory(db);
const identityJwtAuthDAL = identityJwtAuthDALFactory(db);
const identityAzureAuthDAL = identityAzureAuthDALFactory(db);
const identityLdapAuthDAL = identityLdapAuthDALFactory(db);
const auditLogDAL = auditLogDALFactory(auditLogDb ?? db);
const auditLogStreamDAL = auditLogStreamDALFactory(db);
@@ -611,6 +615,11 @@ export const registerRoutes = async (
queueService
});
const invalidateCacheQueue = invalidateCacheQueueFactory({
keyStore,
queueService
});
const userService = userServiceFactory({
userDAL,
userAliasDAL,
@@ -722,7 +731,8 @@ export const registerRoutes = async (
keyStore,
licenseService,
kmsService,
microsoftTeamsService
microsoftTeamsService,
invalidateCacheQueue
});
const orgAdminService = orgAdminServiceFactory({
@@ -860,6 +870,8 @@ export const registerRoutes = async (
const sshHostService = sshHostServiceFactory({
userDAL,
groupDAL,
userGroupMembershipDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
@@ -882,7 +894,8 @@ export const registerRoutes = async (
sshHostLoginUserMappingDAL,
userDAL,
permissionService,
licenseService
licenseService,
groupDAL
});
const certificateAuthorityService = certificateAuthorityServiceFactory({
@@ -1438,6 +1451,16 @@ export const registerRoutes = async (
kmsService
});
const identityLdapAuthService = identityLdapAuthServiceFactory({
identityLdapAuthDAL,
permissionService,
kmsService,
identityAccessTokenDAL,
identityOrgMembershipDAL,
licenseService,
identityDAL
});
const gatewayService = gatewayServiceFactory({
permissionService,
gatewayDAL,
@@ -1698,6 +1721,7 @@ export const registerRoutes = async (
identityAzureAuth: identityAzureAuthService,
identityOidcAuth: identityOidcAuthService,
identityJwtAuth: identityJwtAuthService,
identityLdapAuth: identityLdapAuthService,
accessApprovalPolicy: accessApprovalPolicyService,
accessApprovalRequest: accessApprovalRequestService,
secretApprovalPolicy: secretApprovalPolicyService,
@@ -1763,6 +1787,10 @@ export const registerRoutes = async (
if (licenseSyncJob) {
cronJobs.push(licenseSyncJob);
}
const microsoftTeamsSyncJob = await microsoftTeamsService.initializeBackgroundSync();
if (microsoftTeamsSyncJob) {
cronJobs.push(microsoftTeamsSyncJob);
}
}
server.decorate<FastifyZodProvider["store"]>("store", {

View File

@@ -4,13 +4,14 @@ import { z } from "zod";
import { IdentitiesSchema, OrganizationsSchema, SuperAdminSchema, UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { RootKeyEncryptionStrategy } from "@app/services/kms/kms-types";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { CacheType, LoginMethod } from "@app/services/super-admin/super-admin-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerAdminRouter = async (server: FastifyZodProvider) => {
@@ -548,4 +549,69 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
};
}
});
server.route({
method: "POST",
url: "/invalidate-cache",
config: {
rateLimit: invalidateCacheLimit
},
schema: {
body: z.object({
type: z.nativeEnum(CacheType)
}),
response: {
200: z.object({
message: z.string()
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
await server.services.superAdmin.invalidateCache(req.body.type);
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.InvalidateCache,
distinctId: getTelemetryDistinctId(req),
properties: {
...req.auditLogInfo
}
});
return {
message: "Cache invalidation job started"
};
}
});
server.route({
method: "GET",
url: "/invalidating-cache-status",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
invalidating: z.boolean()
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async () => {
const invalidating = await server.services.superAdmin.checkIfInvalidatingCache();
return {
invalidating
};
}
});
};
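A hedged client sketch for the two routes above; the /api/v1/admin prefix and the "secrets" cache type value are assumptions for illustration:

const headers = { Authorization: "Bearer <admin-token>", "Content-Type": "application/json" };

// kick off invalidation (super-admin only, 2 requests per minute per IP)
await fetch("https://app.infisical.com/api/v1/admin/invalidate-cache", {
method: "POST",
headers,
body: JSON.stringify({ type: "secrets" })
});

// poll until the background job finishes
const { invalidating } = await (
await fetch("https://app.infisical.com/api/v1/admin/invalidating-cache-status", { headers })
).json();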

View File

@@ -0,0 +1,497 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
// All the any rules are disabled because the passport types with fastify are really poor
import { Authenticator } from "@fastify/passport";
import fastifySession from "@fastify/session";
import { FastifyRequest } from "fastify";
import { IncomingMessage } from "http";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { IdentityLdapAuthsSchema } from "@app/db/schemas/identity-ldap-auths";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { isValidLdapFilter } from "@app/ee/services/ldap-config/ldap-fns";
import { ApiDocsTags, LDAP_AUTH } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { UnauthorizedError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { AllowedFieldsSchema } from "@app/services/identity-ldap-auth/identity-ldap-auth-types";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";
export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "ldap-identity-auth", userProperty: "passportMachineIdentity" });
await server.register(fastifySession, { secret: appCfg.COOKIE_SECRET_SIGN_KEY });
await server.register(passport.initialize());
await server.register(passport.secureSession());
const getLdapPassportOpts = (req: FastifyRequest, done: any) => {
const { identityId } = req.body as {
identityId: string;
};
process.nextTick(async () => {
try {
const { ldapConfig, opts } = await server.services.identityLdapAuth.getLdapConfig(identityId);
req.ldapConfig = {
...ldapConfig,
isActive: true,
groupSearchBase: "",
uniqueUserAttribute: "",
groupSearchFilter: ""
};
done(null, opts);
} catch (err) {
logger.error(err, "Error in LDAP verification callback");
done(err);
}
});
};
passport.use(
new LdapStrategy(
getLdapPassportOpts as any,
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
try {
const requestBody = (req as unknown as FastifyRequest).body as {
username: string;
password: string;
identityId: string;
};
if (!requestBody.username || !requestBody.password) {
return cb(new UnauthorizedError({ message: "Invalid request. Missing username or password." }), false);
}
if (!requestBody.identityId) {
return cb(new UnauthorizedError({ message: "Invalid request. Missing identity ID." }), false);
}
const { ldapConfig } = req as unknown as FastifyRequest;
if (ldapConfig.allowedFields) {
for (const field of ldapConfig.allowedFields) {
if (!user[field.key]) {
return cb(
new UnauthorizedError({ message: `Invalid request. Missing field ${field.key} on user.` }),
false
);
}
const value = field.value.split(",");
if (!value.includes(user[field.key])) {
return cb(
new UnauthorizedError({
message: `Invalid request. User field '${field.key}' does not match any of the required values.`
}),
false
);
}
}
}
return cb(null, { identityId: requestBody.identityId, user });
} catch (error) {
logger.error(error, "Error in LDAP verification callback");
return cb(error, false);
}
}
)
);
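// Editor's sketch of the allowedFields gate above, with hypothetical values:
//   allowedFields = [{ key: "departmentNumber", value: "eng,ops" }]
// An LDAP entry whose departmentNumber is "eng" or "ops" passes; "sales" is
// rejected, and a missing departmentNumber attribute fails the earlier
// "Missing field" check.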
server.route({
method: "POST",
url: "/ldap-auth/login",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Login with LDAP Auth",
body: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.LOGIN.identityId),
username: z.string().describe(LDAP_AUTH.LOGIN.username),
password: z.string().describe(LDAP_AUTH.LOGIN.password)
}),
response: {
200: z.object({
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
})
}
},
preValidation: passport.authenticate("ldapauth", {
failWithError: true,
session: false
}) as any,
errorHandler: (error) => {
if (error.name === "AuthenticationError") {
throw new UnauthorizedError({ message: "Invalid credentials" });
}
throw error;
},
handler: async (req) => {
if (!req.passportMachineIdentity?.identityId) {
throw new UnauthorizedError({ message: "Invalid request. Missing identity ID or LDAP entry details." });
}
const { identityId, user } = req.passportMachineIdentity;
const { accessToken, identityLdapAuth, identityMembershipOrg } = await server.services.identityLdapAuth.login({
identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg?.orgId,
event: {
type: EventType.LOGIN_IDENTITY_LDAP_AUTH,
metadata: {
identityId,
ldapEmail: user.mail,
ldapUsername: user.uid
}
}
});
return {
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityLdapAuth.accessTokenTTL,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL
};
}
});
server.route({
method: "POST",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Attach LDAP Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.ATTACH.identityId)
}),
body: z
.object({
url: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.url),
bindDN: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.bindDN),
bindPass: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.bindPass),
searchBase: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.searchBase),
searchFilter: z
.string()
.trim()
.min(1)
.default("(uid={{username}})")
.refine(isValidLdapFilter, "Invalid LDAP search filter")
.describe(LDAP_AUTH.ATTACH.searchFilter),
allowedFields: AllowedFieldsSchema.array().optional().describe(LDAP_AUTH.ATTACH.allowedFields),
ldapCaCertificate: z.string().trim().optional().describe(LDAP_AUTH.ATTACH.ldapCaCertificate),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(LDAP_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(1)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(LDAP_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.attachLdapAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId,
isActorSuperAdmin: isSuperAdmin(req.auth)
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ADD_IDENTITY_LDAP_AUTH,
metadata: {
identityId: req.params.identityId,
url: identityLdapAuth.url,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL,
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
allowedFields: req.body.allowedFields
}
}
});
return { identityLdapAuth };
}
});
server.route({
method: "PATCH",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Update LDAP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.UPDATE.identityId)
}),
body: z
.object({
url: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.url),
bindDN: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.bindDN),
bindPass: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.bindPass),
searchBase: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.searchBase),
searchFilter: z
.string()
.trim()
.min(1)
.optional()
.refine((v) => v === undefined || isValidLdapFilter(v), "Invalid LDAP search filter")
.describe(LDAP_AUTH.UPDATE.searchFilter),
allowedFields: AllowedFieldsSchema.array().optional().describe(LDAP_AUTH.UPDATE.allowedFields),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(LDAP_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(LDAP_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.optional()
.describe(LDAP_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.min(0)
.optional()
.describe(LDAP_AUTH.UPDATE.accessTokenMaxTTL)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.updateLdapAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.UPDATE_IDENTITY_LDAP_AUTH,
metadata: {
identityId: req.params.identityId,
url: identityLdapAuth.url,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL,
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
accessTokenTrustedIps: identityLdapAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
allowedFields: req.body.allowedFields
}
}
});
return { identityLdapAuth };
}
});
server.route({
method: "GET",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Retrieve LDAP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.RETRIEVE.identityId)
}),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
}).extend({
bindDN: z.string(),
bindPass: z.string(),
ldapCaCertificate: z.string().optional()
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.getLdapAuth({
identityId: req.params.identityId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_IDENTITY_LDAP_AUTH,
metadata: {
identityId: identityLdapAuth.identityId
}
}
});
return { identityLdapAuth };
}
});
server.route({
method: "DELETE",
url: "/ldap-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapAuth],
description: "Delete LDAP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.REVOKE.identityId)
}),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
encryptedBindDN: true,
encryptedBindPass: true,
encryptedLdapCaCertificate: true
})
})
}
},
handler: async (req) => {
const identityLdapAuth = await server.services.identityLdapAuth.revokeIdentityLdapAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.REVOKE_IDENTITY_LDAP_AUTH,
metadata: {
identityId: identityLdapAuth.identityId
}
}
});
return { identityLdapAuth };
}
});
};

View File

@@ -19,6 +19,7 @@ import { registerIdentityAzureAuthRouter } from "./identity-azure-auth-router";
import { registerIdentityGcpAuthRouter } from "./identity-gcp-auth-router";
import { registerIdentityJwtAuthRouter } from "./identity-jwt-auth-router";
import { registerIdentityKubernetesRouter } from "./identity-kubernetes-auth-router";
import { registerIdentityLdapAuthRouter } from "./identity-ldap-auth-router";
import { registerIdentityOidcAuthRouter } from "./identity-oidc-auth-router";
import { registerIdentityRouter } from "./identity-router";
import { registerIdentityTokenAuthRouter } from "./identity-token-auth-router";
@@ -63,6 +64,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await authRouter.register(registerIdentityAzureAuthRouter);
await authRouter.register(registerIdentityOidcAuthRouter);
await authRouter.register(registerIdentityJwtAuthRouter);
await authRouter.register(registerIdentityLdapAuthRouter);
},
{ prefix: "/auth" }
);

View File

@@ -2,7 +2,7 @@ import { z } from "zod";
import { ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -47,7 +47,7 @@ export const registerOrgAdminRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/projects/:projectId/grant-admin-access",
config: {
rateLimit: readLimit
rateLimit: writeLimit
},
schema: {
params: z.object({

View File

@@ -19,7 +19,7 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags, PROJECTS } from "@app/lib/api-docs";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { re2Validator } from "@app/lib/zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, requestAccessLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { validateMicrosoftTeamsChannelsSchema } from "@app/services/microsoft-teams/microsoft-teams-fns";
@@ -1006,7 +1006,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/:workspaceId/project-access",
config: {
rateLimit: writeLimit
rateLimit: requestAccessLimit
},
schema: {
params: z.object({

View File

@@ -170,7 +170,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.optional()
.default(InfisicalProjectTemplate.Default)
.describe(PROJECTS.CREATE.template),
type: z.nativeEnum(ProjectType).default(ProjectType.SecretManager)
type: z.nativeEnum(ProjectType).default(ProjectType.SecretManager),
shouldCreateDefaultEnvs: z.boolean().optional().default(true)
}),
response: {
200: z.object({
@@ -190,7 +191,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
slug: req.body.slug,
kmsKeyId: req.body.kmsKeyId,
template: req.body.template,
type: req.body.type
type: req.body.type,
createDefaultEnvs: req.body.shouldCreateDefaultEnvs
});
await server.services.telemetry.sendPostHogEvents({
@@ -272,7 +274,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
schema: {
params: z.object({
slug: slugSchema({ min: 5, max: 36 }).describe("The slug of the project to get.")
slug: slugSchema({ max: 36 }).describe("The slug of the project to get.")
}),
response: {
200: projectWithEnv

View File

@@ -30,6 +30,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
.leftJoin(TableName.IdentityGcpAuth, `${TableName.Identity}.id`, `${TableName.IdentityGcpAuth}.identityId`)
.leftJoin(TableName.IdentityAwsAuth, `${TableName.Identity}.id`, `${TableName.IdentityAwsAuth}.identityId`)
.leftJoin(TableName.IdentityAzureAuth, `${TableName.Identity}.id`, `${TableName.IdentityAzureAuth}.identityId`)
.leftJoin(TableName.IdentityLdapAuth, `${TableName.Identity}.id`, `${TableName.IdentityLdapAuth}.identityId`)
.leftJoin(
TableName.IdentityKubernetesAuth,
`${TableName.Identity}.id`,
@@ -48,6 +49,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityOidcAuth).as("accessTokenTrustedIpsOidc"),
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityTokenAuth).as("accessTokenTrustedIpsToken"),
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityJwtAuth).as("accessTokenTrustedIpsJwt"),
db.ref("accessTokenTrustedIps").withSchema(TableName.IdentityLdapAuth).as("accessTokenTrustedIpsLdap"),
db.ref("name").withSchema(TableName.Identity)
)
.first();
@@ -63,7 +65,8 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => {
trustedIpsKubernetesAuth: doc.accessTokenTrustedIpsK8s,
trustedIpsOidcAuth: doc.accessTokenTrustedIpsOidc,
trustedIpsAccessTokenAuth: doc.accessTokenTrustedIpsToken,
trustedIpsAccessJwtAuth: doc.accessTokenTrustedIpsJwt
trustedIpsAccessJwtAuth: doc.accessTokenTrustedIpsJwt,
trustedIpsAccessLdapAuth: doc.accessTokenTrustedIpsLdap
};
} catch (error) {
throw new DatabaseError({ error, name: "IdAccessTokenFindOne" });

View File

@@ -186,7 +186,8 @@ export const identityAccessTokenServiceFactory = ({
[IdentityAuthMethod.KUBERNETES_AUTH]: identityAccessToken.trustedIpsKubernetesAuth,
[IdentityAuthMethod.OIDC_AUTH]: identityAccessToken.trustedIpsOidcAuth,
[IdentityAuthMethod.TOKEN_AUTH]: identityAccessToken.trustedIpsAccessTokenAuth,
[IdentityAuthMethod.JWT_AUTH]: identityAccessToken.trustedIpsAccessJwtAuth
[IdentityAuthMethod.JWT_AUTH]: identityAccessToken.trustedIpsAccessJwtAuth,
[IdentityAuthMethod.LDAP_AUTH]: identityAccessToken.trustedIpsAccessLdapAuth
};
const trustedIps = trustedIpsMap[identityAccessToken.authMethod as IdentityAuthMethod];

View File

@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TIdentityLdapAuthDALFactory = ReturnType<typeof identityLdapAuthDALFactory>;
export const identityLdapAuthDALFactory = (db: TDbClient) => {
const ldapAuthOrm = ormify(db, TableName.IdentityLdapAuth);
return ldapAuthOrm;
};

View File

@@ -0,0 +1,543 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { IdentityAuthMethod } from "@app/db/schemas";
import { testLDAPConfig } from "@app/ee/services/ldap-config/ldap-fns";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionIdentityActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import {
constructPermissionErrorMessage,
validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityDALFactory } from "../identity/identity-dal";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TIdentityAccessTokenDALFactory } from "../identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenJwtPayload } from "../identity-access-token/identity-access-token-types";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { validateIdentityUpdateForSuperAdminPrivileges } from "../super-admin/super-admin-fns";
import { TIdentityLdapAuthDALFactory } from "./identity-ldap-auth-dal";
import {
AllowedFieldsSchema,
TAttachLdapAuthDTO,
TGetLdapAuthDTO,
TLoginLdapAuthDTO,
TRevokeLdapAuthDTO,
TUpdateLdapAuthDTO
} from "./identity-ldap-auth-types";
type TIdentityLdapAuthServiceFactoryDep = {
identityAccessTokenDAL: Pick<TIdentityAccessTokenDALFactory, "create" | "delete">;
identityLdapAuthDAL: Pick<
TIdentityLdapAuthDALFactory,
"findOne" | "transaction" | "create" | "updateById" | "delete"
>;
identityOrgMembershipDAL: Pick<TIdentityOrgDALFactory, "findOne">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
kmsService: TKmsServiceFactory;
identityDAL: TIdentityDALFactory;
};
export type TIdentityLdapAuthServiceFactory = ReturnType<typeof identityLdapAuthServiceFactory>;
export const identityLdapAuthServiceFactory = ({
identityAccessTokenDAL,
identityDAL,
identityLdapAuthDAL,
identityOrgMembershipDAL,
licenseService,
permissionService,
kmsService
}: TIdentityLdapAuthServiceFactoryDep) => {
const getLdapConfig = async (identityId: string) => {
const identity = await identityDAL.findOne({ id: identityId });
if (!identity) throw new NotFoundError({ message: `Identity with ID '${identityId}' not found` });
const identityOrgMembership = await identityOrgMembershipDAL.findOne({ identityId: identity.id });
if (!identityOrgMembership) throw new NotFoundError({ message: `Identity with ID '${identityId}' not found` });
const ldapAuth = await identityLdapAuthDAL.findOne({ identityId: identity.id });
if (!ldapAuth) throw new NotFoundError({ message: `LDAP auth for identity with ID '${identityId}' not found` });
const parsedAllowedFields = ldapAuth.allowedFields
? AllowedFieldsSchema.array().parse(ldapAuth.allowedFields)
: undefined;
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityOrgMembership.orgId
});
const bindDN = decryptor({ cipherTextBlob: ldapAuth.encryptedBindDN }).toString();
const bindPass = decryptor({ cipherTextBlob: ldapAuth.encryptedBindPass }).toString();
const ldapCaCertificate = ldapAuth.encryptedLdapCaCertificate
? decryptor({ cipherTextBlob: ldapAuth.encryptedLdapCaCertificate }).toString()
: undefined;
const ldapConfig = {
id: ldapAuth.id,
organization: identityOrgMembership.orgId,
url: ldapAuth.url,
bindDN,
bindPass,
searchBase: ldapAuth.searchBase,
searchFilter: ldapAuth.searchFilter,
caCert: ldapCaCertificate || "",
allowedFields: parsedAllowedFields
};
const opts = {
server: {
url: ldapAuth.url,
bindDN,
bindCredentials: bindPass,
searchBase: ldapAuth.searchBase,
searchFilter: ldapAuth.searchFilter,
...(ldapCaCertificate
? {
tlsOptions: {
ca: [ldapCaCertificate]
}
}
: {})
},
passReqToCallback: true
};
return { opts, ldapConfig };
};
const login = async ({ identityId }: TLoginLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) {
throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
}
const identityLdapAuth = await identityLdapAuthDAL.findOne({ identityId });
if (!identityLdapAuth) {
throw new NotFoundError({ message: `Failed to find LDAP auth for identity with ID ${identityId}` });
}
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
if (!plan.ldap) {
throw new BadRequestError({
message:
"Failed to login to identity due to plan restriction. Upgrade plan to login to use LDAP authentication."
});
}
const identityAccessToken = await identityLdapAuthDAL.transaction(async (tx) => {
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityLdapAuth.identityId,
isAccessTokenRevoked: false,
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL,
accessTokenNumUses: 0,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
authMethod: IdentityAuthMethod.LDAP_AUTH
},
tx
);
return newToken;
});
const appCfg = getConfig();
const accessToken = jwt.sign(
{
identityId: identityLdapAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiring tokens the expiresIn option must be omitted entirely; jsonwebtoken throws even when it is explicitly set to undefined
Number(identityAccessToken.accessTokenTTL) === 0
? undefined
: {
expiresIn: Number(identityAccessToken.accessTokenTTL)
}
);
return { accessToken, identityLdapAuth, identityAccessToken, identityMembershipOrg };
};
const attachLdapAuth = async ({
identityId,
url,
searchBase,
searchFilter,
bindDN,
bindPass,
ldapCaCertificate,
accessTokenTTL,
accessTokenMaxTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps,
actorId,
actorAuthMethod,
actor,
actorOrgId,
isActorSuperAdmin,
allowedFields
}: TAttachLdapAuthDTO) => {
await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new BadRequestError({
message: "Failed to add LDAP Auth to already configured identity"
});
}
if (accessTokenMaxTTL > 0 && accessTokenTTL > accessTokenMaxTTL) {
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
if (!plan.ldap) {
throw new BadRequestError({
message: "Failed to add LDAP Auth to identity due to plan restriction. Upgrade plan to add LDAP Auth."
});
}
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
if (
!plan.ipAllowlisting &&
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
accessTokenTrustedIp.ipAddress !== "::/0"
)
throw new BadRequestError({
message:
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
});
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
throw new BadRequestError({
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
});
return extractIPDetails(accessTokenTrustedIp.ipAddress);
});
if (allowedFields) AllowedFieldsSchema.array().parse(allowedFields);
const identityLdapAuth = await identityLdapAuthDAL.transaction(async (tx) => {
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityMembershipOrg.orgId
});
const { cipherTextBlob: encryptedBindPass } = encryptor({
plainText: Buffer.from(bindPass)
});
let encryptedLdapCaCertificate: Buffer | undefined;
if (ldapCaCertificate) {
const { cipherTextBlob: encryptedCertificate } = encryptor({
plainText: Buffer.from(ldapCaCertificate)
});
encryptedLdapCaCertificate = encryptedCertificate;
}
const { cipherTextBlob: encryptedBindDN } = encryptor({
plainText: Buffer.from(bindDN)
});
const isConnected = await testLDAPConfig({
bindDN,
bindPass,
caCert: ldapCaCertificate || "",
url
});
if (!isConnected) {
throw new BadRequestError({
message:
"Failed to connect to LDAP server. Please ensure that the LDAP server is running and your credentials are correct."
});
}
const doc = await identityLdapAuthDAL.create(
{
identityId: identityMembershipOrg.identityId,
encryptedBindDN,
encryptedBindPass,
searchBase,
searchFilter,
url,
encryptedLdapCaCertificate,
accessTokenMaxTTL,
accessTokenTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps),
allowedFields: allowedFields ? JSON.stringify(allowedFields) : undefined
},
tx
);
return doc;
});
return { ...identityLdapAuth, orgId: identityMembershipOrg.orgId };
};
const updateLdapAuth = async ({
identityId,
url,
searchBase,
searchFilter,
bindDN,
bindPass,
ldapCaCertificate,
allowedFields,
accessTokenTTL,
accessTokenMaxTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdateLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new NotFoundError({
message: "The identity does not have LDAP Auth attached"
});
}
const identityLdapAuth = await identityLdapAuthDAL.findOne({ identityId });
if (
(accessTokenMaxTTL || identityLdapAuth.accessTokenMaxTTL) > 0 &&
(accessTokenTTL || identityLdapAuth.accessTokenTTL) > (accessTokenMaxTTL || identityLdapAuth.accessTokenMaxTTL)
) {
throw new BadRequestError({ message: "Access token TTL cannot be greater than max TTL" });
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
const plan = await licenseService.getPlan(identityMembershipOrg.orgId);
if (!plan.ldap) {
throw new BadRequestError({
message: "Failed to update LDAP Auth due to plan restriction. Upgrade plan to update LDAP Auth."
});
}
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps?.map((accessTokenTrustedIp) => {
if (
!plan.ipAllowlisting &&
accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" &&
accessTokenTrustedIp.ipAddress !== "::/0"
)
throw new BadRequestError({
message:
"Failed to add IP access range to access token due to plan restriction. Upgrade plan to add IP access range."
});
if (!isValidIpOrCidr(accessTokenTrustedIp.ipAddress))
throw new BadRequestError({
message: "The IP is not a valid IPv4, IPv6, or CIDR block"
});
return extractIPDetails(accessTokenTrustedIp.ipAddress);
});
if (allowedFields) AllowedFieldsSchema.array().parse(allowedFields);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityMembershipOrg.orgId
});
let encryptedBindPass: Buffer | undefined;
if (bindPass) {
const { cipherTextBlob: bindPassCiphertext } = encryptor({
plainText: Buffer.from(bindPass)
});
encryptedBindPass = bindPassCiphertext;
}
let encryptedLdapCaCertificate: Buffer | undefined;
if (ldapCaCertificate) {
const { cipherTextBlob: ldapCaCertificateCiphertext } = encryptor({
plainText: Buffer.from(ldapCaCertificate)
});
encryptedLdapCaCertificate = ldapCaCertificateCiphertext;
}
let encryptedBindDN: Buffer | undefined;
if (bindDN) {
const { cipherTextBlob: bindDNCiphertext } = encryptor({
plainText: Buffer.from(bindDN)
});
encryptedBindDN = bindDNCiphertext;
}
const { ldapConfig } = await getLdapConfig(identityId);
const isConnected = await testLDAPConfig({
bindDN: bindDN || ldapConfig.bindDN,
bindPass: bindPass || ldapConfig.bindPass,
caCert: ldapCaCertificate || ldapConfig.caCert,
url: url || ldapConfig.url
});
if (!isConnected) {
throw new BadRequestError({
message:
"Failed to connect to LDAP server. Please ensure that the LDAP server is running and your credentials are correct."
});
}
const updatedLdapAuth = await identityLdapAuthDAL.updateById(identityLdapAuth.id, {
url,
searchBase,
searchFilter,
encryptedBindDN,
encryptedBindPass,
encryptedLdapCaCertificate,
allowedFields: allowedFields ? JSON.stringify(allowedFields) : undefined,
accessTokenMaxTTL,
accessTokenTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps: reformattedAccessTokenTrustedIps
? JSON.stringify(reformattedAccessTokenTrustedIps)
: undefined
});
return { ...updatedLdapAuth, orgId: identityMembershipOrg.orgId };
};
const getLdapAuth = async ({ identityId, actorId, actor, actorAuthMethod, actorOrgId }: TGetLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new BadRequestError({
message: "The identity does not have LDAP Auth attached"
});
}
const ldapIdentityAuth = await identityLdapAuthDAL.findOne({ identityId });
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: identityMembershipOrg.orgId
});
const bindDN = decryptor({ cipherTextBlob: ldapIdentityAuth.encryptedBindDN }).toString();
const bindPass = decryptor({ cipherTextBlob: ldapIdentityAuth.encryptedBindPass }).toString();
const ldapCaCertificate = ldapIdentityAuth.encryptedLdapCaCertificate
? decryptor({ cipherTextBlob: ldapIdentityAuth.encryptedLdapCaCertificate }).toString()
: undefined;
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
return { ...ldapIdentityAuth, orgId: identityMembershipOrg.orgId, bindDN, bindPass, ldapCaCertificate };
};
const revokeIdentityLdapAuth = async ({
identityId,
actorId,
actor,
actorAuthMethod,
actorOrgId
}: TRevokeLdapAuthDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.LDAP_AUTH)) {
throw new BadRequestError({
message: "The identity does not have LDAP Auth attached"
});
}
const { permission, membership } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
const { permission: rolePermission } = await permissionService.getOrgPermission(
ActorType.IDENTITY,
identityMembershipOrg.identityId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.RevokeAuth,
OrgPermissionSubjects.Identity,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to revoke LDAP auth of identity with more privileged role",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionIdentityActions.RevokeAuth,
OrgPermissionSubjects.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const revokedIdentityLdapAuth = await identityLdapAuthDAL.transaction(async (tx) => {
const [deletedLdapAuth] = await identityLdapAuthDAL.delete({ identityId }, tx);
await identityAccessTokenDAL.delete({ identityId, authMethod: IdentityAuthMethod.LDAP_AUTH }, tx);
return { ...deletedLdapAuth, orgId: identityMembershipOrg.orgId };
});
return revokedIdentityLdapAuth;
};
return {
attachLdapAuth,
getLdapConfig,
updateLdapAuth,
login,
revokeIdentityLdapAuth,
getLdapAuth
};
};

View File

@@ -0,0 +1,56 @@
import { z } from "zod";
import { TProjectPermission } from "@app/lib/types";
export const AllowedFieldsSchema = z.object({
key: z.string().trim(),
value: z
.string()
.trim()
.transform((val) => val.replace(/\s/g, ""))
});
export type TAllowedFields = z.infer<typeof AllowedFieldsSchema>;
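The transform above strips all whitespace from value, so comma-separated lists normalize regardless of spacing. A quick illustrative parse:

// input spacing is irrelevant after the transform
AllowedFieldsSchema.parse({ key: "mail", value: "a@corp.com, b@corp.com" });
// -> { key: "mail", value: "a@corp.com,b@corp.com" }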
export type TAttachLdapAuthDTO = {
identityId: string;
url: string;
searchBase: string;
searchFilter: string;
bindDN: string;
bindPass: string;
ldapCaCertificate?: string;
allowedFields?: TAllowedFields[];
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: { ipAddress: string }[];
isActorSuperAdmin?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateLdapAuthDTO = {
identityId: string;
url?: string;
searchBase?: string;
searchFilter?: string;
bindDN?: string;
bindPass?: string;
allowedFields?: TAllowedFields[];
ldapCaCertificate?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: { ipAddress: string }[];
} & Omit<TProjectPermission, "projectId">;
export type TGetLdapAuthDTO = {
identityId: string;
} & Omit<TProjectPermission, "projectId">;
export type TLoginLdapAuthDTO = {
identityId: string;
};
export type TRevokeLdapAuthDTO = {
identityId: string;
} & Omit<TProjectPermission, "projectId">;

View File

@@ -8,7 +8,8 @@ export const buildAuthMethods = ({
oidcId,
azureId,
tokenId,
jwtId
jwtId,
ldapId
}: {
uaId?: string;
gcpId?: string;
@@ -18,6 +19,7 @@ export const buildAuthMethods = ({
azureId?: string;
tokenId?: string;
jwtId?: string;
ldapId?: string;
}) => {
return [
...[uaId ? IdentityAuthMethod.UNIVERSAL_AUTH : null],
@@ -27,6 +29,7 @@ export const buildAuthMethods = ({
...[oidcId ? IdentityAuthMethod.OIDC_AUTH : null],
...[azureId ? IdentityAuthMethod.AZURE_AUTH : null],
...[tokenId ? IdentityAuthMethod.TOKEN_AUTH : null],
...[jwtId ? IdentityAuthMethod.JWT_AUTH : null]
...[jwtId ? IdentityAuthMethod.JWT_AUTH : null],
...[ldapId ? IdentityAuthMethod.LDAP_AUTH : null]
].filter((authMethod) => authMethod) as IdentityAuthMethod[];
};
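Only ids that are actually present survive the filter, so the returned list mirrors the attached auth methods. An illustrative call with hypothetical ids:

buildAuthMethods({ uaId: "ua_123", ldapId: "ldap_456" });
// -> [IdentityAuthMethod.UNIVERSAL_AUTH, IdentityAuthMethod.LDAP_AUTH]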

View File

@@ -14,6 +14,7 @@ import {
TIdentityUniversalAuths,
TOrgRoles
} from "@app/db/schemas";
import { TIdentityLdapAuths } from "@app/db/schemas/identity-ldap-auths";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { buildKnexFilterForSearchResource } from "@app/lib/search-resource/db";
@@ -81,6 +82,11 @@ export const identityOrgDALFactory = (db: TDbClient) => {
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityJwtAuth}.identityId`
)
.leftJoin<TIdentityLdapAuths>(
TableName.IdentityLdapAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityLdapAuth}.identityId`
)
.select(
selectAllTableCols(TableName.IdentityOrgMembership),
@@ -93,7 +99,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth),
db.ref("id").as("ldapId").withSchema(TableName.IdentityLdapAuth),
db.ref("name").withSchema(TableName.Identity)
);
@@ -200,6 +206,12 @@ export const identityOrgDALFactory = (db: TDbClient) => {
"paginatedIdentity.identityId",
`${TableName.IdentityJwtAuth}.identityId`
)
.leftJoin<TIdentityLdapAuths>(
TableName.IdentityLdapAuth,
"paginatedIdentity.identityId",
`${TableName.IdentityLdapAuth}.identityId`
)
.select(
db.ref("id").withSchema("paginatedIdentity"),
db.ref("role").withSchema("paginatedIdentity"),
@@ -217,7 +229,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth)
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth),
db.ref("id").as("ldapId").withSchema(TableName.IdentityLdapAuth)
)
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
@@ -259,6 +272,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
ldapId,
createdAt,
updatedAt
}) => ({
@@ -290,7 +304,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
jwtId
jwtId,
ldapId
})
}
}),
@@ -406,6 +421,11 @@ export const identityOrgDALFactory = (db: TDbClient) => {
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityJwtAuth}.identityId`
)
.leftJoin(
TableName.IdentityLdapAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityLdapAuth}.identityId`
)
.select(
db.ref("id").withSchema(TableName.IdentityOrgMembership),
db.ref("total_count").withSchema("searchedIdentities"),
@@ -424,7 +444,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth)
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth),
db.ref("id").as("ldapId").withSchema(TableName.IdentityLdapAuth)
)
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
@@ -467,6 +488,7 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
ldapId,
createdAt,
updatedAt
}) => ({
@@ -498,7 +520,8 @@ export const identityOrgDALFactory = (db: TDbClient) => {
oidcId,
azureId,
tokenId,
jwtId
jwtId,
ldapId
})
}
}),

View File

@@ -177,7 +177,6 @@ export const deleteGithubSecrets = async ({
selected_repositories_url?: string | undefined;
}
// @ts-expect-error just octokit ts compatibility issue
const OctokitWithRetry = Octokit.plugin(retry);
let octokit: Octokit;
const appCfg = getConfig();

View File

@@ -6,6 +6,7 @@ import {
Request,
Response
} from "botbuilder";
import { CronJob } from "cron";
import { FastifyReply, FastifyRequest } from "fastify";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
@@ -86,8 +87,17 @@ export const microsoftTeamsServiceFactory = ({
}: TMicrosoftTeamsServiceFactoryDep) => {
let teamsBot: TeamsBot | null = null;
let adapter: CloudAdapter | null = null;
let lastKnownUpdatedAt = new Date();
const initializeTeamsBot = async ({ botAppId, botAppPassword }: { botAppId: string; botAppPassword: string }) => {
const initializeTeamsBot = async ({
botAppId,
botAppPassword,
lastUpdatedAt
}: {
botAppId: string;
botAppPassword: string;
lastUpdatedAt?: Date;
}) => {
logger.info("Initializing Microsoft Teams bot");
teamsBot = new TeamsBot({
botAppId,
@@ -106,6 +116,57 @@ export const microsoftTeamsServiceFactory = ({
})
)
);
if (lastUpdatedAt) {
lastKnownUpdatedAt = lastUpdatedAt;
}
};
const $syncMicrosoftTeamsIntegrationConfiguration = async () => {
try {
const serverCfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID);
if (!serverCfg) {
throw new BadRequestError({
message: "Failed to get server configuration."
});
}
if (lastKnownUpdatedAt.getTime() === serverCfg.updatedAt.getTime()) {
logger.info("No changes to Microsoft Teams integration configuration, skipping sync");
return;
}
lastKnownUpdatedAt = serverCfg.updatedAt;
if (
serverCfg.encryptedMicrosoftTeamsAppId &&
serverCfg.encryptedMicrosoftTeamsClientSecret &&
serverCfg.encryptedMicrosoftTeamsBotId
) {
const decryptWithRoot = kmsService.decryptWithRootKey();
const decryptedAppId = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsAppId);
const decryptedAppPassword = decryptWithRoot(serverCfg.encryptedMicrosoftTeamsClientSecret);
await initializeTeamsBot({
botAppId: decryptedAppId.toString(),
botAppPassword: decryptedAppPassword.toString()
});
}
} catch (err) {
logger.error(err, "Error syncing Microsoft Teams integration configuration");
}
};
const initializeBackgroundSync = async () => {
logger.info("Setting up background sync process for Microsoft Teams workflow integration configuration");
// initial sync upon startup
await $syncMicrosoftTeamsIntegrationConfiguration();
// sync Microsoft Teams integration configuration every 5 minutes
const job = new CronJob("*/5 * * * *", $syncMicrosoftTeamsIntegrationConfiguration);
job.start();
return job;
};
const start = async () => {
@@ -703,6 +764,7 @@ export const microsoftTeamsServiceFactory = ({
getTeams,
handleMessageEndpoint,
start,
initializeBackgroundSync,
sendNotification,
checkInstallationStatus,
getClientId
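
The scaling fix above boils down to a poll-and-compare loop: every five minutes a cron job re-reads the admin config row and re-initializes the Teams bot only if updatedAt has advanced past the last value this instance saw, so multiple horizontally scaled instances converge on the same configuration. A stripped-down sketch of that pattern, using the cron package from the diff and a hypothetical fetchConfig stand-in for the DAL call:

import { CronJob } from "cron";

type ServerCfg = { updatedAt: Date };

// hypothetical stand-in for serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID)
const fetchConfig = async (): Promise<ServerCfg> => ({ updatedAt: new Date(0) });

let lastKnownUpdatedAt = new Date();

const sync = async () => {
  const cfg = await fetchConfig();
  if (lastKnownUpdatedAt.getTime() === cfg.updatedAt.getTime()) return; // unchanged, skip
  lastKnownUpdatedAt = cfg.updatedAt;
  // ...decrypt credentials and re-initialize the bot here
};

// "*/5 * * * *" fires at minutes 0, 5, 10, ... matching the service above
const job = new CronJob("*/5 * * * *", sync);
job.start();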

View File

@@ -1,15 +1,20 @@
import { ProjectMembershipRole } from "@app/db/schemas";
import { v4 as uuidv4 } from "uuid";
import { ProjectMembershipRole, ProjectType } from "@app/db/schemas";
import {
cryptographicOperatorPermissions,
projectAdminPermissions,
projectMemberPermissions,
projectNoAccessPermissions,
projectViewerPermission
} from "@app/ee/services/permission/project-permission";
projectViewerPermission,
sshHostBootstrapPermissions
} from "@app/ee/services/permission/default-roles";
import { TGetPredefinedRolesDTO } from "@app/services/project-role/project-role-types";
export const getPredefinedRoles = (projectId: string, roleFilter?: ProjectMembershipRole) => {
export const getPredefinedRoles = ({ projectId, projectType, roleFilter }: TGetPredefinedRolesDTO) => {
return [
{
id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // dummy userid
id: uuidv4(),
projectId,
name: "Admin",
slug: ProjectMembershipRole.Admin,
@@ -19,7 +24,7 @@ export const getPredefinedRoles = (projectId: string, roleFilter?: ProjectMember
updatedAt: new Date()
},
{
id: "b11b49a9-09a9-4443-916a-4246f9ff2c70", // dummy user for zod validation in response
id: uuidv4(),
projectId,
name: "Developer",
slug: ProjectMembershipRole.Member,
@@ -29,7 +34,29 @@ export const getPredefinedRoles = (projectId: string, roleFilter?: ProjectMember
updatedAt: new Date()
},
{
id: "b11b49a9-09a9-4443-916a-4246f9ff2c71", // dummy user for zod validation in response
id: uuidv4(),
projectId,
name: "SSH Host Bootstrapper",
slug: ProjectMembershipRole.SshHostBootstrapper,
permissions: sshHostBootstrapPermissions,
description: "Create and issue SSH Hosts in a project",
createdAt: new Date(),
updatedAt: new Date(),
type: ProjectType.SSH
},
{
id: uuidv4(),
projectId,
name: "Cryptographic Operator",
slug: ProjectMembershipRole.KmsCryptographicOperator,
permissions: cryptographicOperatorPermissions,
description: "Perform cryptographic operations, such as encryption and signing, in a project",
createdAt: new Date(),
updatedAt: new Date(),
type: ProjectType.KMS
},
{
id: uuidv4(),
projectId,
name: "Viewer",
slug: ProjectMembershipRole.Viewer,
@@ -39,7 +66,7 @@ export const getPredefinedRoles = (projectId: string, roleFilter?: ProjectMember
updatedAt: new Date()
},
{
id: "b11b49a9-09a9-4443-916a-4246f9ff2c72", // dummy user for zod validation in response
id: uuidv4(),
projectId,
name: "No Access",
slug: ProjectMembershipRole.NoAccess,
@@ -48,5 +75,5 @@ export const getPredefinedRoles = (projectId: string, roleFilter?: ProjectMember
createdAt: new Date(),
updatedAt: new Date()
}
].filter(({ slug }) => !roleFilter || roleFilter.includes(slug));
].filter(({ slug, type }) => (type ? type === projectType : true) && (!roleFilter || roleFilter === slug));
};
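
The new filter line combines two gates: roles that declare a type are only surfaced for projects of that type, and an optional roleFilter narrows the result to a single slug. Note it also replaces the previous roleFilter.includes(slug) check, which performed substring matching on a single slug string, with strict equality. A standalone sketch of the predicate over hypothetical role data:

type Role = { slug: string; type?: string };

const filterRoles = (roles: Role[], projectType: string, roleFilter?: string) =>
  roles.filter(
    ({ slug, type }) => (type ? type === projectType : true) && (!roleFilter || roleFilter === slug)
  );

const roles: Role[] = [
  { slug: "admin" },                                  // untyped: available to every project type
  { slug: "ssh-host-bootstrapper", type: "ssh" },     // SSH projects only
  { slug: "kms-cryptographic-operator", type: "kms" } // KMS projects only
];

console.log(filterRoles(roles, "ssh").map((r) => r.slug)); // ["admin", "ssh-host-bootstrapper"]
console.log(filterRoles(roles, "ssh", "admin").map((r) => r.slug)); // ["admin"]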

View File

@@ -2,7 +2,7 @@ import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { requestContext } from "@fastify/request-context";
import { ActionProjectType, ProjectMembershipRole, TableName } from "@app/db/schemas";
import { ActionProjectType, ProjectMembershipRole, ProjectType, TableName, TProjects } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionActions,
@@ -34,7 +34,7 @@ type TProjectRoleServiceFactoryDep = {
projectRoleDAL: TProjectRoleDALFactory;
identityDAL: Pick<TIdentityDALFactory, "findById">;
userDAL: Pick<TUserDALFactory, "findById">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug" | "findProjectById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
identityProjectMembershipRoleDAL: TIdentityProjectMembershipRoleDALFactory;
projectUserMembershipRoleDAL: TProjectUserMembershipRoleDALFactory;
@@ -98,30 +98,37 @@ export const projectRoleServiceFactory = ({
roleSlug,
filter
}: TGetRoleDetailsDTO) => {
let projectId = "";
let project: TProjects;
if (filter.type === ProjectRoleServiceIdentifierType.SLUG) {
const project = await projectDAL.findProjectBySlug(filter.projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
projectId = project.id;
project = await projectDAL.findProjectBySlug(filter.projectSlug, actorOrgId);
} else {
projectId = filter.projectId;
project = await projectDAL.findProjectById(filter.projectId);
}
if (!project) throw new NotFoundError({ message: "Project not found" });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
if (roleSlug !== "custom" && Object.values(ProjectMembershipRole).includes(roleSlug as ProjectMembershipRole)) {
const predefinedRole = getPredefinedRoles(projectId, roleSlug as ProjectMembershipRole)[0];
const [predefinedRole] = getPredefinedRoles({
projectId: project.id,
projectType: project.type as ProjectType,
roleFilter: roleSlug as ProjectMembershipRole
});
if (!predefinedRole) throw new NotFoundError({ message: `Default role with slug '${roleSlug}' not found` });
return { ...predefinedRole, permissions: UnpackedPermissionSchema.array().parse(predefinedRole.permissions) };
}
const customRole = await projectRoleDAL.findOne({ slug: roleSlug, projectId });
const customRole = await projectRoleDAL.findOne({ slug: roleSlug, projectId: project.id });
if (!customRole) throw new NotFoundError({ message: `Project role with slug '${roleSlug}' not found` });
return { ...customRole, permissions: unpackPermissions(customRole.permissions) };
};
@@ -194,29 +201,32 @@ export const projectRoleServiceFactory = ({
};
const listRoles = async ({ actorOrgId, actorAuthMethod, actorId, actor, filter }: TListRolesDTO) => {
let projectId = "";
let project: TProjects;
if (filter.type === ProjectRoleServiceIdentifierType.SLUG) {
const project = await projectDAL.findProjectBySlug(filter.projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
projectId = project.id;
project = await projectDAL.findProjectBySlug(filter.projectSlug, actorOrgId);
} else {
projectId = filter.projectId;
project = await projectDAL.findProjectById(filter.projectId);
}
if (!project) throw new BadRequestError({ message: "Project not found" });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
const customRoles = await projectRoleDAL.find(
{ projectId },
{ projectId: project.id },
{ sort: [[`${TableName.ProjectRoles}.slug` as "slug", "asc"]] }
);
const roles = [...getPredefinedRoles(projectId), ...(customRoles || [])];
const roles = [
...getPredefinedRoles({ projectId: project.id, projectType: project.type as ProjectType }),
...(customRoles || [])
];
return roles;
};
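
Both handlers above now resolve the full project record up front, whether the caller passed a slug or an id, and run a single not-found check before using project.id and project.type. A condensed sketch of that shared lookup shape, with hypothetical stand-ins for the DAL methods:

type Project = { id: string; type: string };

type Filter =
  | { type: "slug"; projectSlug: string }
  | { type: "id"; projectId: string };

// hypothetical stand-ins for projectDAL.findProjectBySlug / findProjectById
const findProjectBySlug = async (slug: string): Promise<Project | undefined> =>
  slug === "demo" ? { id: "p1", type: "secret-manager" } : undefined;
const findProjectById = async (id: string): Promise<Project | undefined> =>
  id === "p1" ? { id: "p1", type: "secret-manager" } : undefined;

const resolveProject = async (filter: Filter): Promise<Project> => {
  const project =
    filter.type === "slug"
      ? await findProjectBySlug(filter.projectSlug)
      : await findProjectById(filter.projectId);
  if (!project) throw new Error("Project not found"); // one check covers both branches
  return project;
};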

View File

@@ -1,4 +1,4 @@
import { TOrgRolesUpdate, TProjectRolesInsert } from "@app/db/schemas";
import { ProjectMembershipRole, ProjectType, TOrgRolesUpdate, TProjectRolesInsert } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";
export enum ProjectRoleServiceIdentifierType {
@@ -34,3 +34,9 @@ export type TListRolesDTO = {
| { type: ProjectRoleServiceIdentifierType.SLUG; projectSlug: string }
| { type: ProjectRoleServiceIdentifierType.ID; projectId: string };
} & Omit<TProjectPermission, "projectId">;
export type TGetPredefinedRolesDTO = {
projectId: string;
projectType: ProjectType;
roleFilter?: ProjectMembershipRole;
};

View File

@@ -329,14 +329,16 @@ export const projectServiceFactory = ({
// set default environments and root folder for provided environments
let envs: TProjectEnvironments[] = [];
if (projectTemplate) {
envs = await projectEnvDAL.insertMany(
projectTemplate.environments.map((env) => ({ ...env, projectId: project.id })),
tx
);
await folderDAL.insertMany(
envs.map(({ id }) => ({ name: ROOT_FOLDER_NAME, envId: id, version: 1 })),
tx
);
if (projectTemplate.environments) {
envs = await projectEnvDAL.insertMany(
projectTemplate.environments.map((env) => ({ ...env, projectId: project.id })),
tx
);
await folderDAL.insertMany(
envs.map(({ id }) => ({ name: ROOT_FOLDER_NAME, envId: id, version: 1 })),
tx
);
}
await projectRoleDAL.insertMany(
projectTemplate.packedRoles.map((role) => ({
...role,
@@ -592,7 +594,10 @@ export const projectServiceFactory = ({
workspaces.map(async (workspace) => {
return {
...workspace,
roles: [...(workspaceMappedToRoles[workspace.id] || []), ...getPredefinedRoles(workspace.id)]
roles: [
...(workspaceMappedToRoles[workspace.id] || []),
...getPredefinedRoles({ projectId: workspace.id, projectType: workspace.type as ProjectType })
]
};
})
);

View File

@@ -169,7 +169,7 @@ const getParameterStoreTagsRecord = async (
throw new SecretSyncError({
message:
"IAM role has inadequate permissions to manage resource tags. Ensure the following polices are present: ssm:ListTagsForResource, ssm:AddTagsToResource, and ssm:RemoveTagsFromResource",
"IAM role has inadequate permissions to manage resource tags. Ensure the following policies are present: ssm:ListTagsForResource, ssm:AddTagsToResource, and ssm:RemoveTagsFromResource",
shouldRetry: false
});
}

View File

@@ -0,0 +1,49 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { CacheType } from "./super-admin-types";
export type TInvalidateCacheQueueFactoryDep = {
queueService: TQueueServiceFactory;
keyStore: Pick<TKeyStoreFactory, "deleteItems" | "setItemWithExpiry" | "deleteItem">;
};
export type TInvalidateCacheQueueFactory = ReturnType<typeof invalidateCacheQueueFactory>;
export const invalidateCacheQueueFactory = ({ queueService, keyStore }: TInvalidateCacheQueueFactoryDep) => {
const startInvalidate = async (dto: {
data: {
type: CacheType;
};
}) => {
await queueService.queue(QueueName.InvalidateCache, QueueJobs.InvalidateCache, dto, {
removeOnComplete: true,
removeOnFail: true,
jobId: `invalidate-cache-${dto.data.type}`
});
};
queueService.start(QueueName.InvalidateCache, async (job) => {
try {
const {
data: { type }
} = job.data;
await keyStore.setItemWithExpiry("invalidating-cache", 1800, "true"); // 30 minutes max (in case the job somehow silently fails)
if (type === CacheType.ALL || type === CacheType.SECRETS)
await keyStore.deleteItems({ pattern: "secret-manager:*" });
await keyStore.deleteItem("invalidating-cache");
} catch (err) {
logger.error(err, "Failed to invalidate cache");
await keyStore.deleteItem("invalidating-cache");
}
});
return {
startInvalidate
};
};
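
Beyond queueing, the worker above uses a sentinel key as a progress flag: "invalidating-cache" is written with a 30-minute TTL before the pattern delete starts and removed afterwards (even on failure), so the TTL bounds the flag if the job dies without cleaning up. A minimal sketch of that flag pattern against the same keystore surface:

type KeyStore = {
  setItemWithExpiry: (key: string, ttlSeconds: number, value: string) => Promise<void>;
  deleteItem: (key: string) => Promise<void>;
  deleteItems: (opts: { pattern: string }) => Promise<void>;
  getItem: (key: string) => Promise<string | null>;
};

const invalidateSecretsCache = async (keyStore: KeyStore) => {
  await keyStore.setItemWithExpiry("invalidating-cache", 1800, "true"); // TTL caps a stuck flag at 30 minutes
  try {
    await keyStore.deleteItems({ pattern: "secret-manager:*" });
  } finally {
    await keyStore.deleteItem("invalidating-cache"); // mirrors the delete in both branches above
  }
};

// what checkIfInvalidatingCache does in the super admin service
const isInvalidating = async (keyStore: KeyStore) =>
  (await keyStore.getItem("invalidating-cache")) !== null;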

View File

@@ -25,8 +25,10 @@ import { TOrgServiceFactory } from "../org/org-service";
import { TUserDALFactory } from "../user/user-dal";
import { TUserAliasDALFactory } from "../user-alias/user-alias-dal";
import { UserAliasType } from "../user-alias/user-alias-types";
import { TInvalidateCacheQueueFactory } from "./invalidate-cache-queue";
import { TSuperAdminDALFactory } from "./super-admin-dal";
import {
CacheType,
LoginMethod,
TAdminBootstrapInstanceDTO,
TAdminGetIdentitiesDTO,
@@ -46,9 +48,10 @@ type TSuperAdminServiceFactoryDep = {
kmsService: Pick<TKmsServiceFactory, "encryptWithRootKey" | "decryptWithRootKey" | "updateEncryptionStrategy">;
kmsRootConfigDAL: TKmsRootConfigDALFactory;
orgService: Pick<TOrgServiceFactory, "createOrganization">;
keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry" | "deleteItem">;
keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry" | "deleteItem" | "deleteItems">;
licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
microsoftTeamsService: Pick<TMicrosoftTeamsServiceFactory, "initializeTeamsBot">;
invalidateCacheQueue: TInvalidateCacheQueueFactory;
};
export type TSuperAdminServiceFactory = ReturnType<typeof superAdminServiceFactory>;
@@ -64,7 +67,7 @@ export let getServerCfg: () => Promise<
const ADMIN_CONFIG_KEY = "infisical-admin-cfg";
const ADMIN_CONFIG_KEY_EXP = 60; // 60s
const ADMIN_CONFIG_DB_UUID = "00000000-0000-0000-0000-000000000000";
export const ADMIN_CONFIG_DB_UUID = "00000000-0000-0000-0000-000000000000";
export const superAdminServiceFactory = ({
serverCfgDAL,
@@ -80,7 +83,8 @@ export const superAdminServiceFactory = ({
identityAccessTokenDAL,
identityTokenAuthDAL,
identityOrgMembershipDAL,
microsoftTeamsService
microsoftTeamsService,
invalidateCacheQueue
}: TSuperAdminServiceFactoryDep) => {
const initServerCfg = async () => {
// TODO(akhilmhdh): bad pattern; change this later when time permits
@@ -242,7 +246,8 @@ export const superAdminServiceFactory = ({
await microsoftTeamsService.initializeTeamsBot({
botAppId: decryptedAppId.toString(),
botAppPassword: decryptedAppPassword.toString()
botAppPassword: decryptedAppPassword.toString(),
lastUpdatedAt: updatedServerCfg.updatedAt
});
}
@@ -631,6 +636,16 @@ export const superAdminServiceFactory = ({
await kmsService.updateEncryptionStrategy(strategy);
};
const invalidateCache = async (type: CacheType) => {
await invalidateCacheQueue.startInvalidate({
data: { type }
});
};
const checkIfInvalidatingCache = async () => {
return (await keyStore.getItem("invalidating-cache")) !== null;
};
return {
initServerCfg,
updateServerCfg,
@@ -644,6 +659,8 @@ export const superAdminServiceFactory = ({
getConfiguredEncryptionStrategies,
grantServerAdminAccessToUser,
deleteIdentitySuperAdminAccess,
deleteUserSuperAdminAccess
deleteUserSuperAdminAccess,
invalidateCache,
checkIfInvalidatingCache
};
};

View File

@@ -44,3 +44,8 @@ export enum LoginMethod {
LDAP = "ldap",
OIDC = "oidc"
}
export enum CacheType {
ALL = "all",
SECRETS = "secrets"
}

View File

@@ -21,7 +21,8 @@ export enum PostHogEventTypes {
IssueSshHostUserCert = "Issue SSH Host User Certificate",
IssueSshHostHostCert = "Issue SSH Host Host Certificate",
SignCert = "Sign PKI Certificate",
IssueCert = "Issue PKI Certificate"
IssueCert = "Issue PKI Certificate",
InvalidateCache = "Invalidate Cache"
}
export type TSecretModifiedEvent = {
@@ -203,6 +204,13 @@ export type TIssueCertificateEvent = {
};
};
export type TInvalidateCacheEvent = {
event: PostHogEventTypes.InvalidateCache;
properties: {
userAgent?: string;
};
};
export type TPostHogEvent = { distinctId: string } & (
| TSecretModifiedEvent
| TAdminInitEvent
@@ -221,4 +229,5 @@ export type TPostHogEvent = { distinctId: string } & (
| TIssueSshHostHostCertEvent
| TSignCertificateEvent
| TIssueCertificateEvent
| TInvalidateCacheEvent
);
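
With TInvalidateCacheEvent added to the union, an invalidation event type-checks like any other telemetry event. A hypothetical capture call in posthog-node style (the client setup and key are assumptions, not from the diff):

import { PostHog } from "posthog-node";

const client = new PostHog("phc_example_key"); // placeholder key

client.capture({
  distinctId: "user-123",
  event: "Invalidate Cache", // PostHogEventTypes.InvalidateCache
  properties: { userAgent: "curl/8.0" } // shape of TInvalidateCacheEvent["properties"]
});

await client.shutdown(); // flush pending events before exit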

View File

@@ -1,115 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
)
func TestCommitAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
commit string
commitAllowed bool
}{
{
allowlist: Allowlist{
Commits: []string{"commitA"},
},
commit: "commitA",
commitAllowed: true,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "commitA",
commitAllowed: false,
},
{
allowlist: Allowlist{
Commits: []string{"commitB"},
},
commit: "",
commitAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.commitAllowed, tt.allowlist.CommitAllowed(tt.commit))
}
}
func TestRegexAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
secret string
regexAllowed bool
}{
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret: matchthis, done",
regexAllowed: true,
},
{
allowlist: Allowlist{
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
},
secret: "a secret",
regexAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.regexAllowed, tt.allowlist.RegexAllowed(tt.secret))
}
}
func TestPathAllowed(t *testing.T) {
tests := []struct {
allowlist Allowlist
path string
pathAllowed bool
}{
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a path",
pathAllowed: true,
},
{
allowlist: Allowlist{
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
},
path: "a ???",
pathAllowed: false,
},
}
for _, tt := range tests {
assert.Equal(t, tt.pathAllowed, tt.allowlist.PathAllowed(tt.path))
}
}

View File

@@ -1,279 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"fmt"
"regexp"
"strings"
"github.com/rs/zerolog/log"
"github.com/spf13/viper"
)
//go:embed infisical-scan.toml
var DefaultConfig string
// use to keep track of how many configs we can extend
// yea I know, globals bad
var extendDepth int
const maxExtendDepth = 2
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Description string
Extend Extend
Rules []struct {
ID string
Description string
Entropy float64
SecretGroup int
Regex string
Keywords []string
Path string
Tags []string
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
Allowlist struct {
RegexTarget string
Regexes []string
Paths []string
Commits []string
StopWords []string
}
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Extend Extend
Path string
Description string
Rules map[string]Rule
Allowlist Allowlist
Keywords []string
// used to keep sarif results consistent
orderedRules []string
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords []string
orderedRules []string
)
rulesMap := make(map[string]Rule)
for _, r := range vc.Rules {
var allowlistRegexes []*regexp.Regexp
for _, a := range r.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range r.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
if r.Keywords == nil {
r.Keywords = []string{}
} else {
for _, k := range r.Keywords {
keywords = append(keywords, strings.ToLower(k))
}
}
if r.Tags == nil {
r.Tags = []string{}
}
var configRegex *regexp.Regexp
var configPathRegex *regexp.Regexp
if r.Regex == "" {
configRegex = nil
} else {
configRegex = regexp.MustCompile(r.Regex)
}
if r.Path == "" {
configPathRegex = nil
} else {
configPathRegex = regexp.MustCompile(r.Path)
}
r := Rule{
Description: r.Description,
RuleID: r.ID,
Regex: configRegex,
Path: configPathRegex,
SecretGroup: r.SecretGroup,
Entropy: r.Entropy,
Tags: r.Tags,
Keywords: r.Keywords,
Allowlist: Allowlist{
RegexTarget: r.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: r.Allowlist.Commits,
StopWords: r.Allowlist.StopWords,
},
}
orderedRules = append(orderedRules, r.RuleID)
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return Config{}, fmt.Errorf("%s invalid regex secret group %d, max regex secret group %d", r.Description, r.SecretGroup, r.Regex.NumSubexp())
}
rulesMap[r.RuleID] = r
}
var allowlistRegexes []*regexp.Regexp
for _, a := range vc.Allowlist.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range vc.Allowlist.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
c := Config{
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Allowlist: Allowlist{
RegexTarget: vc.Allowlist.RegexTarget,
Regexes: allowlistRegexes,
Paths: allowlistPaths,
Commits: vc.Allowlist.Commits,
StopWords: vc.Allowlist.StopWords,
},
Keywords: keywords,
orderedRules: orderedRules,
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
log.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
c.extendDefault()
} else if c.Extend.Path != "" {
c.extendPath()
}
}
return c, nil
}
func (c *Config) OrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.orderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msg("extending config with default config")
c.extend(cfg)
}
func (c *Config) extendPath() {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
log.Fatal().Msgf("failed to load extended config, err: %s", err)
return
}
log.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
for ruleID, rule := range extensionConfig.Rules {
if _, ok := c.Rules[ruleID]; !ok {
log.Trace().Msgf("adding %s to base config", ruleID)
c.Rules[ruleID] = rule
c.Keywords = append(c.Keywords, rule.Keywords...)
}
}
// append allowlists, not attempting to merge
c.Allowlist.Commits = append(c.Allowlist.Commits,
extensionConfig.Allowlist.Commits...)
c.Allowlist.Paths = append(c.Allowlist.Paths,
extensionConfig.Allowlist.Paths...)
c.Allowlist.Regexes = append(c.Allowlist.Regexes,
extensionConfig.Allowlist.Regexes...)
}

View File

@@ -1,170 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"regexp"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
)
const configPath = "../testdata/config/"
func TestTranslate(t *testing.T) {
tests := []struct {
cfgName string
cfg Config
wantError error
}{
{
cfgName: "allow_aws_re",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Regexes: []*regexp.Regexp{
regexp.MustCompile("AKIALALEMEL33243OLIA"),
},
},
},
},
},
},
{
cfgName: "allow_commit",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Commits: []string{"allowthiscommit"},
},
},
},
},
},
{
cfgName: "allow_path",
cfg: Config{
Rules: map[string]Rule{"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
Allowlist: Allowlist{
Paths: []*regexp.Regexp{
regexp.MustCompile(".go"),
},
},
},
},
},
},
{
cfgName: "entropy_group",
cfg: Config{
Rules: map[string]Rule{"discord-api-key": {
Description: "Discord API key",
Regex: regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
RuleID: "discord-api-key",
Allowlist: Allowlist{},
Entropy: 3.5,
SecretGroup: 3,
Tags: []string{},
Keywords: []string{},
},
},
},
},
{
cfgName: "bad_entropy_group",
cfg: Config{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "base",
cfg: Config{
Rules: map[string]Rule{
"aws-access-key": {
Description: "AWS Access Key",
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-access-key",
},
"aws-secret-key": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key",
},
"aws-secret-key-again": {
Description: "AWS Secret Key",
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
Tags: []string{"key", "AWS"},
Keywords: []string{},
RuleID: "aws-secret-key-again",
},
},
},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
assert.Equal(t, cfg.Rules, tt.cfg.Rules)
}
}

View File

@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: example-auth-secret
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

View File

@@ -1,8 +0,0 @@
public_ip: 127.0.0.1
auth_secret: changeThisOnProduction
realm: infisical.org
# set port 5349 for tls
# port: 5349
# tls_private_key_path: /full-path
# tls_ca_path: /full-path
# tls_cert_path: /full-path

File diff suppressed because it is too large

View File

@@ -1,43 +0,0 @@
package config
import (
"regexp"
)
// Rules contain information that define details on how to detect secrets
type Rule struct {
// Description is the description of the rule.
Description string
// RuleID is a unique identifier for this rule
RuleID string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
Allowlist Allowlist
}

View File

@@ -1,24 +0,0 @@
package config
import (
"regexp"
)
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}

View File

@@ -25,35 +25,31 @@ package detect
import (
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"github.com/rs/zerolog/log"
"github.com/Infisical/infisical-merge/report"
"github.com/Infisical/infisical-merge/detect/report"
)
func IsNew(finding report.Finding, baseline []report.Finding) bool {
func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
// the code requires maintanance if/when the Finding struct changes
// the code requires maintenance if/when the Finding struct changes
for _, b := range baseline {
if finding.Author == b.Author &&
finding.Commit == b.Commit &&
finding.Date == b.Date &&
if finding.RuleID == b.RuleID &&
finding.Description == b.Description &&
finding.Email == b.Email &&
finding.EndColumn == b.EndColumn &&
finding.StartLine == b.StartLine &&
finding.EndLine == b.EndLine &&
finding.Entropy == b.Entropy &&
finding.File == b.File &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Match == b.Match &&
finding.Message == b.Message &&
finding.RuleID == b.RuleID &&
finding.Secret == b.Secret &&
finding.StartColumn == b.StartColumn &&
finding.StartLine == b.StartLine {
finding.EndColumn == b.EndColumn &&
(redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
finding.File == b.File &&
finding.Commit == b.Commit &&
finding.Author == b.Author &&
finding.Email == b.Email &&
finding.Date == b.Date &&
finding.Message == b.Message &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Entropy == b.Entropy {
return false
}
}
@@ -61,23 +57,12 @@ func IsNew(finding report.Finding, baseline []report.Finding) bool {
}
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
var previousFindings []report.Finding
jsonFile, err := os.Open(baselinePath)
bytes, err := os.ReadFile(baselinePath)
if err != nil {
return nil, fmt.Errorf("could not open %s", baselinePath)
}
defer func() {
if cerr := jsonFile.Close(); cerr != nil {
log.Warn().Err(cerr).Msg("problem closing jsonFile handle")
}
}()
bytes, err := io.ReadAll(jsonFile)
if err != nil {
return nil, fmt.Errorf("could not read data from the file %s", baselinePath)
}
var previousFindings []report.Finding
err = json.Unmarshal(bytes, &previousFindings)
if err != nil {
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
@@ -85,3 +70,34 @@ func LoadBaseline(baselinePath string) ([]report.Finding, error) {
return previousFindings, nil
}
func (d *Detector) AddBaseline(baselinePath string, source string) error {
if baselinePath != "" {
absoluteSource, err := filepath.Abs(source)
if err != nil {
return err
}
absoluteBaseline, err := filepath.Abs(baselinePath)
if err != nil {
return err
}
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
if err != nil {
return err
}
baseline, err := LoadBaseline(baselinePath)
if err != nil {
return err
}
d.baseline = baseline
baselinePath = relativeBaseline
}
d.baselinePath = baselinePath
return nil
}
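
The reordering in IsNew above also carries a behavior change: when output is redacted (redact > 0), Match and Secret are rewritten placeholders in the baseline, so the function skips comparing them instead of treating every redacted finding as new. A TypeScript rendering of just that condition over a trimmed-down Finding shape (the authoritative code is the Go above):

type Finding = { ruleID: string; file: string; startLine: number; match: string; secret: string };

const isNew = (f: Finding, redact: number, baseline: Finding[]): boolean =>
  !baseline.some(
    (b) =>
      f.ruleID === b.ruleID &&
      f.file === b.file &&
      f.startLine === b.startLine &&
      // redaction rewrites match/secret, so only compare them when unredacted
      (redact > 0 || (f.match === b.match && f.secret === b.secret))
  );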

View File

@@ -1,160 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"errors"
"testing"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/report"
)
func TestIsNew(t *testing.T) {
tests := []struct {
findings report.Finding
baseline []report.Finding
expect bool
}{
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
},
},
expect: false,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0002",
},
},
expect: true,
},
{
findings: report.Finding{
Author: "a",
Commit: "0000",
Tags: []string{"a", "b"},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "0000",
Tags: []string{"a", "c"},
},
},
expect: false, // Updated tags doesn't make it a new finding
},
}
for _, test := range tests {
assert.Equal(t, test.expect, IsNew(test.findings, test.baseline))
}
}
func TestFileLoadBaseline(t *testing.T) {
tests := []struct {
Filename string
ExpectedError error
}{
{
Filename: "../testdata/baseline/baseline.csv",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.csv is not supported"),
},
{
Filename: "../testdata/baseline/baseline.sarif",
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.sarif is not supported"),
},
{
Filename: "../testdata/baseline/notfound.json",
ExpectedError: errors.New("could not open ../testdata/baseline/notfound.json"),
},
}
for _, test := range tests {
_, err := LoadBaseline(test.Filename)
assert.Equal(t, test.ExpectedError.Error(), err.Error())
}
}
func TestIgnoreIssuesInBaseline(t *testing.T) {
tests := []struct {
findings []report.Finding
baseline []report.Finding
expectCount int
}{
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
},
},
expectCount: 0,
},
{
findings: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "a",
},
},
baseline: []report.Finding{
{
Author: "a",
Commit: "5",
Fingerprint: "b",
},
},
expectCount: 0,
},
}
for _, test := range tests {
d, _ := NewDetectorDefaultConfig()
d.baseline = test.baseline
for _, finding := range test.findings {
d.addFinding(finding)
}
assert.Equal(t, test.expectCount, len(d.findings))
}
}

cli/detect/cmd/scm/scm.go (new file, 66 lines)
View File

@@ -0,0 +1,66 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package scm
import (
"fmt"
"strings"
)
type Platform int
const (
UnknownPlatform Platform = iota
NoPlatform // Explicitly disable the feature
GitHubPlatform
GitLabPlatform
AzureDevOpsPlatform
// TODO: Add others.
)
func (p Platform) String() string {
return [...]string{
"unknown",
"none",
"github",
"gitlab",
"azuredevops",
}[p]
}
func PlatformFromString(s string) (Platform, error) {
switch strings.ToLower(s) {
case "", "unknown":
return UnknownPlatform, nil
case "none":
return NoPlatform, nil
case "github":
return GitHubPlatform, nil
case "gitlab":
return GitLabPlatform, nil
case "azuredevops":
return AzureDevOpsPlatform, nil
default:
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
}
}

View File

@@ -23,63 +23,137 @@
package config
import (
"regexp"
"fmt"
"strings"
"golang.org/x/exp/maps"
"github.com/Infisical/infisical-merge/detect/regexp"
)
type AllowlistMatchCondition int
const (
AllowlistMatchOr AllowlistMatchCondition = iota
AllowlistMatchAnd
)
func (a AllowlistMatchCondition) String() string {
return [...]string{
"OR",
"AND",
}[a]
}
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
// Short human readable description of the allowlist.
Description string
// Regexes is slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// MatchCondition determines whether all criteria must match.
MatchCondition AllowlistMatchCondition
// RegexTarget
RegexTarget string
// Commits is a slice of commit SHAs that are allowed to be ignored. Defaults to "OR".
Commits []string
// Paths is a slice of path regular expressions that are allowed to be ignored.
Paths []*regexp.Regexp
// Commits is a slice of commit SHAs that are allowed to be ignored.
Commits []string
// Can be `match` or `line`.
//
// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
//
// If `line` the _Regexes_ will be tested against the entire line.
//
// If RegexTarget is empty, it will be tested against the found secret.
RegexTarget string
// Regexes is slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// StopWords is a slice of stop words that are allowed to be ignored.
// This targets the _secret_, not the content of the regex match like the
// Regexes slice.
StopWords []string
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
func (a *Allowlist) Validate() error {
if a.validated {
return nil
}
// Disallow empty allowlists.
if len(a.Commits) == 0 &&
len(a.Paths) == 0 &&
len(a.Regexes) == 0 &&
len(a.StopWords) == 0 {
return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
}
// Deduplicate commits and stopwords.
if len(a.Commits) > 0 {
uniqueCommits := make(map[string]struct{})
for _, commit := range a.Commits {
uniqueCommits[commit] = struct{}{}
}
a.Commits = maps.Keys(uniqueCommits)
}
if len(a.StopWords) > 0 {
uniqueStopwords := make(map[string]struct{})
for _, stopWord := range a.StopWords {
uniqueStopwords[stopWord] = struct{}{}
}
a.StopWords = maps.Keys(uniqueStopwords)
}
a.validated = true
return nil
}
// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) bool {
if c == "" {
return false
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
if a == nil || c == "" {
return false, ""
}
for _, commit := range a.Commits {
if commit == c {
return true
return true, c
}
}
return false
return false, ""
}
// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
if a == nil || path == "" {
return false
}
return anyRegexMatch(path, a.Paths)
}
// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(s string) bool {
return anyRegexMatch(s, a.Regexes)
func (a *Allowlist) RegexAllowed(secret string) bool {
if a == nil || secret == "" {
return false
}
return anyRegexMatch(secret, a.Regexes)
}
func (a *Allowlist) ContainsStopWord(s string) bool {
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
if a == nil || s == "" {
return false, ""
}
s = strings.ToLower(s)
for _, stopWord := range a.StopWords {
if strings.Contains(s, strings.ToLower(stopWord)) {
return true
return true, stopWord
}
}
return false
return false, ""
}

cli/detect/config/config.go (new file, 426 lines)
View File

@@ -0,0 +1,426 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"errors"
"fmt"
"sort"
"strings"
"github.com/spf13/viper"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
var (
//go:embed gitleaks.toml
DefaultConfig string
// use to keep track of how many configs we can extend
// yea I know, globals bad
extendDepth int
)
const maxExtendDepth = 2
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Title string
Description string
Extend Extend
Rules []struct {
ID string
Description string
Path string
Regex string
SecretGroup int
Entropy float64
Keywords []string
Tags []string
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperRuleAllowlist
Allowlists []*viperRuleAllowlist
}
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperGlobalAllowlist
Allowlists []*viperGlobalAllowlist
}
type viperRuleAllowlist struct {
Description string
Condition string
Commits []string
Paths []string
RegexTarget string
Regexes []string
StopWords []string
}
type viperGlobalAllowlist struct {
TargetRules []string
viperRuleAllowlist `mapstructure:",squash"`
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Title string
Extend Extend
Path string
Description string
Rules map[string]Rule
Keywords map[string]struct{}
// used to keep sarif results consistent
OrderedRules []string
Allowlists []*Allowlist
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
DisabledRules []string
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords = make(map[string]struct{})
orderedRules []string
rulesMap = make(map[string]Rule)
ruleAllowlists = make(map[string][]*Allowlist)
)
// Validate individual rules.
for _, vr := range vc.Rules {
var (
pathPat *regexp.Regexp
regexPat *regexp.Regexp
)
if vr.Path != "" {
pathPat = regexp.MustCompile(vr.Path)
}
if vr.Regex != "" {
regexPat = regexp.MustCompile(vr.Regex)
}
if vr.Keywords == nil {
vr.Keywords = []string{}
} else {
for i, k := range vr.Keywords {
keyword := strings.ToLower(k)
keywords[keyword] = struct{}{}
vr.Keywords[i] = keyword
}
}
if vr.Tags == nil {
vr.Tags = []string{}
}
cr := Rule{
RuleID: vr.ID,
Description: vr.Description,
Regex: regexPat,
SecretGroup: vr.SecretGroup,
Entropy: vr.Entropy,
Path: pathPat,
Keywords: vr.Keywords,
Tags: vr.Tags,
}
// Parse the rule allowlists, including the older format for backwards compatibility.
if vr.AllowList != nil {
// TODO: Remove this in v9.
if len(vr.Allowlists) > 0 {
return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
}
vr.Allowlists = append(vr.Allowlists, vr.AllowList)
}
for _, a := range vr.Allowlists {
allowlist, err := parseAllowlist(a)
if err != nil {
return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
}
cr.Allowlists = append(cr.Allowlists, allowlist)
}
orderedRules = append(orderedRules, cr.RuleID)
rulesMap[cr.RuleID] = cr
}
// Assemble the config.
c := Config{
Title: vc.Title,
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Keywords: keywords,
OrderedRules: orderedRules,
}
// Parse the config allowlists, including the older format for backwards compatibility.
if vc.AllowList != nil {
// TODO: Remove this in v9.
if len(vc.Allowlists) > 0 {
return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
}
vc.Allowlists = append(vc.Allowlists, vc.AllowList)
}
for _, a := range vc.Allowlists {
allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
if err != nil {
return Config{}, fmt.Errorf("[[allowlists]] %w", err)
}
// Allowlists with |targetRules| aren't added to the global list.
if len(a.TargetRules) > 0 {
for _, ruleID := range a.TargetRules {
// It's not possible to validate |ruleID| until after extend.
ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
}
} else {
c.Allowlists = append(c.Allowlists, allowlist)
}
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
if err := c.extendDefault(); err != nil {
return Config{}, err
}
} else if c.Extend.Path != "" {
if err := c.extendPath(); err != nil {
return Config{}, err
}
}
}
// Validate the rules after everything has been assembled (including extended configs).
if extendDepth == 0 {
for _, rule := range c.Rules {
if err := rule.Validate(); err != nil {
return Config{}, err
}
}
// Populate targeted configs.
for ruleID, allowlists := range ruleAllowlists {
rule, ok := c.Rules[ruleID]
if !ok {
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
}
rule.Allowlists = append(rule.Allowlists, allowlists...)
c.Rules[ruleID] = rule
}
}
return c, nil
}
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
var matchCondition AllowlistMatchCondition
switch strings.ToUpper(a.Condition) {
case "AND", "&&":
matchCondition = AllowlistMatchAnd
case "", "OR", "||":
matchCondition = AllowlistMatchOr
default:
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
}
// Validate the target.
regexTarget := a.RegexTarget
if regexTarget != "" {
switch regexTarget {
case "secret":
regexTarget = ""
case "match", "line":
// do nothing
default:
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
}
}
var allowlistRegexes []*regexp.Regexp
for _, pattern := range a.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(pattern))
}
var allowlistPaths []*regexp.Regexp
for _, pattern := range a.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(pattern))
}
allowlist := &Allowlist{
Description: a.Description,
MatchCondition: matchCondition,
Commits: a.Commits,
Paths: allowlistPaths,
RegexTarget: regexTarget,
Regexes: allowlistRegexes,
StopWords: a.StopWords,
}
if err := allowlist.Validate(); err != nil {
return nil, err
}
return allowlist, nil
}
func (c *Config) GetOrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.OrderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() error {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
logging.Debug().Msg("extending config with default config")
c.extend(cfg)
return nil
}
func (c *Config) extendPath() error {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
return nil
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
// Get config name for helpful log messages.
var configName string
if c.Extend.Path != "" {
configName = c.Extend.Path
} else {
configName = "default"
}
// Convert |Config.DisabledRules| into a map for ease of access.
disabledRuleIDs := map[string]struct{}{}
for _, id := range c.Extend.DisabledRules {
if _, ok := extensionConfig.Rules[id]; !ok {
logging.Warn().
Str("rule-id", id).
Str("config", configName).
Msg("Disabled rule doesn't exist in extended config.")
}
disabledRuleIDs[id] = struct{}{}
}
for ruleID, baseRule := range extensionConfig.Rules {
// Skip the rule.
if _, ok := disabledRuleIDs[ruleID]; ok {
logging.Debug().
Str("rule-id", ruleID).
Str("config", configName).
Msg("Ignoring rule from extended config.")
continue
}
currentRule, ok := c.Rules[ruleID]
if !ok {
// Rule doesn't exist, add it to the config.
c.Rules[ruleID] = baseRule
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.OrderedRules = append(c.OrderedRules, ruleID)
} else {
// Rule exists, merge our changes into the base.
if currentRule.Description != "" {
baseRule.Description = currentRule.Description
}
if currentRule.Entropy != 0 {
baseRule.Entropy = currentRule.Entropy
}
if currentRule.SecretGroup != 0 {
baseRule.SecretGroup = currentRule.SecretGroup
}
if currentRule.Regex != nil {
baseRule.Regex = currentRule.Regex
}
if currentRule.Path != nil {
baseRule.Path = currentRule.Path
}
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
baseRule.Allowlists = append(baseRule.Allowlists, currentRule.Allowlists...)
// The keywords from the base rule and the extended rule must be merged into the global keywords list
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.Rules[ruleID] = baseRule
}
}
// append allowlists, not attempting to merge
for _, a := range extensionConfig.Allowlists {
c.Allowlists = append(c.Allowlists, a)
}
// Sort the rule IDs so the ordering of extended rules is deterministic.
sort.Strings(c.OrderedRules)
}
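For orientation, here is a minimal, hedged sketch of how a TOML config flows through the Translate path above, including the newer [[rules.allowlists]] table form. The import path and the exact viper key mapping are assumptions inferred from this diff, not verified against the repo:

package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"

	// Assumed import path for the config package in this diff.
	config "github.com/Infisical/infisical-merge/detect/config"
)

const tomlCfg = `
title = "example"

[[rules]]
id = "demo-key"
description = "Demo key"
regex = '''demo-[0-9a-f]{16}'''
keywords = ["demo-"]

[[rules.allowlists]]
condition = "OR"
regexTarget = "line"
regexes = ['''demo-0{16}''']
`

func main() {
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(tomlCfg)); err != nil {
		panic(err)
	}
	var vc config.ViperConfig
	if err := viper.Unmarshal(&vc); err != nil {
		panic(err)
	}
	cfg, err := vc.Translate()
	if err != nil {
		panic(err)
	}
	fmt.Println(cfg.OrderedRules) // [demo-key]
}

Note how Translate lowercases keywords into the global keyword set, and rejects configs that mix the deprecated [rules.allowlist] table with [[rules.allowlists]]; an [extend] table would instead route through extendDefault or extendPath and merge rules as in extend above.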

File diff suppressed because it is too large

cli/detect/config/rule.go (new file, 114 lines)

@@ -0,0 +1,114 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/detect/regexp"
)
// Rules contain information that define details on how to detect secrets
type Rule struct {
// RuleID is a unique identifier for this rule
RuleID string
// Description is the description of the rule.
Description string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
Allowlists []*Allowlist
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
if r.validated {
return nil
}
// Ensure |id| is present.
if strings.TrimSpace(r.RuleID) == "" {
// Try to provide helpful context, since |id| is empty.
var context string
if r.Regex != nil {
context = ", regex: " + r.Regex.String()
} else if r.Path != nil {
context = ", path: " + r.Path.String()
} else if r.Description != "" {
context = ", description: " + r.Description
}
return fmt.Errorf("rule |id| is missing or empty" + context)
}
// Ensure the rule actually matches something.
if r.Regex == nil && r.Path == nil {
return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
}
// Ensure |secretGroup| works.
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
}
for _, allowlist := range r.Allowlists {
// This will probably never happen.
if allowlist == nil {
continue
}
if err := allowlist.Validate(); err != nil {
return fmt.Errorf("%s: %w", r.RuleID, err)
}
}
r.validated = true
return nil
}
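As a quick illustration of the checks above, a sketch (import paths assumed from this diff) of Validate rejecting a |secretGroup| larger than the regex's capture-group count:

package main

import (
	"fmt"

	config "github.com/Infisical/infisical-merge/detect/config"
	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	r := config.Rule{
		RuleID:      "demo-key",
		Regex:       regexp.MustCompile(`demo-([0-9a-f]{16})`),
		SecretGroup: 2, // the regex only has one capture group
	}
	// Prints: demo-key: invalid regex secret group 2, max regex secret group 1
	fmt.Println(r.Validate())
}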


@@ -20,35 +20,27 @@
 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 // SOFTWARE.
-package report
+package config
 import (
-"os"
-"strings"
-"github.com/Infisical/infisical-merge/config"
+"github.com/Infisical/infisical-merge/detect/regexp"
 )
-const (
-// https://cwe.mitre.org/data/definitions/798.html
-CWE = "CWE-798"
-CWE_DESCRIPTION = "Use of Hard-coded Credentials"
-)
-func Write(findings []Finding, cfg config.Config, ext string, reportPath string) error {
-file, err := os.Create(reportPath)
-if err != nil {
-return err
+func anyRegexMatch(f string, res []*regexp.Regexp) bool {
+for _, re := range res {
+if regexMatched(f, re) {
+return true
+}
 }
-ext = strings.ToLower(ext)
-switch ext {
-case ".json", "json":
-err = writeJson(findings, file)
-case ".csv", "csv":
-err = writeCsv(findings, file)
-case ".sarif", "sarif":
-err = writeSarif(cfg, findings, file)
-}
-return err
+return false
 }
+func regexMatched(f string, re *regexp.Regexp) bool {
+if re == nil {
+return false
+}
+if re.FindString(f) != "" {
+return true
+}
+return false
+}

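The hunk above removes the old report-writing logic from this file and leaves the config package's small regex helpers. They are unexported, but the semantics are easy to mirror with the standard library; note that a nil pattern never matches, which is what lets a rule set only |regex| or only |path|. A standalone sketch:

package main

import (
	"fmt"
	"regexp"
)

// regexMatched mirrors the helper above: a nil pattern never matches.
func regexMatched(f string, re *regexp.Regexp) bool {
	if re == nil {
		return false
	}
	return re.FindString(f) != ""
}

func main() {
	fmt.Println(regexMatched("config.py", regexp.MustCompile(`\.py$`))) // true
	fmt.Println(regexMatched("config.py", nil))                         // false
}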
cli/detect/decoder.go (new file, 328 lines)

@@ -0,0 +1,328 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"encoding/base64"
"fmt"
"regexp"
"unicode"
"github.com/Infisical/infisical-merge/detect/logging"
)
var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
base64.StdEncoding.DecodeString,
base64.RawURLEncoding.DecodeString,
}
func init() {
// Basically look for anything that isn't just letters
for _, c := range `0123456789+/-_` {
b64LikelyChars[c] = 1
}
}
// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
// The parent segment in a segment tree. If nil, it is a root segment
parent *EncodedSegment
// Relative start/end are the bounds of the encoded value in the current pass.
relativeStart int
relativeEnd int
// Absolute start/end refer to the bounds of the root segment in this segment
// tree
absoluteStart int
absoluteEnd int
// Decoded start/end refer to the bounds of the decoded value in the current
// pass. These can differ from relative values because decoding can shrink
// or grow the size of the segment.
decodedStart int
decodedEnd int
// This is the actual decoded content in the segment
decodedValue string
// This is the type of encoding
encoding string
}
// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}
// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
return start <= s.decodedEnd && end >= s.decodedStart
}
// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
// The match is within the bounds of the segment so we just return
// the absolute start and end of the root segment.
if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
return []int{
s.absoluteStart,
s.absoluteEnd,
}
}
// Since it overlaps one side and/or the other, we're going to have to adjust
// and climb parents until we're either at the root or we've determined
// we're fully inside one of the parent segments.
adjustedMatchIndex := make([]int, 2)
if matchIndex[0] < s.decodedStart {
// It starts before the encoded segment so adjust the start to match
// the location before it was decoded
matchStartDelta := s.decodedStart - matchIndex[0]
adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
} else {
// It starts within the encoded segment so set the bound to the
// relative start
adjustedMatchIndex[0] = s.relativeStart
}
if matchIndex[1] > s.decodedEnd {
// It ends after the encoded segment so adjust the end to match
// the location before it was decoded
matchEndDelta := matchIndex[1] - s.decodedEnd
adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
} else {
// It ends within the encoded segment so set the bound to the relative end
adjustedMatchIndex[1] = s.relativeEnd
}
// We're still not at a root segment so we'll need to keep on adjusting
if s.parent != nil {
return s.parent.adjustMatchIndex(adjustedMatchIndex)
}
return adjustedMatchIndex
}
// depth reports how many levels of decoding needed to be done (default is 1)
func (s EncodedSegment) depth() int {
depth := 1
// Climb the tree and increment the depth
for current := &s; current.parent != nil; current = current.parent {
depth++
}
return depth
}
// tags returns additional meta data tags related to the types of segments
func (s EncodedSegment) tags() []string {
return []string{
fmt.Sprintf("decoded:%s", s.encoding),
fmt.Sprintf("decode-depth:%d", s.depth()),
}
}
// Decoder decodes various types of data in place
type Decoder struct {
decodedMap map[string]string
}
// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
return &Decoder{
decodedMap: make(map[string]string),
}
}
// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
segments := d.findEncodedSegments(data, parentSegments)
if len(segments) > 0 {
result := bytes.NewBuffer(make([]byte, 0, len(data)))
relativeStart := 0
for _, segment := range segments {
result.WriteString(data[relativeStart:segment.relativeStart])
result.WriteString(segment.decodedValue)
relativeStart = segment.relativeEnd
}
result.WriteString(data[relativeStart:])
return result.String(), segments
}
return data, segments
}
// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
if len(data) == 0 {
return []EncodedSegment{}
}
matchIndices := b64Regexp.FindAllStringIndex(data, -1)
if matchIndices == nil {
return []EncodedSegment{}
}
segments := make([]EncodedSegment, 0, len(matchIndices))
// Keeps up with offsets from the text changing size as things are decoded
decodedShift := 0
for _, matchIndex := range matchIndices {
encodedValue := data[matchIndex[0]:matchIndex[1]]
if !isLikelyB64(encodedValue) {
d.decodedMap[encodedValue] = ""
continue
}
decodedValue, alreadyDecoded := d.decodedMap[encodedValue]
// We haven't decoded this yet, so go ahead and decode it
if !alreadyDecoded {
decodedValue = decodeValue(encodedValue)
d.decodedMap[encodedValue] = decodedValue
}
// Skip this segment because there was nothing to check
if len(decodedValue) == 0 {
continue
}
// Create a segment for the encoded data
segment := EncodedSegment{
relativeStart: matchIndex[0],
relativeEnd: matchIndex[1],
absoluteStart: matchIndex[0],
absoluteEnd: matchIndex[1],
decodedStart: matchIndex[0] + decodedShift,
decodedEnd: matchIndex[0] + decodedShift + len(decodedValue),
decodedValue: decodedValue,
encoding: "base64",
}
// Shift decoded start and ends based on size changes
decodedShift += len(decodedValue) - len(encodedValue)
// Adjust the absolute position of segments contained in parent segments
for _, parentSegment := range parentSegments {
if segment.isChildOf(parentSegment) {
segment.absoluteStart = parentSegment.absoluteStart
segment.absoluteEnd = parentSegment.absoluteEnd
segment.parent = &parentSegment
break
}
}
logging.Debug().Msgf("segment found: %#v", segment)
segments = append(segments, segment)
}
return segments
}
// decodeValue tries each decoder in turn and returns the first decoded ASCII value, or an empty string if none succeed
func decodeValue(encodedValue string) string {
for _, decoder := range decoders {
decodedValue, err := decoder(encodedValue)
if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
return string(decodedValue)
}
}
return ""
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII || b[i] < '\t' {
return false
}
}
return true
}
// Skip a lot of method signatures and things at the risk of missing about
// 1% of base64
func isLikelyB64(s string) bool {
for _, c := range s {
if b64LikelyChars[c] != 0 {
return true
}
}
return false
}
// Find a segment where the decoded bounds overlaps a range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
for _, segment := range encodedSegments {
if segment.decodedOverlaps(start, end) {
return &segment
}
}
return nil
}
func (s EncodedSegment) currentLine(currentRaw string) string {
start := 0
end := len(currentRaw)
// Find the start of the range
for i := s.decodedStart; i > -1; i-- {
c := currentRaw[i]
if c == '\n' {
start = i
break
}
}
// Find the end of the range
for i := s.decodedEnd; i < end; i++ {
c := currentRaw[i]
if c == '\n' {
end = i
break
}
}
return currentRaw[start:end]
}
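Because decode is unexported, usage can only be sketched in-package; the loop below mimics how a caller might unwrap nested encodings one pass at a time (the base64 literal and the depth cap are illustrative assumptions):

package detect

import "fmt"

func ExampleDecoder_decode() {
	d := NewDecoder()
	data := `token = "c2VjcmV0LXZhbHVlLTEyMzQ="` // base64("secret-value-1234")
	var segments []EncodedSegment
	for i := 0; i < 8; i++ { // assumed depth cap; the detector enforces its own limit
		data, segments = d.decode(data, segments)
		if len(segments) == 0 {
			break
		}
	}
	fmt.Println(data)
	// Output: token = "secret-value-1234"
}

Each pass rewrites the encoded spans in place and threads the returned segments back in as parents, so segment trees (and the decode-depth tags) fall out of repeated calls.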

File diff suppressed because it is too large


@@ -1,754 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/Infisical/infisical-merge/config"
"github.com/Infisical/infisical-merge/report"
)
const configPath = "../testdata/config/"
const repoBasePath = "../testdata/repos/"
func TestDetect(t *testing.T) {
tests := []struct {
cfgName string
baselinePath string
fragment Fragment
// NOTE: for expected findings, all line numbers will be 0
// because line deltas are added _after_ the finding is created.
// I.e., if the finding is from a --no-git file, the line number will be
// increased by 1 in DetectFromFiles(). If the finding is from git,
// the line number will be increased by the patch delta.
expectedFindings []report.Finding
wantError error
}{
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \
\"AKIALALEMEL33243OKIA\ // infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OKIA\"
// infisical-scan:ignore"
`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OKIA",
Match: "AKIALALEMEL33243OKIA",
File: "tmp.go",
Line: `awsToken := \"AKIALALEMEL33243OKIA\"`,
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.1464393,
},
},
},
{
cfgName: "escaped_character_group",
fragment: Fragment{
Raw: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "PyPI upload token",
Secret: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Match: "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
Line: `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
File: "tmp.go",
RuleID: "pypi-upload-token",
Tags: []string{"key", "pypi"},
StartLine: 0,
EndLine: 0,
StartColumn: 1,
EndColumn: 86,
Entropy: 1.9606875,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: `awsToken := \"AKIALALEMEL33243OLIA\"`,
File: "tmp.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
StartLine: 0,
EndLine: 0,
StartColumn: 15,
EndColumn: 34,
Entropy: 3.0841837,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
Secret: "cafebabe:deadbeef",
Line: `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
File: "tmp.sh",
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 60,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Secret",
Match: "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
Secret: "cafebabe:deadbeef",
File: "tmp.sh",
Line: `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
RuleID: "sidekiq-secret",
Tags: []string{},
Entropy: 2.6098502,
StartLine: 0,
EndLine: 0,
StartColumn: 21,
EndColumn: 74,
},
},
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
FilePath: "tmp.sh",
},
expectedFindings: []report.Finding{
{
Description: "Sidekiq Sensitive URL",
Match: "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
Secret: "cafeb4b3:d3adb33f",
File: "tmp.sh",
Line: `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
RuleID: "sidekiq-sensitive-url",
Tags: []string{},
Entropy: 2.984234,
StartLine: 0,
EndLine: 0,
StartColumn: 8,
EndColumn: 58,
},
},
},
{
cfgName: "allow_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_path",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_commit",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
CommitSHA: "allowthiscommit",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{
{
Description: "Discord API key",
Match: "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.go",
RuleID: "discord-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 7,
EndColumn: 93,
},
},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Generic API Key",
Match: "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
Secret: "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
Line: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
File: "tmp.py",
RuleID: "generic-api-key",
Tags: []string{},
Entropy: 3.7906237,
StartLine: 0,
EndLine: 0,
StartColumn: 22,
EndColumn: 93,
},
},
},
{
cfgName: "path_only",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{
{
Description: "Python Files",
Match: "file detected: tmp.py",
File: "tmp.py",
RuleID: "python-files-only",
Tags: []string{},
},
},
},
{
cfgName: "bad_entropy_group",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
},
{
cfgName: "simple",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: filepath.Join(configPath, "simple.toml"),
},
expectedFindings: []report.Finding{},
},
{
cfgName: "allow_global_aws_re",
fragment: Fragment{
Raw: `awsToken := \"AKIALALEMEL33243OLIA\"`,
FilePath: "tmp.go",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "generic_with_py_path",
fragment: Fragment{
Raw: `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: "tmp.py",
},
expectedFindings: []report.Finding{},
},
{
cfgName: "path_only",
baselinePath: ".baseline.json",
fragment: Fragment{
Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
FilePath: ".baseline.json",
},
expectedFindings: []report.Finding{},
},
}
for _, tt := range tests {
viper.Reset()
viper.AddConfigPath(configPath)
viper.SetConfigName(tt.cfgName)
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
cfg.Path = filepath.Join(configPath, tt.cfgName+".toml")
if tt.wantError != nil {
if err == nil {
t.Errorf("expected error")
}
assert.Equal(t, tt.wantError, err)
}
d := NewDetector(cfg)
d.baselinePath = tt.baselinePath
findings := d.Detect(tt.fragment)
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromGit tests the FromGit function
func TestFromGit(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "small"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 19,
EndColumn: 38,
Line: "\n awsToken := \"AKIALALEMEL33243OLIA\"",
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
File: "main.go",
Date: "2021-11-02T23:37:53Z",
Commit: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587",
Author: "Zachary Rice",
Email: "zricer@protonmail.com",
Message: "Accidentally add a secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587:main.go:aws-access-key:20",
},
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Match: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
File: "foo/foo.go",
Date: "2021-11-02T23:48:06Z",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
{
source: filepath.Join(repoBasePath, "small"),
logOpts: "--all foo...",
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 9,
EndLine: 9,
StartColumn: 17,
EndColumn: 36,
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
Match: "AKIALALEMEL33243OLIA",
Date: "2021-11-02T23:48:06Z",
File: "foo/foo.go",
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
Author: "Zach Rice",
Email: "zricer@protonmail.com",
Message: "adding foo package with secret",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
findings, err := detector.DetectGit(tt.source, tt.logOpts, DetectType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestFromGitStaged(t *testing.T) {
tests := []struct {
cfgName string
source string
logOpts string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "staged"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 7,
EndLine: 7,
StartColumn: 18,
EndColumn: 37,
Line: "\n\taws_token2 := \"AKIALALEMEL33243OLIA\" // this one is not",
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
File: "api/api.go",
SymlinkFile: "",
Commit: "",
Entropy: 3.0841837,
Author: "",
Email: "",
Date: "0001-01-01T00:00:00Z",
Message: "",
Tags: []string{
"key",
"AWS",
},
RuleID: "aws-access-key",
Fingerprint: "api/api.go:aws-access-key:7",
},
},
},
}
err := moveDotGit("dotGit", ".git")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := moveDotGit(".git", "dotGit"); err != nil {
t.Error(err)
}
}()
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err = viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, err := vc.Translate()
if err != nil {
t.Error(err)
}
detector := NewDetector(cfg)
detector.AddGitleaksIgnore(filepath.Join(tt.source, ".gitleaksignore"))
findings, err := detector.DetectGit(tt.source, tt.logOpts, ProtectStagedType)
if err != nil {
t.Error(err)
}
for _, f := range findings {
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
// TestFromFiles tests the FromFiles function
func TestFromFiles(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "nogit"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
SymlinkFile: "",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
{
source: filepath.Join(repoBasePath, "nogit", "main.go"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "AWS Access Key",
StartLine: 20,
EndLine: 20,
StartColumn: 16,
EndColumn: 35,
Match: "AKIALALEMEL33243OLIA",
Secret: "AKIALALEMEL33243OLIA",
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
File: "../testdata/repos/nogit/main.go",
RuleID: "aws-access-key",
Tags: []string{"key", "AWS"},
Entropy: 3.0841837,
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func TestDetectWithSymlinks(t *testing.T) {
tests := []struct {
cfgName string
source string
expectedFindings []report.Finding
}{
{
source: filepath.Join(repoBasePath, "symlinks/file_symlink"),
cfgName: "simple",
expectedFindings: []report.Finding{
{
Description: "Asymmetric Private Key",
StartLine: 1,
EndLine: 1,
StartColumn: 1,
EndColumn: 35,
Match: "-----BEGIN OPENSSH PRIVATE KEY-----",
Secret: "-----BEGIN OPENSSH PRIVATE KEY-----",
Line: "-----BEGIN OPENSSH PRIVATE KEY-----",
File: "../testdata/repos/symlinks/source_file/id_ed25519",
SymlinkFile: "../testdata/repos/symlinks/file_symlink/symlinked_id_ed25519",
RuleID: "apkey",
Tags: []string{"key", "AsymmetricPrivateKey"},
Entropy: 3.587164,
Fingerprint: "../testdata/repos/symlinks/source_file/id_ed25519:apkey:1",
},
},
},
}
for _, tt := range tests {
viper.AddConfigPath(configPath)
viper.SetConfigName("simple")
viper.SetConfigType("toml")
err := viper.ReadInConfig()
if err != nil {
t.Error(err)
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
t.Error(err)
}
cfg, _ := vc.Translate()
detector := NewDetector(cfg)
detector.FollowSymlinks = true
findings, err := detector.DetectFiles(tt.source)
if err != nil {
t.Error(err)
}
assert.ElementsMatch(t, tt.expectedFindings, findings)
}
}
func moveDotGit(from, to string) error {
repoDirs, err := os.ReadDir("../testdata/repos")
if err != nil {
return err
}
for _, dir := range repoDirs {
if to == ".git" {
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
if os.IsNotExist(err) {
// don't want to delete the only copy of .git accidentally
continue
}
os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
}
if !dir.IsDir() {
continue
}
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
if os.IsNotExist(err) {
continue
}
err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
if err != nil {
return err
}
}
return nil
}

Some files were not shown because too many files have changed in this diff.