Mirror of https://github.com/Infisical/infisical.git (synced 2025-03-20 03:49:52 +00:00)

Compare commits: oidc...daniel/eve (8 commits)

Commits (SHA1):
- 4b3de83c07
- 804e4c4609
- b124627288
- 5c86212b11
- 98fcffe718
- d84d2ec7a6
- 6780bfb821
- 5c94a44e92
@@ -19,6 +19,10 @@ POSTGRES_DB=infisical
# Redis
REDIS_URL=redis://redis:6379

# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

# Website URL
# Required
SITE_URL=http://localhost:8080
backend/package-lock.json (generated, 115 changed lines)
@@ -33,7 +33,7 @@
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"bullmq": "^5.1.6",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -53,7 +53,6 @@
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-openidconnect": "^0.1.2",
"pg": "^8.11.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
@@ -63,6 +62,7 @@
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"ws": "^8.16.0",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
},
@@ -77,12 +77,12 @@
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",
"@types/passport-openidconnect": "^0.1.3",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/uuid": "^9.0.7",
"@types/ws": "^8.5.10",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"eslint": "^8.56.0",
@@ -2195,6 +2195,7 @@
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dev": true,
"dependencies": {
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
@@ -2207,6 +2208,7 @@
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
"dev": true,
"engines": {
"node": ">= 8"
}
@@ -2215,6 +2217,7 @@
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
"dev": true,
"dependencies": {
"@nodelib/fs.scandir": "2.1.5",
"fastq": "^1.6.0"
@@ -4074,18 +4077,6 @@
"@types/passport": "*"
}
},
"node_modules/@types/passport-openidconnect": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@types/passport-openidconnect/-/passport-openidconnect-0.1.3.tgz",
"integrity": "sha512-k1Ni7bG/9OZNo2Qpjg2W6GajL+pww6ZPaNWMXfpteCX4dXf4QgaZLt2hjR5IiPrqwBT9+W8KjCTJ/uhGIoBx/g==",
"dev": true,
"dependencies": {
"@types/express": "*",
"@types/oauth": "*",
"@types/passport": "*",
"@types/passport-strategy": "*"
}
},
"node_modules/@types/passport-strategy": {
"version": "0.2.38",
"resolved": "https://registry.npmjs.org/@types/passport-strategy/-/passport-strategy-0.2.38.tgz",
@@ -4222,6 +4213,15 @@
"integrity": "sha512-WUtIVRUZ9i5dYXefDEAI7sh9/O7jGvHg7Df/5O/gtH3Yabe5odI3UWopVR1qbPXQtvOxWu3mM4XxlYeZtMWF4g==",
"dev": true
},
"node_modules/@types/ws": {
"version": "8.5.10",
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz",
"integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==",
"dev": true,
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/xml-crypto": {
"version": "1.4.6",
"resolved": "https://registry.npmjs.org/@types/xml-crypto/-/xml-crypto-1.4.6.tgz",
@@ -5453,6 +5453,7 @@
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"dev": true,
"dependencies": {
"fill-range": "^7.0.1"
},
@@ -5502,15 +5503,14 @@
}
},
"node_modules/bullmq": {
"version": "5.3.3",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.3.3.tgz",
"integrity": "sha512-Gc/68HxiCHLMPBiGIqtINxcf8HER/5wvBYMY/6x3tFejlvldUBFaAErMTLDv4TnPsTyzNPrfBKmFCEM58uVnJg==",
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.1.6.tgz",
"integrity": "sha512-VkLfig+xm4U3hc4QChzuuAy0NGQ9dfPB8o54hmcZHCX9ofp0Zn6bEY+W3Ytkk76eYwPAgXfywDBlAb2Unjl1Rg==",
"dependencies": {
"cron-parser": "^4.6.0",
"fast-glob": "^3.3.2",
"glob": "^8.0.3",
"ioredis": "^5.3.2",
"lodash": "^4.17.21",
"minimatch": "^9.0.3",
"msgpackr": "^1.10.1",
"node-abort-controller": "^3.1.1",
"semver": "^7.5.4",
@@ -5518,28 +5518,6 @@
"uuid": "^9.0.0"
}
},
"node_modules/bullmq/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/bullmq/node_modules/minimatch": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
"integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=16 || 14 >=14.17"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/bundle-require": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/bundle-require/-/bundle-require-4.0.2.tgz",
@@ -6939,6 +6917,7 @@
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
"integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
"dev": true,
"dependencies": {
"@nodelib/fs.stat": "^2.0.2",
"@nodelib/fs.walk": "^1.2.3",
@@ -7090,6 +7069,7 @@
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
},
@@ -7541,6 +7521,7 @@
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"dependencies": {
"is-glob": "^4.0.1"
},
@@ -8141,6 +8122,7 @@
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
@@ -8171,6 +8153,7 @@
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"dependencies": {
"is-extglob": "^2.1.1"
},
@@ -8205,6 +8188,7 @@
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"engines": {
"node": ">=0.12.0"
}
@@ -8961,6 +8945,7 @@
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
"dev": true,
"engines": {
"node": ">= 8"
}
@@ -8977,6 +8962,7 @@
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
"integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
"dev": true,
"dependencies": {
"braces": "^3.0.2",
"picomatch": "^2.3.1"
@@ -8989,6 +8975,7 @@
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"engines": {
"node": ">=8.6"
},
@@ -9808,27 +9795,6 @@
"url": "https://github.com/sponsors/jaredhanson"
}
},
"node_modules/passport-openidconnect": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/passport-openidconnect/-/passport-openidconnect-0.1.2.tgz",
"integrity": "sha512-JX3rTyW+KFZ/E9OF/IpXJPbyLO9vGzcmXB5FgSP2jfL3LGKJPdV7zUE8rWeKeeI/iueQggOeFa3onrCmhxXZTg==",
"dependencies": {
"oauth": "0.10.x",
"passport-strategy": "1.x.x"
},
"engines": {
"node": ">= 0.6.0"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/jaredhanson"
}
},
"node_modules/passport-openidconnect/node_modules/oauth": {
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/oauth/-/oauth-0.10.0.tgz",
"integrity": "sha512-1orQ9MT1vHFGQxhuy7E/0gECD3fd2fCC+PIX+/jgmU/gI3EpRocXtmtvxCO5x3WZ443FLTLFWNDjl5MPJf9u+Q=="
},
"node_modules/passport-strategy": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/passport-strategy/-/passport-strategy-1.0.0.tgz",
@@ -10602,6 +10568,7 @@
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
"dev": true,
"funding": [
{
"type": "github",
@@ -10948,6 +10915,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
"dev": true,
"funding": [
{
"type": "github",
@@ -11748,6 +11716,7 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"dependencies": {
"is-number": "^7.0.0"
},
@@ -13762,6 +13731,26 @@
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"node_modules/ws": {
"version": "8.16.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz",
"integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/xml-crypto": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.0.tgz",
@@ -41,12 +41,12 @@
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",
"@types/passport-openidconnect": "^0.1.3",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/uuid": "^9.0.7",
"@types/ws": "^8.5.10",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"eslint": "^8.56.0",
@@ -95,7 +95,7 @@
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"bullmq": "^5.1.6",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -115,7 +115,6 @@
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-openidconnect": "^0.1.2",
"pg": "^8.11.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
@@ -125,6 +124,7 @@
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"ws": "^8.16.0",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
}
backend/src/@types/fastify.d.ts (vendored, 17 changed lines)
@@ -20,6 +20,7 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TEventServiceFactory } from "@app/services/event/event-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@@ -71,6 +72,21 @@ declare module "fastify" {
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
}

interface FastifyReply {
sse: ({
data,
error
}:
| {
data: string;
error?: false;
}
| {
error: true;
errorMessage: string;
}) => void;
}

interface FastifyInstance {
services: {
login: TAuthLoginFactory;
@@ -113,6 +129,7 @@ declare module "fastify" {
trustedIp: TTrustedIpServiceFactory;
secretBlindIndex: TSecretBlindIndexServiceFactory;
telemetry: TTelemetryServiceFactory;
event: TEventServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
backend/src/event/event.ts (new file, 188 lines)
@@ -0,0 +1,188 @@
import { URL } from "node:url";

import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { Server as WebSocketServer, ServerOptions, WebSocket } from "ws";

import { TableName } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { checkIPAgainstBlocklist, TIp } from "@app/lib/ip";
import { ActorType, AuthTokenType } from "@app/services/auth/auth-type";
import { TIdentityAccessTokenDALFactory } from "@app/services/identity-access-token/identity-access-token-dal";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAccessTokenJwtPayload } from "@app/services/identity-access-token/identity-access-token-types";

type TEventSubscriptionFactoryDep = {
  identityAccessTokenDAL: TIdentityAccessTokenDALFactory;
  identityAccessTokenServiceFactory: TIdentityAccessTokenServiceFactory;
  permissionService: TPermissionServiceFactory;
};

enum AuthenticationErrors {
  NO_PROJECT_ID = "Unauthorized. Project ID is missing",
  NO_MACHINE = "Unauthorized. Machine Identity Access Token is missing",
  INVALID_TOKEN_TYPE = "Unauthorized. Invalid token type",
  INVALID_TOKEN = "Unauthorized. Invalid token",
  NO_PERMISSION = "Unauthorized. No permission to access project"
}

export type TEventSubscriptionFactory = ReturnType<typeof eventSubscriptionFactory>;

export const eventSubscriptionFactory = ({
  identityAccessTokenDAL,
  permissionService,
  identityAccessTokenServiceFactory
}: TEventSubscriptionFactoryDep) => {
  const config = getConfig();
  let connection: WebSocketServer | null = null;
  const clients = new Map<string, WebSocket[]>();

  const verifyConnection: ServerOptions["verifyClient"] = (info, cb) => {
    void (async () => {
      const machineIdentityAccessToken = info.req.headers["machine-identity-access-token"];
      const projectId = info.req.headers["project-id"];

      if (!projectId || typeof projectId !== "string") {
        cb(false, 401, AuthenticationErrors.NO_PROJECT_ID);
        return;
      }

      if (!machineIdentityAccessToken || typeof machineIdentityAccessToken !== "string") {
        cb(false, 401, AuthenticationErrors.NO_MACHINE);
        return;
      }

      const decodedToken = jwt.verify(machineIdentityAccessToken, config.AUTH_SECRET) as TIdentityAccessTokenJwtPayload;

      if (decodedToken.authTokenType !== AuthTokenType.IDENTITY_ACCESS_TOKEN) {
        cb(false, 401, AuthenticationErrors.INVALID_TOKEN_TYPE);
        return;
      }

      await identityAccessTokenServiceFactory.fnValidateIdentityAccessToken(
        decodedToken,
        info.req.socket.remoteAddress
      );

      const identityAccessToken = await identityAccessTokenDAL.findOne({
        [`${TableName.IdentityAccessToken}.id` as "id"]: decodedToken.identityAccessTokenId,
        isAccessTokenRevoked: false
      });

      if (!identityAccessToken) {
        cb(false, 401, AuthenticationErrors.INVALID_TOKEN);
        return;
      }

      const ipAddress = info.req.socket.remoteAddress;

      if (ipAddress) {
        // This throws, and im not sure if it really should. TODO
        checkIPAgainstBlocklist({
          ipAddress,
          trustedIps: identityAccessToken?.accessTokenTrustedIps as TIp[]
        });
      }

      const { permission } = await permissionService.getProjectPermission(
        ActorType.IDENTITY,
        identityAccessToken.identityId,
        projectId
      );
      try {
        ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
      } catch (err) {
        cb(false, 401, AuthenticationErrors.NO_PERMISSION);
        return;
      }

      cb(true);
    })();
  };

  const init = () => {
    if (connection) return;

    connection = new WebSocketServer({
      port: 8091,
      verifyClient: verifyConnection
    });

    // Purely for testing purposes.
    connection.on("connection", (ws) => {
      const projectId = new URL(ws.url).searchParams.get("projectId");

      if (!projectId) {
        ws.send("Unauthorized. Project ID is missing");
        ws.close();
        return;
      }

      if (!clients.has(projectId)) {
        clients.set(projectId, []);
      }
      clients.get(projectId)?.push(ws);

      ws.on("message", (message) => {
        console.log("received: %s", message);
      });

      ws.on("close", () => {
        const projectClients = clients.get(projectId);

        if (!projectClients) return;

        const index = projectClients.indexOf(ws);

        if (index !== -1) {
          projectClients.splice(index, 1);
        }

        if (projectClients.length === 0) {
          clients.delete(projectId);
        } else {
          clients.set(projectId, projectClients);
        }
      });

      ws.send("Connected.");
    });
  };

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const sendNotification = (projectId: string) => {
    const MESSAGE = "NEW_CHANGE";

    if (!connection) {
      throw new Error("Connection not initialized");
    }

    for (const client of connection.clients) {
      client.send(MESSAGE);
    }
  };

  return {
    init
  };
};

// var WebSocketServer = require("ws").Server;
// var ws = new WebSocketServer({
//   verifyClient: function (info, cb) {
//     var token = info.req.headers.token;
//     if (!token) cb(false, 401, "Unauthorized");
//     else {
//       jwt.verify(token, "secret-key", function (err, decoded) {
//         if (err) {
//           cb(false, 401, "Unauthorized");
//         } else {
//           info.req.user = decoded; //[1]
//           cb(true);
//         }
//       });
//     }
//   }
// });
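For context, a rough client-side sketch (not part of this diff) of how a machine identity might connect to the WebSocket server above. It assumes the server is reachable locally on port 8091 and that the project ID and access token shown are placeholders; it uses the `ws` package that this branch already depends on.

```ts
import WebSocket from "ws";

// The projectId is passed both as a query parameter (read in the "connection" handler)
// and as a header (checked in verifyConnection). Both values here are hypothetical.
const socket = new WebSocket("ws://localhost:8091?projectId=project-123", {
  headers: {
    "project-id": "project-123",
    "machine-identity-access-token": process.env.MACHINE_IDENTITY_ACCESS_TOKEN ?? ""
  }
});

socket.on("open", () => console.log("connected")); // server replies with "Connected."
socket.on("message", (data) => console.log("event:", data.toString())); // e.g. "NEW_CHANGE"
socket.on("close", (code, reason) => console.log("closed", code, reason.toString()));
```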
backend/src/server/plugins/sse.ts (new file, 47 lines)
@@ -0,0 +1,47 @@
import { FastifyPluginAsync, FastifyReply } from "fastify";
import fp from "fastify-plugin";
// eslint-disable-next-line @typescript-eslint/require-await
export const serversideEvents: FastifyPluginAsync = fp(async function serversideEventsPlgin(instance): Promise<void> {
  instance.decorateReply("sse", function handler(this: FastifyReply, input): void {
    // if this already set, it's not first event
    if (!this.raw.headersSent) {
      console.log("Setting headers");
      Object.entries(this.getHeaders()).forEach(([key, value]) => {
        this.raw.setHeader(key, value ?? "");
      });
      this.raw.setHeader("Cache-Control", "no-cache");
      this.raw.setHeader("Content-Type", "text/event-stream");
      this.raw.setHeader("Access-Control-Allow-Origin", "*");
      this.raw.setHeader("Connection", "keep-alive");
      this.raw.flushHeaders(); // flush the headers to establish SSE with client

      // Ngnix will close idle connections even if the connection is keep-alive. So we send a ping every 15 seconds to keep the connection truly alive.
      const interval = setInterval(() => {
        console.log("Sending ping");
        if (!this.raw.writableEnded) {
          this.raw.write("event: ping\n");
          this.raw.write("data: Heartbeat\n\n");
        }
      }, 15_000);

      this.raw.on("close", () => {
        console.log("Connection closed");
        clearInterval(interval);
        this.raw.end();
      });
    }

    if (input.error) {
      this.raw.write("event: error\n");
      this.raw.write(
        `data: ${JSON.stringify({
          error: input.errorMessage
        })}\n\n`
      );
      this.raw.end();
      return;
    }

    this.raw.write(`data: ${input.data}\n\n`); // res.write() instead of res.send()
  });
});
@@ -36,6 +36,7 @@ import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal"
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { getConfig } from "@app/lib/config/env";
import { TQueueServiceFactory } from "@app/queue";
import { serversideEvents } from "@app/server/plugins/sse";
import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal";
import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { authDALFactory } from "@app/services/auth/auth-dal";
@@ -44,6 +45,7 @@ import { authPaswordServiceFactory } from "@app/services/auth/auth-password-serv
import { authSignupServiceFactory } from "@app/services/auth/auth-signup-service";
import { tokenDALFactory } from "@app/services/auth-token/auth-token-dal";
import { tokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { eventServiceFactory } from "@app/services/event/event-service";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { identityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { identityServiceFactory } from "@app/services/identity/identity-service";
@@ -116,6 +118,8 @@ export const registerRoutes = async (
) => {
await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });

const cfg = getConfig();

// db layers
const userDAL = userDALFactory(db);
const authDAL = authDALFactory(db);
@@ -490,6 +494,8 @@
licenseService
});

const eventService = eventServiceFactory({ redisUrl: cfg.REDIS_URL });

await superAdminService.initServerCfg();
await auditLogQueue.startAuditLogPruneJob();
// setup the communication with license key server
@@ -535,7 +541,8 @@
trustedIp: trustedIpService,
scim: scimService,
secretBlindIndex: secretBlindIndexService,
telemetry: telemetryService
telemetry: telemetryService,
event: eventService
});

server.decorate<FastifyZodProvider["store"]>("store", {
@@ -545,6 +552,7 @@
await server.register(injectIdentity, { userDAL, serviceTokenDAL });
await server.register(injectPermission);
await server.register(injectAuditLogInfo);
await server.register(serversideEvents, { prefix: "/api/v1/sse" });

server.route({
url: "/api/status",
@@ -562,7 +570,6 @@
}
},
handler: async () => {
const cfg = getConfig();
const serverCfg = await getServerCfg();
return {
date: new Date(),
@@ -16,6 +16,7 @@ import { registerProjectRouter } from "./project-router";
import { registerSecretFolderRouter } from "./secret-folder-router";
import { registerSecretImportRouter } from "./secret-import-router";
import { registerSecretTagRouter } from "./secret-tag-router";
import { registerServersideEventsRouter } from "./sse-router";
import { registerSsoRouter } from "./sso-router";
import { registerUserActionRouter } from "./user-action-router";
import { registerUserRouter } from "./user-router";
@@ -57,4 +58,5 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await server.register(registerIntegrationAuthRouter, { prefix: "/integration-auth" });
await server.register(registerWebhookRouter, { prefix: "/webhooks" });
await server.register(registerIdentityRouter, { prefix: "/identities" });
await server.register(registerServersideEventsRouter, { prefix: "/sse" });
};
backend/src/server/routes/v1/sse-router.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import { z } from "zod";

import { logger } from "@app/lib/logger";

export const registerServersideEventsRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/events/:projectId",
    schema: {
      params: z.object({
        projectId: z.string().trim()
      }),
      response: {
        200: z.string()
      }
    },
    // onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req, res) => {
      res.sse({
        data: JSON.stringify({
          message: "Connected to event stream"
        })
      });

      const subscription = await server.services.event.crateSubscription(req.params.projectId);

      // It's OK to create a event listener here, because it's tied to the local subscription instance. So once the function ends, the listener is removed along with the subscription.
      // No need to worry about memory leaks!
      subscription
        .on("message", (channel, message) => {
          if (channel === req.params.projectId)
            res.sse({
              data: JSON.stringify(message)
            });
        })
        .on("error", (error) => {
          logger.error(error, "Error in subscription");
          res.sse({
            error: true,
            errorMessage: error.message // ? Should we really return the error message to the client?
          });
        });

      // eslint-disable-next-line @typescript-eslint/return-await, @typescript-eslint/no-misused-promises
      req.socket.on("close", async () => await subscription.unsubscribe());
    }
  });
};
@ -8,12 +8,9 @@
|
||||
|
||||
import { Authenticator } from "@fastify/passport";
|
||||
import fastifySession from "@fastify/session";
|
||||
// import { FastifyRequest } from "fastify";
|
||||
import { Strategy as GitHubStrategy } from "passport-github";
|
||||
import { Strategy as GitLabStrategy } from "passport-gitlab2";
|
||||
import { Strategy as GoogleStrategy } from "passport-google-oauth20";
|
||||
import { Strategy as OpenIDConnectStrategy } from "passport-openidconnect";
|
||||
// const OpenIDConnectStrategy = require('passport-openidconnect');
|
||||
import { z } from "zod";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
@ -136,136 +133,6 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO:
|
||||
* 1. Test w static config
|
||||
* 2. Fetch config from db
|
||||
*/
|
||||
|
||||
// const getOIDCConfiguration = (req: FastifyRequest, callback: any) => {
|
||||
// // Fetching things from database or whatever
|
||||
// const { username } = req.body as { username: string };
|
||||
|
||||
// process.nextTick(() => {
|
||||
// const opts = {
|
||||
// issuer: "",
|
||||
// authorizationURL: "",
|
||||
// tokenURL: "",
|
||||
// userInfoURL: "",
|
||||
// clientID: "",
|
||||
// clientSecret: "",
|
||||
// callbackURL: `${'test'}/api/sso/oidc`,
|
||||
// // issuer: ISSUER_URL_OIDC_LOGIN,
|
||||
// // authorizationURL: AUTHORIZATION_URL_OIDC_LOGIN,
|
||||
// // tokenURL: TOKEN_URL_OIDC_LOGIN,
|
||||
// // userInfoURL: USER_INFO_URL_OIDC_LOGIN,
|
||||
// // clientID: CLIENT_ID_OIDC_LOGIN,
|
||||
// // clientSecret: CLIENT_SECRET_OIDC_LOGIN,
|
||||
// // callbackURL: `${SITE_URL}/api/sso/oidc`,
|
||||
// scope: ['profile', 'email'],
|
||||
// passReqToCallback: true
|
||||
// }
|
||||
|
||||
// callback(null, opts);
|
||||
// });
|
||||
// };
|
||||
|
||||
const ISSUER_URL_OIDC_LOGIN = "https://oauth.id.jumpcloud.com/";
|
||||
const AUTHORIZATION_URL_OIDC_LOGIN = "https://oauth.id.jumpcloud.com/oauth2/auth";
|
||||
const TOKEN_URL_OIDC_LOGIN = "https://oauth.id.jumpcloud.com/oauth2/token";
|
||||
const USER_INFO_URL_OIDC_LOGIN = "https://oauth.id.jumpcloud.com/userinfo";
|
||||
const CLIENT_ID_OIDC_LOGIN = "";
|
||||
const CLIENT_SECRET_OIDC_LOGIN = "";
|
||||
const SITE_URL = "";
|
||||
|
||||
const config = {
|
||||
issuer: ISSUER_URL_OIDC_LOGIN,
|
||||
authorizationURL: AUTHORIZATION_URL_OIDC_LOGIN,
|
||||
tokenURL: TOKEN_URL_OIDC_LOGIN,
|
||||
userInfoURL: USER_INFO_URL_OIDC_LOGIN,
|
||||
clientID: CLIENT_ID_OIDC_LOGIN,
|
||||
clientSecret: CLIENT_SECRET_OIDC_LOGIN,
|
||||
callbackURL: `${SITE_URL}/api/v1/sso/oidc`,
|
||||
scope: ["profile", "email"],
|
||||
passReqToCallback: true
|
||||
};
|
||||
|
||||
if (config) {
|
||||
passport.use(
|
||||
new OpenIDConnectStrategy(config, (req: any, issuer: any, profile: any, done: any) => {
|
||||
try {
|
||||
console.log("oidc");
|
||||
console.log("oidc issuer: ", issuer);
|
||||
console.log("oidc profile: ", profile);
|
||||
// const { name: { familyName, givenName }, emails } = profile;
|
||||
done(null, profile);
|
||||
} catch (err) {
|
||||
console.log("oidc err: ", err);
|
||||
done(null, false);
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
server.route({
|
||||
url: "/login/oidc",
|
||||
method: "GET",
|
||||
preValidation: (req, res) => {
|
||||
console.log("oidc login");
|
||||
return (
|
||||
passport.authenticate("openidconnect", {
|
||||
session: false,
|
||||
scope: ["profile", "email"]
|
||||
}) as any
|
||||
)(req, res);
|
||||
},
|
||||
handler: async (req, res) => {
|
||||
console.log("oidc login 2");
|
||||
if (req.passportUser) {
|
||||
return res.code(200).send({ message: "Authentication successful", user: req.passportUser });
|
||||
}
|
||||
return res.code(401).send({ error: "Authentication failed" });
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/oidc",
|
||||
method: "GET",
|
||||
preValidation: (req, res) => {
|
||||
console.log("oidcx req: ", req); // code, state
|
||||
return (
|
||||
passport.authenticate("openidconnect", {
|
||||
session: false,
|
||||
failureRedirect: "/api/v1/sso/login/provider/error",
|
||||
failureMessage: true
|
||||
}) as any
|
||||
)(req, res);
|
||||
},
|
||||
handler: (req, res) => {
|
||||
console.log("oidc 3");
|
||||
if (req.passportUser.isUserCompleted) {
|
||||
// login
|
||||
return res.redirect(`${SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`);
|
||||
}
|
||||
|
||||
// signup
|
||||
return res.redirect(`${SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/login/provider/error",
|
||||
method: "GET",
|
||||
handler: (req, res) => {
|
||||
console.log("reqyx: ", req);
|
||||
console.log("resyx: ", res);
|
||||
return res.status(500).send({
|
||||
error: "Authentication error",
|
||||
details: req.query
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/redirect/google",
|
||||
method: "GET",
|
||||
|
@@ -16,6 +16,7 @@ import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { getUserAgentType } from "@app/server/plugins/audit-log";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { TEventType } from "@app/services/event/event-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";

import { secretRawSchema } from "../sanitizedSchemas";
@@ -919,6 +920,15 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
newSecretName
});

await server.services.event.publish(req.body.workspaceId, {
type: TEventType.SECRET_UPDATE,
payload: {
secretId: secret.id,
secretKey: req.params.secretName,
secretPath: "test/path"
}
});

await server.services.auditLog.createAuditLog({
projectId: req.body.workspaceId,
...req.auditLogInfo,
backend/src/services/event/event-service.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
/* eslint-disable no-console */
import Redis from "ioredis";

// import { logger } from "@app/lib/logger";
import { TEvent, TEventType } from "./event-types";

type TEventServiceFactoryDep = {
  redisUrl: string;
};

export type TEventServiceFactory = ReturnType<typeof eventServiceFactory>;

export const eventServiceFactory = ({ redisUrl }: TEventServiceFactoryDep) => {
  const publisher = new Redis(redisUrl, { maxRetriesPerRequest: null });

  // Map key: the channel ID.
  // connections / total number of connections: We keep track of this to know when to unsubscribe and disconnect the client.
  // client / the subscription: We store this so we can use the same connection/subscription for the same channel. We don't want to create a new connection for each subscription, because that would be a waste of resources and become hard to scale.
  const redisClients = new Map<
    string,
    {
      client: Redis;
      connections: number;
    }
  >();
  // Will this work for vertical scaling? The redisClients

  // channel would be the projectId
  const publish = async (channel: string, event: TEvent[TEventType]) => {
    await publisher.publish(channel, JSON.stringify(event));
  };

  const crateSubscription = async (channel: string) => {
    let subscriber: Redis | null = null;

    const existingSubscriber = redisClients.get(channel);

    if (existingSubscriber) {
      redisClients.set(channel, {
        client: existingSubscriber.client,
        connections: existingSubscriber.connections + 1
      });

      subscriber = existingSubscriber.client;
    } else {
      subscriber = new Redis(redisUrl, { maxRetriesPerRequest: null });

      redisClients.set(channel, {
        client: subscriber,
        connections: 1
      });
    }

    await subscriber.subscribe(channel, (msg) => {
      if (msg instanceof Error) {
        throw msg;
      }
    });

    return {
      on: subscriber.on.bind(subscriber),
      unsubscribe: async () => {
        const subscriberToRemove = redisClients.get(channel);

        if (subscriberToRemove) {
          // If there's only 1 connection, we can fully unsubscribe and disconnect the client.
          if (subscriberToRemove.connections === 1) {
            await subscriberToRemove.client.unsubscribe(`${channel}`);
            await subscriberToRemove.client.quit();
            redisClients.delete(channel);
          } else {
            // If there's more than 1 connection, we just decrement the connections count, because there are still other listeners.
            redisClients.set(channel, {
              client: subscriberToRemove.client,
              connections: subscriberToRemove.connections - 1
            });
          }
        }
      }
    };
  };

  return {
    publish,
    crateSubscription
  };
};
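To make the reference counting described in the comments above concrete, here is a hypothetical usage sketch (not part of this diff). It mirrors the `publish` call added to secret-router.ts above and keeps the `crateSubscription` spelling from the diff; the import aliases, project ID, Redis URL, and payload values are assumptions.

```ts
import { eventServiceFactory } from "@app/services/event/event-service";
import { TEventType } from "@app/services/event/event-types";

const main = async () => {
  const events = eventServiceFactory({ redisUrl: "redis://localhost:6379" }); // placeholder URL

  const subA = await events.crateSubscription("project-123"); // creates a Redis subscriber client
  const subB = await events.crateSubscription("project-123"); // reuses it; connections count becomes 2

  subA.on("message", (channel, message) => {
    console.log(`received on ${channel}:`, message); // message is the JSON string published below
  });

  await events.publish("project-123", {
    type: TEventType.SECRET_UPDATE,
    payload: { secretId: "placeholder-id", secretKey: "DB_PASSWORD", secretPath: "/" }
  });

  await subB.unsubscribe(); // count drops to 1; the shared client stays connected for subA
  await subA.unsubscribe(); // count hits 0; the client unsubscribes and quits
};

void main();
```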
backend/src/services/event/event-types.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import { z } from "zod";

export enum TEventType {
  SECRET_UPDATE = "secret_update",
  SECRET_DELETE = "secret_delete",
  SECRET_CREATE = "secret_create"
}

export const EventSchema = z.object({
  secret_create: z.object({
    payload: z.object({
      secretId: z.string(),
      secretKey: z.string(),
      secretPath: z.string()
    }),
    type: z.literal("secret_create")
  }),
  secret_update: z.object({
    payload: z.object({
      secretId: z.string(),
      secretKey: z.string(),
      secretPath: z.string()
    }),
    type: z.literal("secret_update")
  }),
  secret_delete: z.object({
    payload: z.object({
      secretId: z.string(),
      secretPath: z.string()
    }),
    type: z.literal("secret_delete")
  })
});

export type TEvent = z.infer<typeof EventSchema>;
@@ -16,7 +16,49 @@ git checkout -b MY_BRANCH_NAME
## Set up environment variables

Start by creating a .env file at the root of the Infisical directory then copy the contents of the file linked [here](https://github.com/Infisical/infisical/blob/main/.env.example). View all available [environment variables](https://infisical.com/docs/self-hosting/configuration/envars) and guidance for each.
Start by creating a .env file at the root of the Infisical directory then copy the contents of the file below into the .env file.

<Accordion title=".env file content">
```env
# Keys
# Required key for platform encryption/decryption ops
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218

# JWT
# Required secrets to sign JWT tokens
JWT_SIGNUP_SECRET=3679e04ca949f914c03332aaaeba805a
JWT_REFRESH_SECRET=5f2f3c8f0159068dc2bbb3a652a716ff
JWT_AUTH_SECRET=4be6ba5602e0fa0ac6ac05c3cd4d247f
JWT_SERVICE_SECRET=f32f716d70a42c5703f4656015e76200

# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
# Required
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin

# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example

# Website URL
# Required
SITE_URL=http://localhost:8080

# Mail/SMTP
SMTP_HOST='smtp-server'
SMTP_PORT='1025'
SMTP_NAME='local'
SMTP_USERNAME='team@infisical.com'
SMTP_PASSWORD=
```
</Accordion>

<Warning>
The pre-populated environment variable values above are meant to be used in development only. They should never be used in production.
</Warning>

View all available [environment variables](https://infisical.com/docs/self-hosting/configuration/envars) and guidance for each.

## Starting Infisical for development

@@ -30,7 +72,10 @@ docker-compose -f docker-compose.dev.yml up --build --force-recreate
```
#### Access local server

Once all the services have spun up, browse to http://localhost:8080.
Once all the services have spun up, browse to http://localhost:8080. To sign in, you may use the default credentials listed below.

Email: `test@localhost.local`
Password: `testInfisical1`

#### Shutdown local server

@@ -1,36 +0,0 @@
---
title: "General LDAP"
description: "Log in to Infisical with LDAP"
---

<Info>
LDAP is a paid feature.

If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact team@infisical.com to purchase an enterprise license to use it.
</Info>

You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol)

<Steps>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Authentication > LDAP Configuration and select **Set up LDAP**.

Next, input your LDAP server settings.



Here's some guidance for each field:

- URL: The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.
- Bind DN: The distinguished name of object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`.
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- Search Base / User DN: Base DN under which to perform user search such as `ou=Users,dc=example,dc=com`
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate.
</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.


</Step>
</Steps>
@@ -1,56 +0,0 @@
---
title: "JumpCloud LDAP"
description: "Configure JumpCloud LDAP for Logging into Infisical"
---

<Info>
LDAP is a paid feature.

If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact team@infisical.com to purchase an enterprise license to use it.
</Info>

<Steps>
<Step title="Prepare LDAP in JumpCloud">
In JumpCloud, head to USER MANAGEMENT > Users and create a new user via the **Manual user entry** option. This user
will be used as a privileged service account to facilitate Infisical's ability to bind/search the LDAP directory.

When creating the user, input their **First Name**, **Last Name**, **Username** (required), **Company Email** (required), and **Description**.
Also, create a password for the user.

Next, under User Security Settings and Permissions > Permission Settings, check the box next to **Enable as LDAP Bind DN**.



</Step>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Authentication > LDAP Configuration and select **Set up LDAP**.

Next, input your JumpCloud LDAP server settings.



Here's some guidance for each field:

- URL: The LDAP server to connect to (`ldaps://ldap.jumpcloud.com:636`).
- Bind DN: The distinguished name of object to bind when performing the user search (`uid=<ldap-user-username>,ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- Search Base / User DN: Base DN under which to perform user search (`ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate (instructions to obtain the certificate for JumpCloud [here](https://jumpcloud.com/support/connect-to-ldap-with-tls-ssl)).

<Tip>
When filling out the **Bind DN** and **Bind Pass** fields, refer to the username and password of the user created in Step 1.

Also, for the **Bind DN** and **Search Base / User DN** fields, you'll want to use the organization ID that appears
in your LDAP instance **ORG DN**.
</Tip>
</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.


</Step>
</Steps>

Resources:
- [JumpCloud Cloud LDAP Guide](https://jumpcloud.com/support/use-cloud-ldap)
@@ -1,23 +0,0 @@
---
title: "LDAP Overview"
description: "Log in to Infisical with LDAP"
---
<Info>
LDAP is a paid feature.

If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
</Info>

You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol)

To note, configuring LDAP retains the end-to-end encrypted architecture of Infisical because we decouple the authentication and decryption steps; the LDAP server cannot and will not have access to the decryption key needed to decrypt your secrets.

LDAP providers:

- Active Directory
- [JumpCloud LDAP](/documentation/platform/ldap/jumpcloud)
- AWS Directory Service
- Foxpass

Check out the general instructions for configuring LDAP [here](/documentation/platform/ldap/general).
@@ -1,21 +0,0 @@
---
title: "Enhancing Security and Usability: Project Upgrades"
---

At Infisical, we're constantly striving to elevate the security and usability standards of our platform to better serve our users.
With this commitment in mind, we're excited to introduce our latest addition, non-E2EE projects, aimed at addressing two significant issues while enhancing how clients interact with Infisical programmatically.

Previously, users encountered a challenge where projects risked becoming inaccessible if the project creator deleted their account.
Additionally, our API lacked the capability to interact with projects without dealing with complex cryptographic operations.
These obstacles made API driven automation and collaboration a painful experience for a majority of our users.

To overcome these limitations, our upgrade focuses on disabling end-to-end encryption (E2EE) for projects.
While this may raise eyebrows, it's important to understand that this decision is a strategic move to make Infisical easer to use and interact with.

But what does this mean for our users? Essentially nothing, there are no changes required on your end.
Rest assured, all sensitive data remains encrypted at rest according to the latest industry standards.
Our commitment to security remains unwavering, and this upgrade is a testament to our dedication to delivering on our promises in both security and usability when it comes to secrets management.

To increase consistency with existing and future integrations, all projects created on Infisical from now on will have end-to-end encryption (E2EE) disabled by default.
This will not only reduce confusion for end users, but will also make the Infisical API seamless to use.
Binary file not shown. (Before: 436 KiB)
Binary file not shown. (Before: 351 KiB)
Binary file not shown. (Before: 332 KiB, After: 1.2 MiB)
Binary file not shown. (Before: 64 KiB)
@@ -149,14 +149,7 @@
"documentation/platform/sso/jumpcloud"
]
},
{
"group": "LDAP",
"pages": [
"documentation/platform/ldap/overview",
"documentation/platform/ldap/jumpcloud",
"documentation/platform/ldap/general"
]
},
"documentation/platform/ldap",
{
"group": "SCIM",
"pages": [
@@ -173,6 +166,7 @@
"pages": [
"self-hosting/overview",
"self-hosting/configuration/requirements",
"self-hosting/configuration/schema-migrations",
{
"group": "Installation methods",
"pages": [
@@ -182,13 +176,6 @@
]
},
"self-hosting/configuration/envars",
{
"group": "Guides",
"pages": [
"self-hosting/configuration/schema-migrations",
"self-hosting/guides/mongo-to-postgres"
]
},
"self-hosting/faq"
]
},
@@ -15,7 +15,3 @@ However, in the event you choose to use Infisical without SSL, you can do so by
[Learn more about secure cookies](https://really-simple-ssl.com/definition/what-are-secure-cookies/)
</Accordion>

<Accordion title="How can I upgrade my Infisical instance to Postgres version?">
Follow the step by step guide [here](self-hosting/guides/mongo-to-postgres) to learn how.
</Accordion>

@ -1,195 +0,0 @@
|
||||
---
|
||||
title: "Migrate Mongo to Postgres"
|
||||
description: "How to migrate from MongoDB to PostgreSQL for Infisical"
|
||||
---
|
||||
|
||||
This guide will provide step by step instructions on migrating your Infisical instance running on MongoDB to the newly released PostgreSQL version of Infisical.
|
||||
The newly released Postgres version of Infisical is the only version of Infisical that will receive feature updates and patches going forward.
|
||||
|
||||
<Tip>
|
||||
If you have a small set of secrets, we recommend you to download the secrets and upload them to your new instance of Infisical instead of running the migration script.
|
||||
</Tip>
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before starting the migration, ensure you have the following command line tools installed:
|
||||
|
||||
- [pg_dump](https://www.postgresql.org/docs/current/app-pgrestore.html)
|
||||
- [pg_restore](https://www.postgresql.org/docs/current/app-pgdump.html)
|
||||
- [mongodump](https://www.mongodb.com/docs/database-tools/mongodump/)
|
||||
- [mongorestore](https://www.mongodb.com/docs/database-tools/mongorestore/)
|
||||
- [Docker](https://docs.docker.com/engine/install/)
|
||||
|
||||
## Prepare for migration
|
||||
|
||||
<Steps>
|
||||
<Step title="Backup Production MongoDB Data">
|
||||
While the migration script will not mutate any MongoDB production data, we recommend you to take a backup of your MongoDB instance if possible.
|
||||
</Step>
|
||||
<Step title="Set Migration Mode">
|
||||
To prevent new data entries during the migration, set your Infisical instance to migration mode by setting the environment variable `MIGRATION_MODE=true` and redeploying your instance.
|
||||
This mode will block all write operations, only allowing GET requests. It also disables user logins and sets up a migration page to prevent UI interactions.
|
||||

|
||||
</Step>
|
||||
<Step title="Start local instances of Mongo and Postgres databases">
|
||||
Start local instances of MongoDB and Postgres. This will be used in later steps to process and transform the data locally.
|
||||
|
||||
To start local instances of the two databases, create a file called `docker-compose.yaml` as shown below.
|
||||
|
||||
```yaml docker-compose.yaml
|
||||
version: '3.1'
|
||||
|
||||
services:
|
||||
mongodb:
|
||||
image: mongo
|
||||
restart: always
|
||||
environment:
|
||||
MONGO_INITDB_ROOT_USERNAME: root
|
||||
MONGO_INITDB_ROOT_PASSWORD: example
|
||||
ports:
|
||||
- "27017:27017"
|
||||
volumes:
|
||||
- mongodb_data:/data/db
|
||||
|
||||
postgres:
|
||||
image: postgres
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_PASSWORD: example
|
||||
ports:
|
||||
- "5432:5432"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
|
||||
volumes:
|
||||
mongodb_data:
|
||||
postgres_data:
|
||||
```
|
||||
|
||||
Next, run the command below in the same working directory where the `docker-compose.yaml` file resides to start both services.
|
||||
|
||||
```
|
||||
docker-compose up
|
||||
```
|
||||
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Dump MongoDB
|
||||
To speed up the data transformation process, the first step involves transferring the production data from Infisical's MongoDB to a local machine.
|
||||
This is achieved by creating a dump of the production database and then uploading this dumped data into a local Mongo instance.
|
||||
By having a running local instance of the production database, we will significantly reduce the time it takes to run the migration script.
|
||||
|
||||
<Steps>
|
||||
<Step title="Dump MongoDB data to your local machine using">
|
||||
|
||||
```
|
||||
mongodump --uri=<your_mongo_prod_uri> --archive="mongodump-db" --db=<db name> --excludeCollection=auditlogs
|
||||
```
|
||||
|
||||
</Step>
|
||||
<Step title="Restore this data to the local MongoDB instance">
|
||||
```
|
||||
mongorestore --uri=mongodb://root:example@localhost:27017/ --archive="mongodump-db"
|
||||
```
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
## Start the migration
|
||||
|
||||
Once started, the migration script will transform MongoDB data into an equivalent PostgreSQL format.
<Steps>
<Step title="Clone Infisical Repository">
Clone the Infisical repository.

```
git clone https://github.com/Infisical/infisical.git
```

</Step>
<Step title="Install dependencies for backend">

```
cd backend
```

```
npm install
```
</Step>
<Step title="Install dependencies for script">

```
cd pg-migrator
```

```
npm install
```

</Step>
<Step title="Execute Migration Script">

```
npm run migration
```

When you run the command above, you will be asked to provide the MongoDB connection string for the database containing your production Infisical data. Since the production Mongo data has been transferred to the local Mongo instance, enter the connection string of that local instance.

```
mongodb://root:example@localhost:27017/<db-name>?authSource=admin
```
<Tip>
Remember to replace `<db-name>` with the name of the MongoDB database. If you are not sure of the name, you can use [Compass](https://www.mongodb.com/products/tools/compass) to view the available databases.
</Tip>

Next, you will be asked to enter the Postgres connection string for the database where the transformed data should be stored.
Enter the connection string of the local Postgres instance that was set up earlier in this guide.

```
postgres://infisical:infisical@localhost/infisical?sslmode=disable
```
</Step>

<Step title="Store migration metadata">
Once the script has completed, you will notice a new folder called `db` has been created in the `pg-migrator` folder.
This folder contains metadata for schema mapping and can be helpful when debugging migration-related issues.
We highly recommend making a copy of this folder in case you need assistance from the Infisical team during your migration.

<Info>
The `db` folder does not contain any sensitive data.
</Info>
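
For example, you could copy the folder alongside your other migration artifacts (illustrative destination path):

```
cp -r db ~/infisical-migration-metadata-backup
```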
</Step>
</Steps>

## Finalizing Migration

At this stage, the data from the Mongo instance of Infisical should have been successfully converted into its Postgres equivalent.
The remaining step is to transfer the local Postgres database, which now contains all the migrated data, to your chosen production Postgres environment.
Rather than transferring the data row by row from your local machine to the production Postgres database, we will first create a dump file from the local Postgres instance and then restore this file into your production Postgres instance.
<Steps>
<Step title="Dump from local PostgreSQL">

```
pg_dump -h localhost -U infisical -Fc -b -v -f dumpfilelocation.sql -d infisical
```
</Step>
<Step title="Upload to production PostgreSQL">

```
pg_restore --clean -v -h <host> -U <db-user-name> -d <database-name> -j 2 dumpfilelocation.sql
```

<Tip>
Remember to replace `<host>`, `<db-user-name>`, and `<database-name>` with the corresponding details of your production Postgres database.
</Tip>
</Step>
<Step title="Verify Data Upload">
Use a tool like Beekeeper Studio to confirm that the data has been successfully transferred to your production Postgres database.
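
If you prefer the command line, a quick spot check with `psql` also works (a sketch; substitute your production connection details, and note that the table name in the count query is an assumption about the schema):

```
# List the restored tables
psql "postgres://<db-user-name>:<password>@<host>/<database-name>" -c "\dt"

# Spot-check row counts in a table you expect to be populated
psql "postgres://<db-user-name>:<password>@<host>/<database-name>" -c "SELECT COUNT(*) FROM users;"
```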
</Step>
</Steps>

## Post-Migration Steps

After successfully migrating the data to PostgreSQL, you can proceed to deploy Infisical using your preferred deployment method.
Refer to [Infisical's self-hosting documentation](https://infisical.com/docs/self-hosting/overview) for deployment options.
Remember to use your production PostgreSQL connection string for the new deployment and to transfer all [environment variables](/self-hosting/configuration/envars) from the MongoDB version of Infisical to the new version (they are all compatible).
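
For example, if your new deployment reads the Postgres connection string from an environment variable, the configuration might look roughly like this (a sketch only; `DB_CONNECTION_URI` is the variable name assumed here, so confirm it against the environment variable reference linked above):

```
# Assumed variable name; verify against the configuration reference
export DB_CONNECTION_URI="postgres://<db-user-name>:<password>@<host>:5432/<database-name>"
```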

## Additional discussion

- When you visit Infisical's [Docker Hub](https://hub.docker.com/r/infisical/infisical) page, you will notice that some image tags end with `-postgres`.
  This suffix indicates that the image runs on the new Postgres backend; any image tag that does not end in `-postgres` runs on MongoDB. An example pull command is shown below.
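
For example, pulling a Postgres-backed image might look like the following (the tag is a placeholder; pick an actual tag from the Docker Hub page):

```
docker pull infisical/infisical:<version>-postgres
```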
@ -1,5 +1,4 @@
import { useCallback, useState } from "react";
import Link from "next/link";
import { useRouter } from "next/router";
import { faWarning } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
@ -98,31 +97,16 @@ export const UpgradeProjectAlert = ({ project }: UpgradeProjectAlertProps): JSX.
<div className="flex w-full flex-col text-sm">
<span className="mb-2 text-lg font-semibold">Upgrade your project</span>
{membership.role === "admin" ? (
<>
<p>
Upgrade your project version to continue receiving the latest improvements and
patches.
</p>
<Link href="/docs/documentation/platform/project-upgrade">
<a target="_blank" className="text-primary-400">
Learn more
</a>
</Link>
</>
<p>
Upgrade your project version to continue receiving the latest improvements and patches.
</p>
) : (
<>
<p>
<span className="font-bold">Please ask a project admin to upgrade the project.</span>
<br />
Upgrading the project version is required to continue receiving the latest
improvements and patches.
</p>
<Link href="/docs/documentation/platform/project-upgrade">
<a target="_blank" className="text-primary-400">
Learn more
</a>
</Link>
</>
<p>
<span className="font-bold">Please ask a project admin to upgrade the project.</span>
<br />
Upgrading the project version is required to continue receiving the latest improvements
and patches.
</p>
)}
{currentStatus && <p className="mt-2 opacity-80">Status: {currentStatus}</p>}
</div>
@ -7,7 +7,7 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 1.0.5
version: 1.0.3

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
@ -13,6 +13,31 @@ server {

proxy_cookie_path / "/; secure; HttpOnly; SameSite=strict";
}

location /api/v1/sse {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

proxy_set_header Host $http_host;
proxy_set_header X-NginX-Proxy true;

proxy_pass http://backend:4000;
proxy_redirect off;

proxy_cookie_path / "/; secure; HttpOnly; SameSite=strict";

# Without proxy buffering turned off, server-sent events will not work: the requests won't be sent until the request is closed or the buffer is full.
client_max_body_size 0;
proxy_http_version 1.1;
proxy_buffering off;
proxy_request_buffering off;

proxy_read_timeout 24h;

proxy_cache off;
proxy_set_header Connection '';
chunked_transfer_encoding off;
}

location / {
include /etc/nginx/mime.types;
@ -66,7 +66,7 @@ enum SecretEncryptionAlgo {
AES_256_GCM = "aes-256-gcm",
}

const ENV_SLUG_LENGTH = 500;
const ENV_SLUG_LENGTH = 15;

enum SecretKeyEncoding {
UTF8 = "utf8",
@ -210,9 +210,9 @@ export const migrateCollection = async <
return (await tx
.batchInsert<Tables[K]["base"]>(postgresTableName, pgDoc as any)
.returning(returnKeys as any)) as Pick<
Tables[K]["base"],
R[number]
>[];
Tables[K]["base"],
R[number]
>[];
});
await postPgProcessing?.(mongooseDoc, newUserIds);
}
@ -230,9 +230,9 @@ export const migrateCollection = async <
return (await tx
.batchInsert(postgresTableName, pgDoc as any)
.returning(returnKeys as any)) as Pick<
Tables[K]["base"],
R[number]
>[];
Tables[K]["base"],
R[number]
>[];
});
await postPgProcessing?.(mongooseDoc, newUserIds);
}
@ -258,9 +258,9 @@ const main = async () => {
try {
dotenv.config();

// process.env.MONGO_DB_URL = "mongodb://root:example@localhost:27017/test?authSource=admin"
process.env.MONGO_DB_URL = "mongodb://root:example@localhost:27017/test?authSource=admin"

// process.env.POSTGRES_DB_URL = "postgres://infisical:infisical@localhost/infisical?sslmode=disable"
process.env.POSTGRES_DB_URL = "postgres://infisical:infisical@localhost/infisical?sslmode=disable"

process.env.START_FRESH = "true";
const prompt = promptSync({ sigint: true });
@ -313,7 +313,7 @@ const main = async () => {
preProcessing: async (doc) => {
if (["64058e0ea5c55c6a8203fed7", "64155f5d75c91bf4e176eb85", "6434ff80b82e04f17008aa13"].includes(doc._id.toString())) {
console.log("Skipping duplicate user")
return
return
}

const id = uuidV4();
@ -843,9 +843,9 @@ const main = async () => {
await folderKv.put(folder.id, id);
const parentId = folder?.parentId
? await folderKv.get(folder?.parentId).catch((e) => {
console.log("parent folder not found==>", folder);
throw e;
})
console.log("parent folder not found==>", folder);
throw e;
})
: null;

pgFolder.push({
@ -1548,8 +1548,8 @@ const main = async () => {
returnKeys: ["id"],
preProcessing: async (doc) => {
// dangling identity
if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
return
if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
return
}

const id = uuidV4();
@ -1584,8 +1584,8 @@ const main = async () => {
returnKeys: ["id"],
preProcessing: async (doc) => {
// dangling identity
if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
return
if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
return
}

const identityUAId = await identityUaKv.get(
@ -1617,15 +1617,15 @@ const main = async () => {
returnKeys: ["id"],
preProcessing: async (doc) => {
// dangling identity
if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
return
if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
return
}

await identityAccessTokenKv.put(doc._id.toString(), doc._id.toString());
const identityUAClientSecretId = doc?.identityUniversalAuthClientSecret
? await identityUaClientSecKv.get(
doc.identityUniversalAuthClientSecret.toString(),
)
doc.identityUniversalAuthClientSecret.toString(),
)
: null;
const identityId = await identityKv.get(doc.identity.toString());
return {
@ -1652,8 +1652,8 @@ const main = async () => {
returnKeys: ["id"],
preProcessing: async (doc) => {
// dangling identity
if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
return
if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
return
}

const id = uuidV4();
@ -1687,8 +1687,8 @@ const main = async () => {
returnKeys: ["id"],
preProcessing: async (doc) => {
// dangling identity
if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
return
if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
return
}

const id = uuidV4();
@ -2317,8 +2317,8 @@ const main = async () => {

const statusChangeBy = doc.statusChangeBy
? await projectMembKv
.get(doc.statusChangeBy.toString())
.catch(() => null)
.get(doc.statusChangeBy.toString())
.catch(() => null)
: null;
return {
id,
@ -2454,7 +2454,7 @@ const main = async () => {
secretCommentCiphertext:
commit.newVersion.secretCommentCiphertext ||
secret.secretCommentCiphertext,
secretVersion,
secretVersion,
createdAt: new Date((doc as any).createdAt),
updatedAt: new Date((doc as any).updatedAt),
};