Compare commits
17 commits: postgres-h ... mongo-to-p

646ceba089
9d202e8501
1f9f15136e
4bf7e8bbd1
6891d309da
1cccbca0c5
2c2e1f5d2e
6946f3901c
82a7010e29
a1e763fa28
0992117173
9419884a26
850f3a347c
4c9101d18d
06e8e90ad5
fdd79c0568
4ef8abdb00
@@ -19,10 +19,6 @@ POSTGRES_DB=infisical
 # Redis
 REDIS_URL=redis://redis:6379
 
-# Optional credentials for MongoDB container instance and Mongo-Express
-MONGO_USERNAME=root
-MONGO_PASSWORD=example
-
 # Website URL
 # Required
 SITE_URL=http://localhost:8080
@@ -2,6 +2,6 @@
 
 Thanks for taking the time to contribute! 😃 🚀
 
-Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.
+Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
 
 We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀
@@ -4,10 +4,12 @@ services:
   db-migration:
     container_name: infisical-db-migration
     depends_on:
-      - db
+      db:
+        condition: service_healthy
     image: infisical/infisical:latest-postgres
     env_file: .env
     command: npm run migration:latest
+    pull_policy: always
     networks:
       - infisical
 
@@ -16,12 +18,13 @@ services:
     restart: unless-stopped
     depends_on:
       db:
-        condition: service_started
+        condition: service_healthy
       redis:
         condition: service_started
       db-migration:
         condition: service_completed_successfully
     image: infisical/infisical:latest-postgres
+    pull_policy: always
     env_file: .env
     ports:
       - 80:8080
@@ -52,6 +55,11 @@ services:
       - pg_data:/data/db
     networks:
       - infisical
+    healthcheck:
+      test: "pg_isready --username=${POSTGRES_USER} && psql --username=${POSTGRES_USER} --list"
+      interval: 5s
+      timeout: 10s
+      retries: 10
 
 volumes:
   pg_data:
@@ -16,49 +16,7 @@ git checkout -b MY_BRANCH_NAME
 ## Set up environment variables
 
 
-Start by creating a .env file at the root of the Infisical directory then copy the contents of the file below into the .env file.
+Start by creating a .env file at the root of the Infisical directory then copy the contents of the file linked [here](https://github.com/Infisical/infisical/blob/main/.env.example). View all available [environment variables](https://infisical.com/docs/self-hosting/configuration/envars) and guidance for each.
 
-<Accordion title=".env file content">
-```env
-# Keys
-# Required key for platform encryption/decryption ops
-ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
-
-# JWT
-# Required secrets to sign JWT tokens
-JWT_SIGNUP_SECRET=3679e04ca949f914c03332aaaeba805a
-JWT_REFRESH_SECRET=5f2f3c8f0159068dc2bbb3a652a716ff
-JWT_AUTH_SECRET=4be6ba5602e0fa0ac6ac05c3cd4d247f
-JWT_SERVICE_SECRET=f32f716d70a42c5703f4656015e76200
-
-# MongoDB
-# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
-# to the MongoDB container instance or Mongo Cloud
-# Required
-MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
-
-# Optional credentials for MongoDB container instance and Mongo-Express
-MONGO_USERNAME=root
-MONGO_PASSWORD=example
-
-# Website URL
-# Required
-SITE_URL=http://localhost:8080
-
-# Mail/SMTP
-SMTP_HOST='smtp-server'
-SMTP_PORT='1025'
-SMTP_NAME='local'
-SMTP_USERNAME='team@infisical.com'
-SMTP_PASSWORD=
-```
-</Accordion>
-
-<Warning>
-  The pre-populated environment variable values above are meant to be used in development only. They should never be used in production.
-</Warning>
-
-View all available [environment variables](https://infisical.com/docs/self-hosting/configuration/envars) and guidance for each.
-
 ## Starting Infisical for development
 
@@ -72,10 +30,7 @@ docker-compose -f docker-compose.dev.yml up --build --force-recreate
 ```
 #### Access local server
 
-Once all the services have spun up, browse to http://localhost:8080. To sign in, you may use the default credentials listed below.
+Once all the services have spun up, browse to http://localhost:8080.
 
-Email: `test@localhost.local`
-Password: `testInfisical1`
-
 #### Shutdown local server
 
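In practice, the updated step boils down to copying the example file into place. A minimal shell sketch, assuming you are working from the root of a cloned Infisical repository:

```
# Copy the repository's example environment file and then review each value
cp .env.example .env
# Guidance for every variable: https://infisical.com/docs/self-hosting/configuration/envars
```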
Binary file not shown. Before size: 1.2 MiB; after size: 332 KiB.
Binary file not shown (new file). Size: 64 KiB.
@@ -165,7 +165,6 @@
       "pages": [
         "self-hosting/overview",
         "self-hosting/configuration/requirements",
-        "self-hosting/configuration/schema-migrations",
         {
           "group": "Installation methods",
           "pages": [
@@ -175,6 +174,13 @@
           ]
         },
         "self-hosting/configuration/envars",
+        {
+          "group": "Guides",
+          "pages": [
+            "self-hosting/configuration/schema-migrations",
+            "self-hosting/guides/mongo-to-postgres"
+          ]
+        },
         "self-hosting/faq"
       ]
     },
@@ -4,7 +4,7 @@ description: "Configure environment variables for self-hosted Infisical"
 ---
 
 
-Infisical accepts all configurations via environment variables. For a basic self-hosted instance, at least `ENCRYPTION_KEY`, `AUTH_SECRET`, `DB_CONNECTION_URI` and `REDIS_URL` must be defined.
+Infisical accepts all configurations via environment variables. For a minimal self-hosted instance, at least `ENCRYPTION_KEY`, `AUTH_SECRET`, `DB_CONNECTION_URI` and `REDIS_URL` must be defined.
 However, you can configure additional settings to activate more features as needed.
 
 ## General platform
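As a concrete illustration of that minimum, a bare-bones `.env` might look like the sketch below. All values are placeholders (the encryption key is the sample development key shown earlier in this changeset; the auth secret is an invented example), so generate fresh secrets and consult the environment variable docs for the exact format each one expects.

```env
# Minimal self-hosted configuration (placeholder values, not for production)
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
AUTH_SECRET=replace-with-a-randomly-generated-secret
DB_CONNECTION_URI=postgres://infisical:infisical@localhost:5432/infisical
REDIS_URL=redis://localhost:6379
```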
@@ -166,7 +166,7 @@ description: "Use Helm chart to install Infisical on your Kubernetes cluster"
   <Step title="Access Infisical">
     After deployment, please wait for 2-5 minutes for all pods to reach a running state. Once a significant number of pods are operational, access the IP address revealed through Ingress by your load balancer.
     You can find the IP address/hostname by executing the command `kubectl get ingress`.
     
   </Step>
   <Step title="Upgrade your instance">
     To upgrade your instance of Infisical, simply update the docker image tag in your Helm values and rerun the command below.
@@ -176,8 +176,8 @@ description: "Use Helm chart to install Infisical on your Kubernetes cluster"
     ```
 
     <Tip>
-      Always back up your database before each upgrade, especially in a production environment
+      Always back up your database before each upgrade, especially in a production environment.
     </Tip>
 
   </Step>
 </Steps>
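For the upgrade step above, the image tag generally lives in the chart values. A hedged sketch follows; the exact key path depends on the chart version, so treat these names as assumptions and confirm them against the chart's default values.

```yaml
# values.yaml (sketch; key names may differ in your chart version)
infisical:
  image:
    repository: infisical/infisical
    tag: latest-postgres   # bump this tag, then rerun the helm upgrade command from the guide
```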
docs/self-hosting/guides/mongo-to-postgres.mdx (new file, 188 lines)
@@ -0,0 +1,188 @@

---
title: "Migrate Mongo to Postgres"
description: "How to migrate from MongoDB to PostgreSQL for Infisical"
---

This guide provides step-by-step instructions for migrating your Infisical instance running on MongoDB to the newly released PostgreSQL version of Infisical.
The Postgres version of Infisical is the only version that will receive feature updates and patches going forward.

<Info>
  If your deployment is using a Docker image tag that includes `postgres`, then you are already using the Postgres version of Infisical, and you can skip this guide.
</Info>

## Prerequisites

Before starting the migration, ensure you have the following command line tools installed:

- [pg_dump](https://www.postgresql.org/docs/current/app-pgdump.html)
- [pg_restore](https://www.postgresql.org/docs/current/app-pgrestore.html)
- [mongodump](https://www.mongodb.com/docs/database-tools/mongodump/)
- [mongorestore](https://www.mongodb.com/docs/database-tools/mongorestore/)
- [Docker](https://docs.docker.com/engine/install/)

## Prepare for migration

<Steps>
  <Step title="Backup Production MongoDB Data">
    While the migration script will not mutate any MongoDB production data, we recommend taking a backup of your MongoDB instance if possible.
  </Step>
  <Step title="Set Migration Mode">
    To prevent new data entries during the migration, set your Infisical instance to migration mode by setting the environment variable `MIGRATION_MODE=true` and redeploying your instance.
    This mode will block all write operations, only allowing GET requests. It also disables user logins and sets up a migration page to prevent UI interactions.
    
  </Step>
  <Step title="Start local instances of Mongo and Postgres databases">
    Start local instances of MongoDB and Postgres. These will be used in later steps to process and transform the data locally.

    To start local instances of the two databases, create a file called `docker-compose.yaml` as shown below.

    ```yaml docker-compose.yaml
    version: '3.1'

    services:
      mongodb:
        image: mongo
        restart: always
        environment:
          MONGO_INITDB_ROOT_USERNAME: root
          MONGO_INITDB_ROOT_PASSWORD: example
        ports:
          - "27017:27017"
        volumes:
          - mongodb_data:/data/db

      postgres:
        image: postgres
        restart: always
        environment:
          POSTGRES_PASSWORD: example
        ports:
          - "5432:5432"
        volumes:
          - postgres_data:/var/lib/postgresql/data

    volumes:
      mongodb_data:
      postgres_data:
    ```

    Next, run the command below in the same working directory where the `docker-compose.yaml` file resides to start both services.

    ```
    docker-compose up
    ```
  </Step>
</Steps>

## Dump MongoDB

To speed up the data transformation process, the first step involves transferring the production data from Infisical's MongoDB to a local machine.
This is achieved by creating a dump of the production database and then uploading this dumped data into a local Mongo instance.
By having a running local instance of the production database, we will significantly reduce the time it takes to run the migration script.

<Steps>
  <Step title="Dump MongoDB data to your local machine">

    ```
    mongodump --uri=<your_mongo_prod_uri> --archive="mongodump-db" --db=<db name> --excludeCollection=auditlogs
    ```
  </Step>
  <Step title="Restore this data to the local MongoDB instance">
    ```
    mongorestore --uri=mongodb://root:example@localhost:27017/ --archive="mongodump-db"
    ```
  </Step>
</Steps>

## Start the migration

Once started, the migration script will transform MongoDB data into an equivalent PostgreSQL format.

<Steps>
  <Step title="Clone Infisical Repository">
    Clone the Infisical MongoDB repository.
    ```
    git clone https://github.com/Infisical/infisical.git
    ```
  </Step>
  <Step title="Change directory">
    Once the repository has been cloned, change directory to the script folder.
    ```
    cd pg-migrator
    ```
  </Step>
  <Step title="Install dependencies">
    ```
    npm install
    ```
  </Step>
  <Step title="Execute Migration Script">
    ```
    npm run migration
    ```

    When executing the above command, you'll be asked to provide the MongoDB connection string for the database containing your production Infisical data. Since our production Mongo data has been transferred to a local Mongo instance, you should input the connection string for this local instance.

    ```
    mongodb://root:example@localhost:27017/<db-name>?authSource=admin
    ```

    <Tip>
      Remember to replace `<db-name>` with the name of the MongoDB database. If you are not sure of the name, you can use [Compass](https://www.mongodb.com/products/tools/compass) to view the available databases.
    </Tip>

    Next, you will be asked to enter the Postgres connection string for the database where the transformed data should be stored.
    Input the connection string of the local Postgres instance that was set up earlier in the guide.

    ```
    postgres://infisical:infisical@localhost/infisical?sslmode=disable
    ```
  </Step>

  <Step title="Store migration metadata">
    Once the script has completed, you will notice a new folder called `db` has been created in the `pg-migrator` folder.
    This folder contains metadata for schema mapping and can be helpful when debugging migration-related issues.
    We highly recommend making a copy of this folder in case you need assistance from the Infisical team during your migration process.

    <Info>
      The `db` folder does not contain any sensitive data.
    </Info>
  </Step>
</Steps>

## Finalizing Migration

At this stage, the data from the Mongo instance of Infisical should have been successfully converted into its Postgres equivalent.
The remaining step involves transferring the local Postgres database, which now contains all the migrated data, to your chosen production Postgres environment.
Rather than transferring the data row by row from your local machine to the production Postgres database, we will first create a dump file from the local Postgres and then upload this file to your production Postgres instance.

<Steps>
  <Step title="Dump from local PostgreSQL">
    ```
    pg_dump -h localhost -U infisical -Fc -b -v -f dumpfilelocation.sql -d infisical
    ```
  </Step>
  <Step title="Upload to production PostgreSQL">
    ```
    pg_restore --clean -v -h <host> -U <db-user-name> -d <database-name> -j 2 dumpfilelocation.sql
    ```

    <Tip>
      Remember to replace `<host>`, `<db-user-name>`, and `<database-name>` with the corresponding details of your production Postgres database.
    </Tip>
  </Step>
  <Step title="Verify Data Upload">
    Use a tool like Beekeeper Studio to confirm that the data has been successfully transferred to your production Postgres DB.
  </Step>
</Steps>

## Post-Migration Steps

After successfully migrating the data to PostgreSQL, you can proceed to deploy Infisical using your preferred deployment method.
Refer to [Infisical's self-hosting documentation](https://infisical.com/docs/self-hosting/overview) for deployment options.
Remember to use your production PostgreSQL connection string for the new deployment and transfer all [environment variables](/self-hosting/configuration/envars) from the MongoDB version of Infisical to the new version (they are all compatible).

## Additional discussion

- When you visit Infisical's [Docker Hub](https://hub.docker.com/r/infisical/infisical) page, you will notice that image tags end with `-postgres`.
  This is to indicate that this version of Infisical runs on the new Postgres backend. Any image tag that does not end in `postgres` runs on MongoDB.
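To tie the post-migration steps together, below is a hedged, Compose-style sketch of the cut-over. The service layout and placeholder values are assumptions to adapt to your own deployment; the points taken from the guide are the `-postgres` image tag and pointing `DB_CONNECTION_URI` at your production Postgres.

```yaml
# Sketch only: adapt to your deployment method and keep your existing environment values.
services:
  backend:
    image: infisical/infisical:latest-postgres   # Postgres-backed image tag
    env_file: .env                               # variables carried over from the MongoDB deployment
    environment:
      DB_CONNECTION_URI: "postgres://<user>:<password>@<production-postgres-host>:5432/<database-name>"
      REDIS_URL: "redis://<redis-host>:6379"
```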
@@ -28,7 +28,7 @@ export default function EnterEmailStep({
   incrementStep
 }: DownloadBackupPDFStepProps): JSX.Element {
   const { createNotification } = useNotificationContext();
-  const { mutateAsync } = useSendVerificationEmail();
+  const { mutateAsync, isLoading } = useSendVerificationEmail();
   const [emailError, setEmailError] = useState(false);
   const { t } = useTranslation();
 
@@ -91,6 +91,8 @@ export default function EnterEmailStep({
           className='h-14'
           colorSchema="primary"
           variant="outline_bg"
+          isLoading={isLoading}
+          isDisabled={isLoading}
         > {String(t("signup.step1-submit"))} </Button>
       </div>
     </div>
@@ -90,7 +90,7 @@ export const CreateRotationForm = ({
       <ModalContent
         title={`Secret rotation for ${provider.name}`}
         subTitle="Provide the required inputs needed for the rotation"
-        className="max-w-2xl"
+        className="max-w-2xl max-h-screen overflow-scroll my-4"
       >
         <Stepper activeStep={wizardStep} direction="horizontal" className="mb-4">
           {WIZARD_STEPS.map(({ title, description }, index) => (
|
|||||||
# This is the chart version. This version number should be incremented each time you make changes
|
# This is the chart version. This version number should be incremented each time you make changes
|
||||||
# to the chart and its templates, including the app version.
|
# to the chart and its templates, including the app version.
|
||||||
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
||||||
version: 1.0.0
|
version: 1.0.1
|
||||||
|
|
||||||
# This is the version number of the application being deployed. This version number should be
|
# This is the version number of the application being deployed. This version number should be
|
||||||
# incremented each time you make changes to the application. Versions are not expected to
|
# incremented each time you make changes to the application. Versions are not expected to
|
||||||
|
New Helm ingress template (new file, 50 lines)
@@ -0,0 +1,50 @@

{{ if .Values.ingress.enabled }}
{{- $ingress := .Values.ingress }}
{{- if and $ingress.ingressClassName (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
  {{- if not (hasKey $ingress.annotations "kubernetes.io/ingress.class") }}
  {{- $_ := set $ingress.annotations "kubernetes.io/ingress.class" $ingress.ingressClassName}}
  {{- end }}
{{- end }}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: infisical-ingress
  {{- with $ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- if and $ingress.ingressClassName (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
  ingressClassName: {{ $ingress.ingressClassName | default "nginx" }}
  {{- end }}
  {{- if $ingress.tls }}
  tls:
    {{- range $ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    - http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: {{ include "infisical.fullname" . }}
                port:
                  number: 8080
          - path: /ss-webhook
            pathType: Exact
            backend:
              service:
                name: {{ include "infisical.fullname" . }}
                port:
                  number: 8080
      {{- if $ingress.hostName }}
      host: {{ $ingress.hostName }}
      {{- end }}
{{ end }}
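The template above is driven entirely by `.Values.ingress`; a hedged example of matching values (host name and secret name are placeholders) could look like:

```yaml
# Sketch of the values the template reads; adjust for your cluster
ingress:
  enabled: true
  ingressClassName: nginx           # on clusters older than 1.18 the template converts this into the legacy annotation
  annotations: {}
  hostName: infisical.example.com   # omit to match any host
  tls:
    - hosts:
        - infisical.example.com
      secretName: infisical-tls     # omit the tls list to serve plain HTTP
```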
@@ -66,7 +66,7 @@ enum SecretEncryptionAlgo {
   AES_256_GCM = "aes-256-gcm",
 }
 
-const ENV_SLUG_LENGTH = 15;
+const ENV_SLUG_LENGTH = 500;
 
 enum SecretKeyEncoding {
   UTF8 = "utf8",
@@ -210,9 +210,9 @@ export const migrateCollection = async <
     return (await tx
       .batchInsert<Tables[K]["base"]>(postgresTableName, pgDoc as any)
       .returning(returnKeys as any)) as Pick<
       Tables[K]["base"],
       R[number]
     >[];
   });
   await postPgProcessing?.(mongooseDoc, newUserIds);
 }
@@ -230,9 +230,9 @@ export const migrateCollection = async <
     return (await tx
       .batchInsert(postgresTableName, pgDoc as any)
       .returning(returnKeys as any)) as Pick<
       Tables[K]["base"],
       R[number]
     >[];
   });
   await postPgProcessing?.(mongooseDoc, newUserIds);
 }
@@ -258,9 +258,9 @@ const main = async () => {
   try {
     dotenv.config();
 
-    process.env.MONGO_DB_URL = "mongodb://root:example@localhost:27017/test?authSource=admin"
+    // process.env.MONGO_DB_URL = "mongodb://root:example@localhost:27017/test?authSource=admin"
 
-    process.env.POSTGRES_DB_URL = "postgres://infisical:infisical@localhost/infisical?sslmode=disable"
+    // process.env.POSTGRES_DB_URL = "postgres://infisical:infisical@localhost/infisical?sslmode=disable"
 
     process.env.START_FRESH = "true";
     const prompt = promptSync({ sigint: true });
@@ -313,7 +313,7 @@ const main = async () => {
       preProcessing: async (doc) => {
         if (["64058e0ea5c55c6a8203fed7", "64155f5d75c91bf4e176eb85", "6434ff80b82e04f17008aa13"].includes(doc._id.toString())) {
           console.log("Skipping duplicate user")
           return
         }
 
         const id = uuidV4();
@@ -843,9 +843,9 @@ const main = async () => {
       await folderKv.put(folder.id, id);
       const parentId = folder?.parentId
         ? await folderKv.get(folder?.parentId).catch((e) => {
             console.log("parent folder not found==>", folder);
             throw e;
           })
         : null;
 
       pgFolder.push({
@@ -1548,8 +1548,8 @@ const main = async () => {
       returnKeys: ["id"],
       preProcessing: async (doc) => {
         // dangling identity
-        if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
+        if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
           return
         }
 
         const id = uuidV4();
@@ -1584,8 +1584,8 @@ const main = async () => {
       returnKeys: ["id"],
       preProcessing: async (doc) => {
         // dangling identity
-        if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
+        if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
           return
         }
 
         const identityUAId = await identityUaKv.get(
@@ -1617,15 +1617,15 @@ const main = async () => {
       returnKeys: ["id"],
       preProcessing: async (doc) => {
         // dangling identity
-        if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
+        if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
           return
         }
 
         await identityAccessTokenKv.put(doc._id.toString(), doc._id.toString());
         const identityUAClientSecretId = doc?.identityUniversalAuthClientSecret
           ? await identityUaClientSecKv.get(
               doc.identityUniversalAuthClientSecret.toString(),
             )
           : null;
         const identityId = await identityKv.get(doc.identity.toString());
         return {
@@ -1652,8 +1652,8 @@ const main = async () => {
       returnKeys: ["id"],
       preProcessing: async (doc) => {
         // dangling identity
-        if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
+        if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
           return
         }
 
         const id = uuidV4();
@@ -1687,8 +1687,8 @@ const main = async () => {
       returnKeys: ["id"],
       preProcessing: async (doc) => {
         // dangling identity
-        if (!await identityKv.get(doc.identity.toString()).catch(() => null)){
+        if (!await identityKv.get(doc.identity.toString()).catch(() => null)) {
           return
         }
 
         const id = uuidV4();
@@ -2317,8 +2317,8 @@ const main = async () => {
 
       const statusChangeBy = doc.statusChangeBy
         ? await projectMembKv
            .get(doc.statusChangeBy.toString())
            .catch(() => null)
         : null;
       return {
         id,
@@ -2454,7 +2454,7 @@ const main = async () => {
         secretCommentCiphertext:
           commit.newVersion.secretCommentCiphertext ||
           secret.secretCommentCiphertext,
         secretVersion,
         createdAt: new Date((doc as any).createdAt),
         updatedAt: new Date((doc as any).updatedAt),
       };