Compare commits


61 Commits

Author SHA1 Message Date
8a0fd62785 update docker compose name 2024-02-14 15:19:47 -05:00
c69601c14e Merge pull request #1402 from Infisical/identity-access-token-ttl
Patch identity access token + client secret TTL
2024-02-14 12:19:45 -08:00
faf6323a58 Patch identity ttl conversion 2024-02-14 12:09:56 -08:00
b82d1b6a5d update docker compose on readme 2024-02-13 19:50:58 -05:00
3dcda44c50 Merge pull request #1385 from akhilmhdh/feat/init-container
feat: changed docker compose to use init container pattern for running migration
2024-02-13 19:46:39 -05:00
f320b08ca8 rename docker compose and bring back make up-dev 2024-02-13 19:44:42 -05:00
df6e5674cf patch breaking change ci 2024-02-13 19:10:28 -05:00
6bac143a8e test without pg connection 2024-02-13 19:06:47 -05:00
38b93e499f logs on health check 2024-02-13 18:58:13 -05:00
a521538010 show all logs 2024-02-13 18:47:12 -05:00
8cc2553452 test workflow no db connection 2024-02-13 18:43:43 -05:00
b1cb9de001 correct -d mode 2024-02-13 18:39:28 -05:00
036256b350 add back -d mode 2024-02-13 18:37:04 -05:00
d3a06b82e6 update health check 2024-02-13 18:34:15 -05:00
87436cfb57 Update check-api-for-breaking-changes.yml 2024-02-13 18:04:49 -05:00
5c58a4d1a3 added signup event and restyled admin flow 2024-02-13 13:15:54 -08:00
03a91b2c59 Merge pull request #1387 from akhilmhdh/chore/doc-openapi
chore: changed mintlify to directly get from prod openapi
2024-02-13 00:42:47 -05:00
751361bd54 add new propety to api 2024-02-13 00:38:59 -05:00
b4b88daf36 Revert "test breaking change"
This reverts commit 6546740bd9.
2024-02-13 00:37:49 -05:00
6546740bd9 test breaking change 2024-02-13 00:34:31 -05:00
b32558c66f add . 2024-02-13 00:29:49 -05:00
effd30857e fix typo 2024-02-12 22:45:35 -05:00
60998c8944 Merge pull request #1369 from akhilmhdh/chore/feature-x-guide
feat: added guides for new backend development
2024-02-12 22:35:16 -05:00
3c4d9fd4a9 delete docs in backend 2024-02-12 22:34:36 -05:00
ad70c783e8 add backend guide to contributor 2024-02-12 22:33:55 -05:00
7347362738 rephrase new feature development guide 2024-02-12 21:49:15 -05:00
4b7f2e808b Update overview.mdx 2024-02-12 15:06:56 -08:00
57f9d13189 Merge pull request #1397 from Infisical/infisical-jenkins
Rewrite Infisical Jenkins docs
2024-02-12 18:02:11 -05:00
bd2e8ac922 rewrite jenkins docs 2024-02-12 18:01:20 -05:00
79694750af Remove signup disable check for SAML 2024-02-12 11:43:55 -08:00
03db367a4e Merge pull request #1396 from Infisical/azure-saml
Add disableRequestedAuthnContext for azure saml
2024-02-12 10:40:19 -08:00
b0fb848a92 Add disableRequestedAuthnContext for azure saml 2024-02-12 10:35:38 -08:00
4fdfcd50dc feat: changed check-api-breaking with oasdiff 2024-02-12 22:32:02 +05:30
db205b855a feat: removed mongo comments from compose file 2024-02-12 14:52:19 +05:30
e707f0d235 feat: added description and security over api written in docs 2024-02-12 14:19:49 +05:30
27f4225c44 Merge pull request #1391 from Infisical/ecs-docs
ECS docs with agent
2024-02-11 19:17:01 -05:00
28a9d8e739 complete ecs dcs 2024-02-11 19:14:14 -05:00
a1321e4749 aws ecs with agent docs 2024-02-09 22:31:29 -05:00
d4db01bbde Merge pull request #1388 from Infisical/azure-ad
Correct spInitiated spelling in Azure SAML
2024-02-09 12:31:41 -08:00
39634b8aae Correct spInitiated spelling in Azure SAML 2024-02-09 12:17:29 -08:00
4815ff13ee Update SAML SSO docs to include enforce SAML SSO toggle 2024-02-09 10:36:23 -08:00
fb503756d9 Merge pull request #1372 from Infisical/org-based-auth
Org-Level Auth Enforcement for SAML Orgs and Enhancements for SAML SSO
2024-02-09 13:15:08 -05:00
48a97fb39d chore: changed mintlify to directly get from prod openapi 2024-02-09 22:53:27 +05:30
eeaee4409c revert swap 2024-02-09 12:08:11 -05:00
8d457bb0bf swap src des 2024-02-09 12:01:21 -05:00
d8ea26feb7 feat: changed docker compose to use init container pattern for migration 2024-02-09 13:12:25 +05:30
50c0fae557 continue ecs docs 2024-02-08 20:29:38 -05:00
70e083bae0 feat: open api-diff added detach mode 2024-02-08 22:20:01 +05:30
6a943e275a Updated open-api diff gh action host docker ip 2024-02-08 22:05:35 +05:30
526dc6141b remove -d mode on docker compose 2024-02-08 11:04:26 -05:00
dcab9dcdda update docker compose up 2024-02-08 10:50:25 -05:00
1b0591def8 fix gha breaking change 2024-02-07 16:17:12 -05:00
4b4305bddc Merge pull request #1375 from akhilmhdh/feat/api-diff
feat: github workflow for api diff check, ts check and lint check on PR
2024-02-07 15:26:04 -05:00
fcaff76afa rename git hub action 2024-02-07 14:34:02 -05:00
ae9eb20189 set license server url default 2024-02-07 13:51:55 -05:00
3905d16a7c fix license server axios typo 2024-02-07 13:48:09 -05:00
ecafdb0d01 patch check for version 2024-02-07 13:11:53 -05:00
8313245ae1 feat: github workflow for api diff check, ts check and lint check on PR 2024-02-07 15:13:08 +05:30
dc146d0883 feat: fixed spelling errors 2024-02-05 22:28:40 +05:30
24dd79b566 feat: added guides for new backend development 2024-02-05 16:31:03 +05:30
44f087991c ECS documentation 2024-01-31 23:53:41 -05:00
86 changed files with 1096 additions and 5589 deletions

View File

@ -3,16 +3,18 @@
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# Required
DB_CONNECTION_URI=postgres://infisical:infisical@db:5432/infisical
# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
# MongoDB
# Backend will connect to the MongoDB instance at connection string MONGO_URL which can either be a ref
# to the MongoDB container instance or Mongo Cloud
# Required
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# Redis
REDIS_URL=redis://redis:6379

.env.migration.example Normal file
View File

@ -0,0 +1 @@
DB_CONNECTION_URI=

View File

@ -0,0 +1,75 @@
name: "Check API For Breaking Changes"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/src/server/routes/**"
jobs:
check-be-api-changes:
name: Check API Changes
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Checkout source
uses: actions/checkout@v3
# - name: Setup Node 20
# uses: actions/setup-node@v3
# with:
# node-version: "20"
# uncomment this when testing locally using nektos/act
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 📦Build the latest image
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker-compose -f docker-compose.prod.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
JWT_AUTH_SECRET: something-random
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
run: |
docker-compose -f "docker-compose.pg.yml" down
docker stop infisical-api
docker remove infisical-api

View File

@ -1,43 +0,0 @@
name: "Check Backend Pull Request"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
jobs:
check-be-pr:
name: Check
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "16"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: 📦 Install dependencies
run: npm ci --only-production
working-directory: backend
# - name: 🧪 Run tests
# run: npm run test:ci
# working-directory: backend
# - name: 📁 Upload test results
# uses: actions/upload-artifact@v3
# if: always()
# with:
# name: be-test-results
# path: |
# ./backend/reports
# ./backend/coverage
- name: 🏗️ Run build
run: npm run build
working-directory: backend

View File

@ -0,0 +1,35 @@
name: "Check Backend PR types and lint"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
jobs:
check-be-pr:
name: Check TS and Lint
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: Install dependencies
run: npm install
working-directory: backend
- name: Run type check
run: npm run type:check
working-directory: backend
- name: Run lint check
run: npm run lint
working-directory: backend

.gitignore vendored
View File

@ -6,7 +6,7 @@ node_modules
.env.gamma
.env.prod
.env.infisical
.env.migration
*~
*.swp
*.swo

View File

@ -5,16 +5,10 @@ push:
docker-compose -f docker-compose.yml push
up-dev:
docker-compose -f docker-compose.dev.yml up --build
up-pg-dev:
docker compose -f docker-compose.pg.yml up --build
i-dev:
infisical run -- docker-compose -f docker-compose.dev.yml up --build
docker compose -f docker-compose.dev.yml up --build
up-prod:
docker-compose -f docker-compose.yml up --build
docker-compose -f docker-compose.prod.yml up --build
down:
docker-compose down

View File

@ -84,13 +84,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Create an account at `http://localhost:80`

View File

@ -3,13 +3,9 @@ import dotenv from "dotenv";
import path from "path";
import knex from "knex";
import { writeFileSync } from "fs";
import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
dotenv.config({
path: path.join(__dirname, "../.env"),
debug: true
path: path.join(__dirname, "../../.env.migration")
});
const db = knex({
@ -94,17 +90,7 @@ const main = async () => {
.orderBy("table_name")
).filter((el) => !el.tableName.includes("_migrations"));
console.log("Select a table to generate schema");
console.table(tables);
console.log("all: all tables");
const selectedTables = prompt("Type table numbers comma seperated: ");
const tableNumbers =
selectedTables !== "all" ? selectedTables.split(",").map((el) => Number(el)) : [];
for (let i = 0; i < tables.length; i += 1) {
// skip if not desired table
if (selectedTables !== "all" && !tableNumbers.includes(i)) continue;
const { tableName } = tables[i];
const columns = await db(tableName).columnInfo();
const columnNames = Object.keys(columns);
@ -124,16 +110,16 @@ const main = async () => {
if (colInfo.nullable) {
ztype = ztype.concat(".nullable().optional()");
}
schema = schema.concat(`${!schema ? "\n" : ""} ${columnName}: ${ztype},\n`);
schema = schema.concat(
`${!schema ? "\n" : ""} ${columnName}: ${ztype}${colNum === columnNames.length - 1 ? "" : ","}\n`
);
}
const dashcase = tableName.split("_").join("-");
const pascalCase = tableName
.split("_")
.reduce(
(prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`,
""
);
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
`// Code generated by automation script, DO NOT EDIT.
@ -152,15 +138,6 @@ export type T${pascalCase}Insert = Omit<T${pascalCase}, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<T${pascalCase}, TImmutableDBKeys>>;
`
);
// const file = readFileSync(path.join(__dirname, "../src/db/schemas/index.ts"), "utf8");
// if (!file.includes(`export * from "./${dashcase};"`)) {
// appendFileSync(
// path.join(__dirname, "../src/db/schemas/index.ts"),
// `\nexport * from "./${dashcase}";`,
// "utf8"
// );
// }
}
process.exit(0);

View File

@ -5,9 +5,9 @@ import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
// Update with your config settings.
// Update with your config settings. .
dotenv.config({
path: path.join(__dirname, "../../.env"),
path: path.join(__dirname, "../../../.env.migration"),
debug: true
});
export default {

View File

@ -11,6 +11,13 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/:workspaceId/secret-snapshots",
schema: {
description: "Return project secret snapshots ids",
security: [
{
apiKeyAuth: [],
bearerAuth: []
}
],
params: z.object({
workspaceId: z.string().trim()
}),
@ -74,6 +81,13 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/:workspaceId/audit-logs",
schema: {
description: "Return audit logs",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim()
}),
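
The same pattern repeats across the routers touched in this changeset: each public route gains a `description` and a `security` array so the generated OpenAPI document advertises which auth methods the endpoint accepts. A minimal sketch of the pattern, with a hypothetical route and response shape (not taken from this diff):

```ts
import { z } from "zod";

// FastifyZodProvider is Infisical's typed Fastify instance (ambient type).
export const registerExampleRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/:workspaceId/example",
    schema: {
      // Surfaced as the endpoint summary in the generated OpenAPI spec.
      description: "Return example data for a project",
      // Keys must match the securitySchemes registered with the swagger
      // plugin (bearerAuth / apiKeyAuth after this changeset).
      security: [{ bearerAuth: [], apiKeyAuth: [] }],
      params: z.object({ workspaceId: z.string().trim() }),
      response: { 200: z.object({ items: z.string().array() }) }
    },
    handler: async () => ({ items: [] })
  });
};
```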

View File

@ -19,7 +19,6 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
type TSAMLConfig = {
callbackUrl: string;
@ -28,6 +27,7 @@ type TSAMLConfig = {
cert: string;
audience: string;
wantAuthnResponseSigned?: boolean;
disableRequestedAuthnContext?: boolean;
};
export const registerSamlRouter = async (server: FastifyZodProvider) => {
@ -67,7 +67,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
throw new BadRequestError({ message: "Failed to authenticate with SAML SSO" });
const samlConfig: TSAMLConfig = {
callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${samlConfigId}`,
callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${ssoConfig.id}`,
entryPoint: ssoConfig.entryPoint,
issuer: ssoConfig.issuer,
cert: ssoConfig.cert,
@ -77,7 +77,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
samlConfig.wantAuthnResponseSigned = false;
}
if (ssoConfig.authProvider === SamlProviders.AZURE_SAML) {
if (req.body.RelayState && JSON.parse(req.body.RelayState).spIntiaited) {
samlConfig.disableRequestedAuthnContext = true;
if (req.body?.RelayState && JSON.parse(req.body.RelayState).spInitiated) {
samlConfig.audience = `spn:${ssoConfig.issuer}`;
}
}
@ -92,7 +93,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
// eslint-disable-next-line
async (req, profile, cb) => {
try {
const serverCfg = await getServerCfg();
if (!profile) throw new BadRequestError({ message: "Missing profile" });
const { firstName } = profile;
const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved
@ -105,7 +105,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
email,
firstName: profile.firstName as string,
lastName: profile.lastName as string,
isSignupAllowed: Boolean(serverCfg.allowSignUp),
relayState: (req.body as { RelayState?: string }).RelayState,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string
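
A small illustration of why the two fixes in this hunk matter: the old key was misspelled, so the Azure-specific audience was never applied, and the request body was read without a guard. The RelayState payload below is a hypothetical example of what the service provider echoes back:

```ts
// Hypothetical RelayState as sent back by the SP on an SP-initiated login.
const relayState: string | undefined = JSON.stringify({ spInitiated: true });

// Old check: misspelled key, so this was always falsy.
const oldMatch = relayState && JSON.parse(relayState).spIntiaited; // undefined

// Patched check: optional access in the route plus the corrected key.
const newMatch = Boolean(relayState && JSON.parse(relayState).spInitiated); // true
```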

View File

@ -57,6 +57,13 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/:secretSnapshotId/rollback",
schema: {
description: "Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
apiKeyAuth: [],
bearerAuth: []
}
],
params: z.object({
secretSnapshotId: z.string().trim()
}),

View File

@ -44,7 +44,7 @@ type TLicenseServiceFactoryDep = {
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login";
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/licence-login";
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login";
const FEATURE_CACHE_KEY = (orgId: string, projectId?: string) => `${orgId}-${projectId || ""}`;
export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }: TLicenseServiceFactoryDep) => {
@ -92,7 +92,7 @@ export const licenseServiceFactory = ({ orgDAL, permissionService, licenseDAL }:
// else it would reach catch statement
isValidLicense = true;
} catch (error) {
logger.error(`init-license: encountered an error when init license [error]`, error);
logger.error(error, `init-license: encountered an error when init license`);
}
};
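
The argument swap in `logger.error` is deliberate: pino serializes an Error passed as the first argument, whereas extra arguments after the message string are only used for printf-style interpolation. A short sketch (the error itself is hypothetical):

```ts
import pino from "pino";

const logger = pino();
const error = new Error("license server unreachable"); // hypothetical failure

// Message-first: with no %s/%o placeholder in the message, the Error is
// effectively dropped, so the log line loses the stack trace.
logger.error(`init-license: encountered an error when init license [error]`, error);

// Error-first (what the patch switches to): pino serializes the Error under
// the `err` key and keeps the message, preserving the stack in the output.
logger.error(error, `init-license: encountered an error when init license`);
```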

View File

@ -300,19 +300,9 @@ export const samlConfigServiceFactory = ({
};
};
const samlLogin = async ({
firstName,
email,
lastName,
authProvider,
orgId,
relayState,
isSignupAllowed
}: TSamlLoginDTO) => {
const samlLogin = async ({ firstName, email, lastName, authProvider, orgId, relayState }: TSamlLoginDTO) => {
const appCfg = getConfig();
let user = await userDAL.findUserByEmail(email);
const isSamlSignUpDisabled = !isSignupAllowed && !user;
if (isSamlSignUpDisabled) throw new BadRequestError({ message: "User signup disabled", name: "Saml SSO login" });
const organization = await orgDAL.findOrgById(orgId);
if (!organization) throw new BadRequestError({ message: "Org not found" });

View File

@ -41,7 +41,6 @@ export type TSamlLoginDTO = {
lastName?: string;
authProvider: string;
orgId: string;
isSignupAllowed: boolean;
// saml thingy
relayState?: string;
};

View File

@ -95,7 +95,7 @@ const envSchema = z
SECRET_SCANNING_GIT_APP_ID: zpStr(z.string().optional()),
SECRET_SCANNING_PRIVATE_KEY: zpStr(z.string().optional()),
// LICENCE
LICENSE_SERVER_URL: zpStr(z.string().optional()),
LICENSE_SERVER_URL: zpStr(z.string().optional().default("https://portal.infisical.com")),
LICENSE_SERVER_KEY: zpStr(z.string().optional()),
LICENSE_KEY: zpStr(z.string().optional()),
STANDALONE_MODE: z

View File

@ -25,13 +25,13 @@ export const fastifySwagger = fp(async (fastify) => {
],
components: {
securitySchemes: {
bearer: {
bearerAuth: {
type: "http",
scheme: "bearer",
bearerFormat: "JWT",
description: "A service token in Infisical"
description: "An access token in Infisical"
},
apiKey: {
apiKeyAuth: {
type: "apiKey",
in: "header",
name: "X-API-Key",

View File

@ -72,7 +72,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
200: z.object({
message: z.string(),
user: UsersSchema,
token: z.string()
token: z.string(),
new: z.string()
})
}
},
@ -107,7 +108,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
return {
message: "Successfully set up admin account",
user: user.user,
token: token.access
token: token.access,
new: "123"
};
}
});

View File

@ -5,6 +5,7 @@ export const registerIdentityAccessTokenRouter = async (server: FastifyZodProvid
url: "/token/renew",
method: "POST",
schema: {
description: "Renew access token",
body: z.object({
accessToken: z.string().trim()
}),

View File

@ -11,6 +11,12 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
url: "/",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Create identity",
security: [
{
bearerAuth: []
}
],
body: z.object({
name: z.string().trim(),
organizationId: z.string().trim(),
@ -52,6 +58,12 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
url: "/:identityId",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Update identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
@ -95,6 +107,12 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
url: "/:identityId",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Delete identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),

View File

@ -24,6 +24,7 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
url: "/universal-auth/login",
method: "POST",
schema: {
description: "Login with Universal Auth",
body: z.object({
clientId: z.string().trim(),
clientSecret: z.string().trim()
@ -67,6 +68,12 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
method: "POST",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Attach Universal Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim()
}),
@ -141,6 +148,12 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
method: "PATCH",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update Universal Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
@ -209,6 +222,12 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
method: "GET",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Retrieve Universal Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
@ -246,6 +265,12 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
method: "POST",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Create Universal Auth Client Secret for identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
@ -291,6 +316,12 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
method: "GET",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "List Universal Auth Client Secrets for identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
@ -327,6 +358,12 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
method: "POST",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Revoke Universal Auth Client Secrets for identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string(),
clientSecretId: z.string()

View File

@ -10,6 +10,13 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
url: "/:workspaceId/environments",
method: "POST",
schema: {
description: "Create environment",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim()
}),
@ -58,6 +65,13 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
url: "/:workspaceId/environments/:id",
method: "PATCH",
schema: {
description: "Update environment",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim(),
id: z.string().trim()
@ -114,6 +128,13 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
url: "/:workspaceId/environments/:id",
method: "DELETE",
schema: {
description: "Delete environment",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim(),
id: z.string().trim()

View File

@ -10,6 +10,13 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
url: "/:workspaceId/memberships",
method: "GET",
schema: {
description: "Return project user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim()
}),
@ -96,6 +103,13 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
url: "/:workspaceId/memberships/:membershipId",
method: "PATCH",
schema: {
description: "Update project user membership",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim(),
membershipId: z.string().trim()
@ -141,6 +155,13 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
url: "/:workspaceId/memberships/:membershipId",
method: "DELETE",
schema: {
description: "Delete project user membership",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim(),
membershipId: z.string().trim()

View File

@ -11,6 +11,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
url: "/",
method: "POST",
schema: {
description: "Create folders",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
body: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),
@ -57,6 +64,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
url: "/:folderId",
method: "PATCH",
schema: {
description: "Update folder",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
// old way this was name
folderId: z.string()
@ -109,6 +123,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
url: "/:folderId",
method: "DELETE",
schema: {
description: "Delete a folder",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
folderId: z.string()
}),
@ -158,6 +179,13 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
url: "/",
method: "GET",
schema: {
description: "Get folders",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
querystring: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),

View File

@ -11,6 +11,13 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
url: "/",
method: "POST",
schema: {
description: "Create secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
body: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),
@ -65,6 +72,13 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
url: "/:secretImportId",
method: "PATCH",
schema: {
description: "Update secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
secretImportId: z.string().trim()
}),
@ -128,6 +142,13 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
url: "/:secretImportId",
method: "DELETE",
schema: {
description: "Delete secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
secretImportId: z.string().trim()
}),
@ -181,6 +202,13 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
url: "/",
method: "GET",
schema: {
description: "Get secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
querystring: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),

View File

@ -10,6 +10,13 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
url: "/:orgId/identity-memberships",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Return organization identity memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
orgId: z.string().trim()
}),

View File

@ -46,6 +46,12 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
url: "/:projectId/identity-memberships/:identityId",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update project identity memberships",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().trim(),
identityId: z.string().trim()
@ -77,6 +83,12 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
url: "/:projectId/identity-memberships/:identityId",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Delete project identity memberships",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().trim(),
identityId: z.string().trim()
@ -104,6 +116,12 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
url: "/:projectId/identity-memberships",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Return project identity memberships",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().trim()
}),

View File

@ -9,6 +9,13 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/:organizationId/memberships",
schema: {
description: "Return organization user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
organizationId: z.string().trim()
}),
@ -46,6 +53,13 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/:organizationId/workspaces",
schema: {
description: "Return projects in organization that user is part of",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
organizationId: z.string().trim()
}),
@ -84,6 +98,13 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
method: "PATCH",
url: "/:organizationId/memberships/:membershipId",
schema: {
description: "Update organization user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({ organizationId: z.string().trim(), membershipId: z.string().trim() }),
body: z.object({
role: z.string().trim()
@ -113,6 +134,13 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
method: "DELETE",
url: "/:organizationId/memberships/:membershipId",
schema: {
description: "Delete organization user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({ organizationId: z.string().trim(), membershipId: z.string().trim() }),
response: {
200: z.object({

View File

@ -10,6 +10,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
url: "/:workspaceId/encrypted-key",
method: "GET",
schema: {
description: "Return encrypted project key",
security: [
{
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim()
}),

View File

@ -21,6 +21,12 @@ export const registerServiceTokenRouter = async (server: FastifyZodProvider) =>
method: "GET",
onRequest: verifyAuth([AuthMode.SERVICE_TOKEN]),
schema: {
description: "Return Infisical Token data",
security: [
{
bearerAuth: []
}
],
response: {
200: ServiceTokensSchema.merge(
z.object({

View File

@ -71,6 +71,12 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/me/organizations",
schema: {
description: "Return organizations that current user is part of",
security: [
{
apiKeyAuth: []
}
],
response: {
200: z.object({
organizations: OrganizationsSchema.array()
@ -179,6 +185,12 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/me",
schema: {
description: "Retrieve the current user on the request",
security: [
{
apiKeyAuth: []
}
],
response: {
200: z.object({
user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true }))

View File

@ -38,6 +38,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
url: "/raw",
method: "GET",
schema: {
description: "List secrets",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
querystring: z.object({
workspaceId: z.string().trim().optional(),
environment: z.string().trim().optional(),
@ -121,6 +128,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
url: "/raw/:secretName",
method: "GET",
schema: {
description: "Get a secret by name",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
secretName: z.string().trim()
}),
@ -204,6 +218,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
url: "/raw/:secretName",
method: "POST",
schema: {
description: "Create secret",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
secretName: z.string().trim()
}),
@ -274,6 +295,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
url: "/raw/:secretName",
method: "PATCH",
schema: {
description: "Update secret",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
secretName: z.string().trim()
}),
@ -341,6 +369,13 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
url: "/raw/:secretName",
method: "DELETE",
schema: {
description: "Delete secret",
security: [
{
bearerAuth: [],
apiKeyAuth: []
}
],
params: z.object({
secretName: z.string().trim()
}),

View File

@ -159,6 +159,17 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
userAgent
});
void server.services.telemetry.sendLoopsEvent(user.email, user.firstName || "", user.lastName || "");
void server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.UserSignedUp,
distinctId: user.email,
properties: {
email: user.email,
attributionSource: "Team Invite"
}
});
void res.setCookie("jid", refreshToken, {
httpOnly: true,
path: "/",

View File

@ -35,12 +35,12 @@ export const identityAccessTokenServiceFactory = ({
}
// ttl check
if (accessTokenTTL > 0) {
if (Number(accessTokenTTL) > 0) {
const currentDate = new Date();
if (accessTokenLastRenewedAt) {
// access token has been renewed
const accessTokenRenewed = new Date(accessTokenLastRenewedAt);
const ttlInMilliseconds = accessTokenTTL * 1000;
const ttlInMilliseconds = Number(accessTokenTTL) * 1000;
const expirationDate = new Date(accessTokenRenewed.getTime() + ttlInMilliseconds);
if (currentDate > expirationDate)
@ -50,7 +50,7 @@ export const identityAccessTokenServiceFactory = ({
} else {
// access token has never been renewed
const accessTokenCreated = new Date(accessTokenCreatedAt);
const ttlInMilliseconds = accessTokenTTL * 1000;
const ttlInMilliseconds = Number(accessTokenTTL) * 1000;
const expirationDate = new Date(accessTokenCreated.getTime() + ttlInMilliseconds);
if (currentDate > expirationDate)
@ -61,9 +61,9 @@ export const identityAccessTokenServiceFactory = ({
}
// max ttl checks
if (accessTokenMaxTTL > 0) {
if (Number(accessTokenMaxTTL) > 0) {
const accessTokenCreated = new Date(accessTokenCreatedAt);
const ttlInMilliseconds = accessTokenMaxTTL * 1000;
const ttlInMilliseconds = Number(accessTokenMaxTTL) * 1000;
const currentDate = new Date();
const expirationDate = new Date(accessTokenCreated.getTime() + ttlInMilliseconds);
@ -72,7 +72,7 @@ export const identityAccessTokenServiceFactory = ({
message: "Failed to renew MI access token due to Max TTL expiration"
});
const extendToDate = new Date(currentDate.getTime() + accessTokenTTL);
const extendToDate = new Date(currentDate.getTime() + Number(accessTokenTTL));
if (extendToDate > expirationDate)
throw new UnauthorizedError({
message: "Failed to renew MI access token past its Max TTL expiration"

View File

@ -69,9 +69,9 @@ export const identityUaServiceFactory = ({
if (!validClientSecretInfo) throw new UnauthorizedError();
const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
if (clientSecretTTL > 0) {
if (Number(clientSecretTTL) > 0) {
const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
const ttlInMilliseconds = clientSecretTTL * 1000;
const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
const currentDate = new Date();
const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);
@ -124,7 +124,10 @@ export const identityUaServiceFactory = ({
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
{
expiresIn: identityAccessToken.accessTokenMaxTTL === 0 ? undefined : identityAccessToken.accessTokenMaxTTL
expiresIn:
Number(identityAccessToken.accessTokenMaxTTL) === 0
? undefined
: Number(identityAccessToken.accessTokenMaxTTL)
}
);
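
The same string-vs-number issue applies to the signed token: `accessTokenMaxTTL === 0` never matches a runtime string value, and jsonwebtoken treats a numeric `expiresIn` as seconds while a string is handed to the `ms` parser (bare digits meaning milliseconds). A sketch with made-up values and secret:

```ts
import jwt from "jsonwebtoken";

// String at runtime despite the number type (bigint column).
const accessTokenMaxTTL = "2592000" as unknown as number;

console.log(accessTokenMaxTTL === 0); // false; also false when the runtime value is the string "0"

// Normalizing first keeps the intended semantics: a number of seconds,
// or undefined ("no expiry") when the max TTL is zero.
const expiresIn = Number(accessTokenMaxTTL) === 0 ? undefined : Number(accessTokenMaxTTL);

const token = jwt.sign({ identityId: "example-id" }, "example-secret", {
  expiresIn // 2592000 seconds (~30 days), not "2592000" parsed as milliseconds
});
```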

View File

@ -581,7 +581,7 @@ export const secretServiceFactory = ({
secretType = SecretType.Shared;
}
const secret = await (typeof version === undefined
const secret = await (version === undefined
? secretDAL.findOne({
folderId,
type: secretType,
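
A note on the one-character fix above: `typeof` always yields a string such as `"undefined"`, so comparing it against the value `undefined` can never be true, and the branch it guarded never ran. A minimal illustration:

```ts
// Hypothetical request input: no specific secret version requested.
const version: number | undefined = undefined;

// Old guard: `typeof version === undefined` compares the string "undefined"
// with the value undefined, so it is always false.
console.log(typeof version); // "undefined" (a string)

// Patched guard: true exactly when no version was supplied.
const useLatest = version === undefined;
console.log(useLatest); // true
```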

View File

@ -1,4 +1,4 @@
version: '3'
version: "3.9"
services:
nginx:
@ -10,36 +10,84 @@ services:
volumes:
- ./nginx/default.dev.conf:/etc/nginx/conf.d/default.conf:ro
depends_on:
- frontend
- backend
networks:
- infisical-dev
- frontend
backend:
container_name: infisical-dev-backend
restart: unless-stopped
db:
image: postgres:14-alpine
ports:
- "5432:5432"
volumes:
- postgres-data:/var/lib/postgresql/data
environment:
POSTGRES_PASSWORD: infisical
POSTGRES_USER: infisical
POSTGRES_DB: infisical
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
volumes:
- redis_data:/data
redis-commander:
container_name: infisical-dev-redis-commander
image: rediscommander/redis-commander
restart: always
depends_on:
- mongo
- smtp-server
- redis
environment:
- REDIS_HOSTS=local:redis:6379
ports:
- "8085:8081"
db-test:
profiles: ["test"]
image: postgres:14-alpine
ports:
- "5430:5432"
environment:
POSTGRES_PASSWORD: infisical
POSTGRES_USER: infisical
POSTGRES_DB: infisical-test
db-migration:
container_name: infisical-db-migration
depends_on:
- db
build:
context: ./backend
dockerfile: Dockerfile
volumes:
- ./backend/src:/app/src
- ./backend/nodemon.json:/app/nodemon.json
- /app/node_modules
- ./backend/api-documentation.json:/app/api-documentation.json
- ./backend/swagger.ts:/app/swagger.ts
command: npm run dev
dockerfile: Dockerfile.dev
env_file: .env
environment:
- DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
command: npm run migration:latest
backend:
container_name: infisical-dev-api
build:
context: ./backend
dockerfile: Dockerfile.dev
depends_on:
db:
condition: service_started
redis:
condition: service_started
db-migration:
condition: service_completed_successfully
env_file:
- .env
ports:
- 4000:4000
environment:
- NODE_ENV=development
- MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
networks:
- infisical-dev
extra_hosts:
- "host.docker.internal:host-gateway"
- DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
volumes:
- ./backend/src:/app/src
frontend:
container_name: infisical-dev-frontend
@ -55,81 +103,31 @@ services:
env_file: .env
environment:
- NEXT_PUBLIC_ENV=development
- INFISICAL_TELEMETRY_ENABLED=${TELEMETRY_ENABLED}
networks:
- infisical-dev
- INFISICAL_TELEMETRY_ENABLED=false
mongo:
image: mongo
container_name: infisical-dev-mongo
pgadmin:
image: dpage/pgadmin4
restart: always
env_file: .env
environment:
- MONGO_INITDB_ROOT_USERNAME=root
- MONGO_INITDB_ROOT_PASSWORD=example
volumes:
- mongo-data:/data/db
networks:
- infisical-dev
mongo-express:
container_name: infisical-dev-mongo-express
image: mongo-express
restart: always
depends_on:
- mongo
env_file: .env
environment:
- ME_CONFIG_MONGODB_ADMINUSERNAME=root
- ME_CONFIG_MONGODB_ADMINPASSWORD=example
- ME_CONFIG_MONGODB_URL=mongodb://root:example@mongo:27017/
PGADMIN_DEFAULT_EMAIL: admin@example.com
PGADMIN_DEFAULT_PASSWORD: pass
ports:
- 8081:8081
networks:
- infisical-dev
- 5050:80
depends_on:
- db
smtp-server:
container_name: infisical-dev-smtp-server
image: lytrax/mailhog:latest # https://github.com/mailhog/MailHog/issues/353#issuecomment-821137362
restart: always
logging:
driver: 'none' # disable saving logs
driver: "none" # disable saving logs
ports:
- 1025:1025 # SMTP server
- 8025:8025 # Web UI
networks:
- infisical-dev
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
volumes:
- redis_data:/data
networks:
- infisical-dev
redis-commander:
container_name: infisical-dev-redis-commander
image: rediscommander/redis-commander
restart: always
depends_on:
- redis
environment:
- REDIS_HOSTS=local:redis:6379
ports:
- "8085:8081"
networks:
- infisical-dev
volumes:
mongo-data:
postgres-data:
driver: local
redis_data:
driver: local
networks:
infisical-dev:

View File

@ -1,143 +0,0 @@
version: "3.9"
services:
nginx:
container_name: infisical-dev-nginx
image: nginx
restart: always
ports:
- 8080:80
volumes:
- ./nginx/default.dev.conf:/etc/nginx/conf.d/default.conf:ro
depends_on:
- backend
- frontend
db:
image: postgres:14-alpine
ports:
- "5432:5432"
volumes:
- postgres-data:/var/lib/postgresql/data
environment:
POSTGRES_PASSWORD: infisical
POSTGRES_USER: infisical
POSTGRES_DB: infisical
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
volumes:
- redis_data:/data
redis-commander:
container_name: infisical-dev-redis-commander
image: rediscommander/redis-commander
restart: always
depends_on:
- redis
environment:
- REDIS_HOSTS=local:redis:6379
ports:
- "8085:8081"
db-test:
profiles: ["test"]
image: postgres:14-alpine
ports:
- "5430:5432"
environment:
POSTGRES_PASSWORD: infisical
POSTGRES_USER: infisical
POSTGRES_DB: infisical-test
backend:
container_name: infisical-dev-api
build:
context: ./backend
dockerfile: Dockerfile.dev
depends_on:
- db
env_file:
- .env
environment:
- NODE_ENV=development
- DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
volumes:
- ./backend/src:/app/src
frontend:
container_name: infisical-dev-frontend
restart: unless-stopped
depends_on:
- backend
build:
context: ./frontend
dockerfile: Dockerfile.dev
volumes:
- ./frontend/src:/app/src/ # mounted whole src to avoid missing reload on new files
- ./frontend/public:/app/public
env_file: .env
environment:
- NEXT_PUBLIC_ENV=development
- INFISICAL_TELEMETRY_ENABLED=false
pgadmin:
image: dpage/pgadmin4
restart: always
environment:
PGADMIN_DEFAULT_EMAIL: admin@example.com
PGADMIN_DEFAULT_PASSWORD: pass
ports:
- 5050:80
depends_on:
- db
smtp-server:
container_name: infisical-dev-smtp-server
image: lytrax/mailhog:latest # https://github.com/mailhog/MailHog/issues/353#issuecomment-821137362
restart: always
logging:
driver: "none" # disable saving logs
ports:
- 1025:1025 # SMTP server
- 8025:8025 # Web UI
# mongo:
# image: mongo
# container_name: infisical-dev-mongo
# restart: always
# env_file: .env
# environment:
# - MONGO_INITDB_ROOT_USERNAME=root
# - MONGO_INITDB_ROOT_PASSWORD=example
# volumes:
# - mongo-data:/data/db
# ports:
# - 27017:27017
#
# mongo-express:
# container_name: infisical-dev-mongo-express
# image: mongo-express
# restart: always
# depends_on:
# - mongo
# env_file: .env
# environment:
# - ME_CONFIG_MONGODB_ADMINUSERNAME=root
# - ME_CONFIG_MONGODB_ADMINPASSWORD=example
# - ME_CONFIG_MONGODB_URL=mongodb://root:example@mongo:27017/
# ports:
# - 8081:8081
volumes:
postgres-data:
driver: local
redis_data:
driver: local
mongo-data:
driver: local

View File

@ -1,12 +1,27 @@
version: "3"
services:
db-migration:
container_name: infisical-db-migration
depends_on:
- db
image: infisical/infisical:latest-postgres
env_file: .env
command: npm run migration:latest
networks:
- infisical
backend:
container_name: infisical-backend
restart: unless-stopped
depends_on:
- mongo
image: infisical/infisical:latest
db:
condition: service_started
redis:
condition: service_started
db-migration:
condition: service_completed_successfully
image: infisical/infisical:latest-postgres
env_file: .env
ports:
- 80:8080
@ -28,21 +43,18 @@ services:
volumes:
- redis_data:/data
mongo:
container_name: infisical-mongo
image: mongo
db:
container_name: infisical-db
image: postgres:14-alpine
restart: always
env_file: .env
environment:
- MONGO_INITDB_ROOT_USERNAME=${MONGO_USERNAME}
- MONGO_INITDB_ROOT_PASSWORD=${MONGO_PASSWORD}
volumes:
- mongo-data:/data/db
- pg_data:/data/db
networks:
- infisical
volumes:
mongo-data:
pg_data:
driver: local
redis_data:
driver: local

View File

@ -1,4 +1,4 @@
---
title: "Create"
openapi: "POST /api/v2/workspace/{workspaceId}/environments"
openapi: "POST /api/v1/workspace/{workspaceId}/environments"
---

View File

@ -1,4 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v2/workspace/{workspaceId}/environments"
---
openapi: "DELETE /api/v1/workspace/{workspaceId}/environments/{id}"
---

View File

@ -1,4 +1,4 @@
---
title: "Update"
openapi: "PUT /api/v2/workspace/{workspaceId}/environments"
---
openapi: "PATCH /api/v1/workspace/{workspaceId}/environments/{id}"
---

View File

@ -1,4 +1,4 @@
---
title: "Create"
openapi: "POST /api/v1/folders/"
---
openapi: "POST /api/v1/folders"
---

View File

@ -1,4 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/folders/{folderName}"
---
openapi: "DELETE /api/v1/folders/{folderId}"
---

View File

@ -1,4 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/folders/"
---
openapi: "GET /api/v1/folders"
---

View File

@ -1,4 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v1/folders/{folderName}"
---
openapi: "PATCH /api/v1/folders/{folderId}"
---

View File

@ -1,4 +1,4 @@
---
title: "Create"
openapi: "POST /api/v1/identities/"
---
openapi: "POST /api/v1/identities"
---

View File

@ -1,4 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/identities/{identityId}"
---
---

View File

@ -1,4 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v1/identities/{identityId}"
---
---

View File

@ -1,4 +1,4 @@
---
title: "List Identity Memberships"
openapi: "GET /api/v2/organizations/{organizationId}/identity-memberships"
---
openapi: "GET /api/v2/organizations/{orgId}/identity-memberships"
---

View File

@ -1,4 +1,4 @@
---
title: "Create"
openapi: "POST /api/v1/secret-imports/"
---
openapi: "POST /api/v1/secret-imports"
---

View File

@ -1,4 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/secret-imports/{id}"
---
openapi: "DELETE /api/v1/secret-imports/{secretImportId}"
---

View File

@ -1,4 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/secret-imports/"
---
openapi: "GET /api/v1/secret-imports"
---

View File

@ -1,4 +1,4 @@
---
title: "Update"
openapi: "PUT /api/v1/secret-imports/{id}"
---
openapi: "PATCH /api/v1/secret-imports/{secretImportId}"
---

View File

@ -1,6 +1,6 @@
---
title: "Get"
openapi: "GET /api/v2/service-token/"
openapi: "GET /api/v2/service-token"
---
<Warning>

View File

@ -1,4 +1,4 @@
---
title: "Delete Identity Membership"
openapi: "DELETE /api/v2/workspace/{workspaceId}/identity-memberships/{identityId}"
---
openapi: "DELETE /api/v2/workspace/{projectId}/identity-memberships/{identityId}"
---

View File

@ -1,4 +1,4 @@
---
title: "Delete User Membership"
openapi: "DELETE /api/v2/workspace/{workspaceId}/memberships/{membershipId}"
openapi: "DELETE /api/v1/workspace/{workspaceId}/memberships/{membershipId}"
---

View File

@ -1,4 +1,4 @@
---
title: "List Identity Memberships"
openapi: "GET /api/v2/workspace/{workspaceId}/identity-memberships"
---
openapi: "GET /api/v2/workspace/{projectId}/identity-memberships"
---

View File

@ -1,4 +1,4 @@
---
title: "Get User Memberships"
openapi: "GET /api/v2/workspace/{workspaceId}/memberships"
openapi: "GET /api/v1/workspace/{workspaceId}/memberships"
---

View File

@ -1,4 +1,4 @@
---
title: "Roll Back to Snapshot"
openapi: "POST /api/v1/workspace/{workspaceId}/secret-snapshots/rollback"
openapi: "POST /api/v1/secret-snapshot/{secretSnapshotId}/rollback"
---

View File

@ -1,4 +1,4 @@
---
title: "Update Identity Membership"
openapi: "PATCH /api/v2/workspace/{workspaceId}/identity-memberships/{identityId}"
---
openapi: "PATCH /api/v2/workspace/{projectId}/identity-memberships/{identityId}"
---

View File

@ -1,4 +1,4 @@
---
title: "Update User Membership"
openapi: "PATCH /api/v2/workspace/{workspaceId}/memberships/{membershipId}"
openapi: "PATCH /api/v1/workspace/{workspaceId}/memberships/{membershipId}"
---

View File

@ -0,0 +1,82 @@
---
title: 'Backend folder structure'
---
```
├── scripts
├── e2e-test
└── src/
├── @types/
│ ├── knex.d.ts
│ └── fastify.d.ts
├── db/
│ ├── migrations
│ ├── schemas
│ └── seed
├── lib/
│ ├── fn
│ ├── date
│ └── config
├── queue
├── server/
│ ├── routes/
│ │ ├── v1
│ │ └── v2
│ ├── plugins
│ └── config
├── services/
│ ├── auth
│ ├── org
│ └── project/
│ ├── project-service.ts
│ ├── project-types.ts
│ └── project-dal.ts
└── ee/
├── routes
└── services
```
### `backend/scripts`
Contains reusable scripts for backend automation, like running migrations and generating SQL schemas.
### `backend/e2e-test`
Integration tests for the APIs.
### `backend/src`
The source code of the backend.
- `@types`: Type definitions for libraries like Fastify and Knex.
- `db`: Knex.js configuration for the database, including migration, seed files, and SQL type schemas.
- `lib`: Stateless, reusable functions used across the codebase.
- `queue`: Infisical's queue system based on BullMQ.
### `src/server`
- Scope anything related to Fastify/service here.
- Includes routes, Fastify plugins, and server configurations.
- The routes folder contains various versions of routes separated into v1, v2, etc.
### `src/services`
- Handles the core business logic for all operations.
- Follows the co-location principle: related components should be kept together.
- Each service component typically contains:
1. **dal**: Database Access Layer functions for database operations
2. **service**: The service layer containing business logic.
3. **type**: Type definitions used within the service component.
4. **fns**: An optional component for sharing reusable functions related to the service.
5. **queue**: An optional component for queue-specific logic, like `secret-queue.ts`.
### `src/ee`
Follows the same pattern as above, with the exception of a license change from MIT to Infisical Proprietary License.
### Guidelines and Best Practices
- All services are interconnected at `/src/server/routes/index.ts`, following the principle of simple dependency injection.
- Files should be named in dash-case.
- Avoid using classes in the codebase; opt for simple functions instead.
- All committed code must be properly linted using `npm run lint:fix` and type-checked with `npm run type:check`.
- Minimize shared logic between services as much as possible.
- Controllers within a router component should ideally call only one service layer, with exceptions for services like `audit-log` that require access to request object data.
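
As a compact illustration of the co-location and dependency-injection conventions described above, a hypothetical `feature-x` service component might be split roughly like this (names are invented; real components live in separate `-dal`, `-service`, and `-types` files):

```ts
import { Knex } from "knex";

// feature-x-types.ts
export type TCreateFeatureXDTO = { name: string; orgId: string };

// feature-x-dal.ts: database access only
export const featureXDALFactory = (db: Knex) => {
  const create = (data: TCreateFeatureXDTO) => db("feature_x").insert(data).returning("*");
  return { create };
};

// feature-x-service.ts: business logic, with the DAL injected as a dependency
export const featureXServiceFactory = (featureXDAL: ReturnType<typeof featureXDALFactory>) => {
  const createFeatureX = async (dto: TCreateFeatureXDTO) => {
    if (!dto.name.trim()) throw new Error("Name is required");
    return featureXDAL.create(dto);
  };
  return { createFeatureX };
};
```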

View File

@ -0,0 +1,56 @@
---
title: "Backend development guide"
---
Suppose you're interested in implementing a new feature in Infisical's backend, let's call it "feature-x." Here are the general steps you should follow.
## Database schema migration
In order to run [schema migrations](https://en.wikipedia.org/wiki/Schema_migration#:~:text=A%20schema%20migration%20is%20performed,some%20newer%20or%20older%20version) you need to expose your database connection string. Create a `.env.migration` file to set the database connection URI for migration scripts, or alternatively, export the `DB_CONNECTION_URI` environment variable.
## Creating new database model
If your feature involves a change in the database, you need to first address this by generating the necessary database schemas.
1. If you're adding a new table, update the `TableName` enum in `/src/db/schemas/models.ts` to include the new table name.
2. Create a new migration file by running `npm run migration:new` and give it a relevant name, such as `feature-x`.
3. Navigate to `/src/db/migrations/<timestamp>_<feature-x>.ts`.
4. Modify both the `up` and `down` functions to create or alter Postgres fields on migration up and to revert these changes on migration down, ensuring idempotency as outlined [here](https://github.com/graphile/migrate/blob/main/docs/idempotent-examples.md).
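
For illustration, a migration for a hypothetical `feature_x` table might look roughly like this (table and column names are invented; a real file would reference the `TableName` enum member added in step 1):

```ts
import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  // Guarding on hasTable keeps the migration idempotent if it is re-run.
  if (!(await knex.schema.hasTable("feature_x"))) {
    await knex.schema.createTable("feature_x", (t) => {
      t.uuid("id").primary();
      t.string("name").notNullable();
      t.timestamps(true, true);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  // Revert exactly what `up` created.
  await knex.schema.dropTableIfExists("feature_x");
}
```
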
### Generating TS Schemas
While typically you would need to manually write TS types for Knex type-sense, we have automated this process:
1. Start the server.
2. Run `npm run migration:latest` to apply all database changes.
3. Execute `npm run generate:schema` to automatically generate types and schemas using [zod](https://github.com/colinhacks/zod) in the `/src/db/schemas` folder.
4. Update the barrel export in `schema/index` and include the new tables in `/src/@types/knex.d.ts` to enable type-sensing in Knex.js.
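
The generator's output is a zod schema plus inferred types per table. A rough sketch of what a generated file for the hypothetical `feature_x` table could contain (the real output is derived from the Postgres column info and also includes Insert/Update helper types):

```ts
import { z } from "zod";

export const FeatureXSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  createdAt: z.date(),
  updatedAt: z.date().nullable().optional()
});

export type TFeatureX = z.infer<typeof FeatureXSchema>;
```
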
## Business Logic
Once the database changes are in place, it's time to create the APIs for `feature-x`:
1. Execute `npm run generate:component`.
2. Choose option 1 for the service component.
3. Name the service in dash-case, like `feature-x`. This will create a `feature-x` folder in `/src/services` containing three files.
1. `feature-x-dal`: The Database Access Layer functions.
2. `feature-x-service`: The service layer where all the business logic is handled.
3. `feature-x-type`: The types used by `feature-x`.
For reusable shared functions, set up a file named `feature-x-fns`.
Use the custom Infisical function `ormify` in `src/lib/knex` for simple database operations within the DAL.
## Connecting the Service Layer to the Server Layer
Server-related logic is handled in `/src/server`. To connect the service layer to the server layer, we use Fastify plugins for dependency injection:
1. Add the service type in the `fastify.d.ts` file under the `service` namespace of a FastifyServerInstance type.
2. In `/src/server/routes/index.ts`, instantiate the required dependencies for `feature-x`, such as the DAL and service layers, and then pass them to `fastify.register("service,{...dependencies})`.
3. This makes the service layer accessible within all routes under the Fastify service instance, accessed via `server.services.<registered service name>.<function>`.
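
A sketch of the declaration-merging step from point 1, with a hypothetical service name and import path:

```ts
import "fastify";
// Hypothetical path; the factory comes from the service component created earlier.
import { featureXServiceFactory } from "@app/services/feature-x/feature-x-service";

declare module "fastify" {
  interface FastifyInstance {
    services: {
      featureX: ReturnType<typeof featureXServiceFactory>;
    };
  }
}
```
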
## Writing API Routes
1. To create a route component, run `npm generate:component`.
2. Select option 3, type the router name in dash-case, and provide the version number. This will generate a router file in `src/server/routes/v<version-number>/<router component name>`
1. Implement your logic to connect with the service layer as needed.
2. Import the router component in the version folder's index.ts. For instance, if it's in v1, import it in `v1/index.ts`.
3. Finally, register it under the appropriate prefix for access.
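
Registering the generated router under its version prefix might then look like this (router name and prefix are illustrative):

```ts
// v1/index.ts. FastifyZodProvider is Infisical's typed Fastify instance.
import { registerFeatureXRouter } from "./feature-x-router";

export const registerV1Routes = async (server: FastifyZodProvider) => {
  await server.register(registerFeatureXRouter, { prefix: "/feature-x" });
};
```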

View File

@ -4,7 +4,7 @@ description: "Programmatically interact with Infisical"
---
<Note>
Currently, identities can only be used to make authenticated requests to the Infisical API and SDKs. They do not work with clients such as CLI, K8s Operator, Terraform Provider, etc.
Currently, identities can only be used to make authenticated requests to the Infisical API, SDKs, and Agent. They do not work with clients such as CLI, K8s Operator, Terraform Provider, etc.
We will be releasing compatibility with it across clients in the coming quarter.
</Note>
@ -50,4 +50,4 @@ Check out the following authentication method-specific guides for step-by-step i
- The identity you are trying to read, update, or delete is more privileged than yourself.
- The role you are trying to create an identity for or update an identity to is more privileged than yours.
</Accordion>
</AccordionGroup>
</AccordionGroup>

View File

@ -91,10 +91,17 @@ description: "Configure Azure SAML for Infisical SSO"
![Azure SAML assignment](../../../images/sso/azure/assignment.png)
</Step>
<Step title="Enable SAML SSO in Infisical">
Enabling SAML SSO enforces all members in your organization to only be able to log into Infisical via Azure.
Enabling SAML SSO allows members in your organization to log into Infisical via Azure.
![Azure SAML assignment](../../../images/sso/azure/enable-saml.png)
</Step>
<Step title="Enforce SAML SSO in Infisical">
Enforcing SAML SSO ensures that members in your organization can only access Infisical
by logging into the organization via Azure.
To enforce SAML SSO, you're required to test out the SAML connection by successfully authenticating at least one Azure user with Infisical;
Once you've completed this requirement, you can toggle the **Enforce SAML SSO** button to enforce SAML SSO.
</Step>
</Steps>
<Note>

View File

@ -71,10 +71,17 @@ description: "Configure JumpCloud SAML for Infisical SSO"
![JumpCloud SAML assignment](../../../images/sso/jumpcloud/assignment.png)
</Step>
<Step title="Enable SAML SSO in Infisical">
Enabling SAML SSO enforces all members in your organization to only be able to log into Infisical via JumpCloud.
Enabling SAML SSO allows members in your organization to log into Infisical via JumpCloud.
![JumpCloud SAML assignment](../../../images/sso/jumpcloud/enable-saml.png)
</Step>
<Step title="Enforce SAML SSO in Infisical">
Enforcing SAML SSO ensures that members in your organization can only access Infisical
by logging into the organization via JumpCloud.
To enforce SAML SSO, you're required to test out the SAML connection by successfully authenticating at least one JumpCloud user with Infisical.
Once you've completed this requirement, you can toggle the **Enforce SAML SSO** button.
</Step>
</Steps>
<Note>

View File

@ -79,7 +79,7 @@ description: "Configure Okta SAML 2.0 for Infisical SSO"
![SAML Okta enable SAML](../../../images/sso/okta/enable-saml.png)
</Step>
<Step title="Enforce SAML SSO in Infisical">
Enforcing SAML SSO requires members in your organization can only access Infisical
Enforcing SAML SSO ensures that members in your organization can only access Infisical
by logging into the organization via Okta.
To enforce SAML SSO, you're required to test out the SAML connection by successfully authenticating at least one Okta user with Infisical;

(Ten binary image files changed in this range: five screenshots added and five updated. Image previews are not shown.)

View File

@ -3,22 +3,32 @@ title: "Jenkins"
description: "How to effectively and securely manage secrets in Jenkins using Infisical"
---
**Objective**: Fetch secrets from Infisical to Jenkins pipelines
In this guide, we'll outline the steps to deliver secrets from Infisical to Jenkins via the Infisical CLI.
At a high level, the Infisical CLI will be executed within your build environment and use a service token to authenticate with Infisical.
This token must be added as a Jenkins Credential and then passed to the Infisical CLI as an environment variable, enabling it to access and retrieve secrets within your workflows.
Prerequisites:
- Set up and add secrets to [Infisical](https://app.infisical.com).
- You have a working Jenkins installation with the [credentials plugin](https://plugins.jenkins.io/credentials/) installed.
- You have the Infisical CLI installed on your Jenkins executor nodes or container images.
- You have the [Infisical CLI](/cli/overview) installed on your Jenkins executor nodes or container images.
## Add Infisical Service Token to Jenkins
After setting up your project in Infisical and adding the Infisical CLI to container images, you will need to add the Infisical Service Token to Jenkins. Once you have generated the token, browse to **Manage Jenkins > Manage Credentials** in your Jenkins installation.
After setting up your project in Infisical and installing the Infisical CLI to the environment where your Jenkins builds will run, you will need to add the Infisical Service Token to Jenkins.
To generate an Infisical service token, follow the guide [here](/documentation/platform/token).
Once you have generated the token, navigate to **Manage Jenkins > Manage Credentials** in your Jenkins instance.
![Jenkins step 1](../../images/integrations/jenkins/jenkins_1.png)
Click on the credential store you want to store the Infisical Service Token in. In this case, we're using the default Jenkins global store.
<Info>
Each of your projects will have a different INFISICAL_SERVICE_TOKEN though.
Each of your projects will have a different `INFISICAL_TOKEN`.
As a result, it may make sense to spread these out into separate credential domains depending on your use case.
</Info>
@ -28,18 +38,22 @@ Now, click Add Credentials.
![Jenkins step 3](../../images/integrations/jenkins/jenkins_3.png)
Choose **Secret text** from the **Kind** dropdown menu, paste the Infisical Service Token into the **Secret** field, enter `INFISICAL_SERVICE_TOKEN` into the **Description** field, and click **OK**.
Choose **Secret text** for the **Kind** option from the dropdown list and enter the Infisical Service Token in the **Secret** field.
Although the **ID** can be any value, we'll set it to `infisical-service-token` for the sake of this guide.
The description is optional and can be any text you prefer.
![Jenkins step 4](../../images/integrations/jenkins/jenkins_4.png)
When you're done, you should have a credential similar to the one below:
When you're done, you should see a credential similar to the one below:
![Jenkins step 5](../../images/integrations/jenkins/jenkins_5.png)
## Use Infisical in a Freestyle Project
To use Infisical in a Freestyle Project job, you'll need to expose the credential you created above in an environment variable. First, click New Item from the dashboard navigation sidebar:
To fetch secrets with Infisical in a Freestyle Project job, you'll need to expose the credential you created above as an environment variable to the Infisical CLI.
To do so, first click **New Item** from the dashboard navigation sidebar:
![Jenkins step 6](../../images/integrations/jenkins/jenkins_6.png)
@ -51,7 +65,8 @@ Scroll down to the **Build Environment** section and enable the **Use secret tex
![Jenkins step 8](../../images/integrations/jenkins/jenkins_8.png)
Enter INFISICAL_SERVICE_TOKEN in the **Variable** field, select the **Specific credentials** option from the Credentials section and choose INFISICAL_SERVICE_TOKEN from the dropdown menu.
Enter `INFISICAL_TOKEN` in the **Variable** field, then select the **Specific credentials** option from the Credentials section and choose the credential you created earlier.
In this case, we saved it as `Infisical service token`, so we'll choose that from the dropdown menu.
![Jenkins step 9](../../images/integrations/jenkins/jenkins_9.png)
@ -59,15 +74,16 @@ Scroll down to the **Build** section and choose **Execute shell** from the **Add
![Jenkins step 10](../../images/integrations/jenkins/jenkins_10.png)
In the command field, enter the following command and click **Save**:
In the command field, you can now use the Infisical CLI to fetch secrets.
The example command below will print the secrets using the service token passed as a credential. When done, click **Save**.
```
infisical run -- printenv
infisical secrets --env=dev --path=/
```
![Jenkins step 11](../../images/integrations/jenkins/jenkins_11.png)
Finally, click **Build Now** from the navigation sidebar to test your new job.
Finally, click **Build Now** from the navigation sidebar to run your new job.
<Info>
Running into issues? Join Infisical's [community Slack](https://infisical.com/slack) for quick support.
@ -77,7 +93,8 @@ Finally, click **Build Now** from the navigation sidebar to test your new job.
## Use Infisical in a Jenkins Pipeline
To use Infisical in a Pipeline job, you'll need to expose the credential you created above as an environment variable. First, click **New Item** from the dashboard navigation sidebar:
To fetch secrets using Infisical in a Pipeline job, you'll need to expose the Jenkins credential you created above as an environment variable.
To do so, click **New Item** from the dashboard navigation sidebar:
![Jenkins step 6](../../images/integrations/jenkins/jenkins_6.png)
@ -92,31 +109,31 @@ pipeline {
agent any
environment {
INFISICAL_SERVICE_TOKEN = credentials('INFISICAL_SERVICE_TOKEN')
INFISICAL_TOKEN = credentials('infisical-service-token')
}
stages {
stage('Run Infisical') {
steps {
sh("infisical secrets")
sh("infisical secrets --env=dev --path=/")
// doesn't work
// sh("docker run --rm test-container infisical secrets")
// works
// sh("docker run -e INFISICAL_SERVICE_TOKEN=${INFISICAL_SERVICE_TOKEN} --rm test-container infisical secrets")
// sh("docker run -e INFISICAL_TOKEN=${INFISICAL_TOKEN} --rm test-container infisical secrets --env=dev --path=/")
// doesn't work
// sh("docker-compose up -d")
// works
// sh("INFISICAL_SERVICE_TOKEN=${INFISICAL_SERVICE_TOKEN} docker-compose up -d")
// sh("INFISICAL_TOKEN=${INFISICAL_TOKEN} docker-compose up -d")
}
}
}
}
```
This is a very basic sample that you can work from. Jenkins injects the INFISICAL_SERVICE_TOKEN environment variable defined in the pipeline into the shell the commands execute with, but there are some situations where that won't pass through properly notably if you're executing docker containers on the executor machine. The examples above should give you some idea for how that will work.
Finally, click **Build Now** from the navigation sidebar to test your new job.
The example provided above serves as an initial guide. It shows how Jenkins injects the `INFISICAL_TOKEN` environment variable, defined in the pipeline, into the shell used to execute commands.
There may be instances where this doesn't pass through as expected, notably when running Docker containers or Docker Compose from the pipeline.
The commented-out working examples above show how to forward the token explicitly in those cases.

View File

@ -0,0 +1,287 @@
---
title: 'Amazon ECS'
description: "How to deliver secrets to Amazon Elastic Container Service"
---
![ecs diagram](/images/guides/agent-with-ecs/ecs-diagram.png)
This guide will go over the steps needed to access secrets stored in Infisical from Amazon Elastic Container Service (ECS).
At a high level, the steps involve setting up an ECS task with an [Infisical Agent](/infisical-agent/overview) as a sidecar container. This sidecar container uses [Universal Auth](/documentation/platform/identities/universal-auth) to authenticate with Infisical and fetch secrets/access tokens.
Once the secrets/access tokens are retrieved, they are then stored in a shared [Amazon Elastic File System](https://aws.amazon.com/efs/) (EFS) volume. This volume is then made accessible to your application and all of its replicas.
This guide primarily focuses on integrating Infisical Cloud with Amazon ECS on AWS Fargate and Amazon EFS.
However, the principles and steps can be adapted for use with any instance of Infisical (on premise or cloud) and different ECS launch configurations.
## Prerequisites
This guide requires the following prerequisites:
- Infisical account
- Git installed
- Terraform v1.0 or later installed
- Access to AWS credentials
- Understanding of [Infisical Agent](/infisical-agent/overview)
## What we will deploy
For this demonstration, we'll deploy the [File Browser](https://github.com/filebrowser/filebrowser) application on our ECS cluster.
Although this guide focuses on File Browser, the principles outlined here can be applied to any application of your choice.
File Browser plays a key role in this context because it enables us to view all files attached to a specific volume.
This feature is important for our demonstration, as it allows us to verify whether the Infisical agent is depositing the expected files into the designated file volume and if those files are accessible to the application.
<Warning>
Volumes that contain sensitive secrets should not be publicly accessible. The use of File Browser here is solely for demonstration and verification purposes.
</Warning>
## Configure Authentication with Infisical
In order for the Infisical agent to fetch credentials from Infisical, we'll first need to authenticate with Infisical.
While Infisical supports various authentication methods, this guide focuses on using Universal Auth to authenticate the agent with Infisical.
Follow the documentation to configure and generate a client ID and client secret with Universal Auth [here](/documentation/platform/identities/universal-auth).
Make sure to save these credentials somewhere handy because you'll need them soon.
## Clone guide assets repository
To help you quickly deploy the example application, please clone the guide assets from this [GitHub repository](https://github.com/Infisical/infisical-guides.git).
This repository contains assets for all Infisical guides. The content for this guide can be found within a subdirectory called `aws-ecs-with-agent`.
The guide will assume that `aws-ecs-with-agent` is your working directory going forward.
## Deploy example application
Before we can deploy our full application and its related infrastructure with Terraform, we'll need to first configure our Infisical agent.
### Agent configuration overview
The agent config file defines what authentication method will be used when connecting with Infisical along with where the fetched secrets/access tokens should be saved to.
Since the Infisical agent will be deployed as a sidecar, the agent configuration file and any secret template files will need to be encoded in base64.
This encoding step is necessary as it allows these files to be added into our Terraform configuration file without needing to upload them first.
#### Secret template file
The Infisical agent accepts one or more optional template files. If provided, the agent will fetch secrets using the set authentication method and format the fetched secrets according to the given template file.
For demonstration purposes, we will create the following secret template file.
This template will transform the secrets of the Infisical project with ID `62fd92aa8b63973fee23dec7`, in the `dev` environment, at the secret path `/`, into `KEY=VALUE` format.
<Tip>
Remember to update the project ID, environment slug, and secret path to ones that exist within your Infisical project.
</Tip>
```secrets.template secrets.template
{{- with secret "62fd92aa8b63973fee23dec7" "dev" "/" }}
{{- range . }}
{{ .Key }}={{ .Value }}
{{- end }}
{{- end }}
```
Next, we need to encode this template file in `base64` so it can be set in the agent configuration file.
```bash
cat secrets.template | base64
Cnt7LSB3aXRoIHNlY3JldCAiMWVkMjk2MWQtNDM5NS00MmNlLTlkNzQtYjk2ZGQwYmYzMDg0IiAiZGV2IiAiLyIgfX0Ke3stIHJhbmdlIC4gfX0Ke3sgLktleSB9fT17eyAuVmFsdWUgfX0Ke3stIGVuZCB9fQp7ey0gZW5kIH19
```
#### Full agent configuration file
This agent config file will connect with Infisical Cloud using Universal Auth, deposit access tokens at the path `/infisical-agent/access-token`, and render secrets to the file `/infisical-agent/secrets`.
You'll notice that instead of passing the path to the secret template file as we normally would, we set the base64-encoded template from the previous step under the `base64-template-content` property.
```yaml agent-config.yaml
infisical:
address: https://app.infisical.com
exit-after-auth: true
auth:
type: universal-auth
config:
remove_client_secret_on_read: false
sinks:
- type: file
config:
path: /infisical-agent/access-token
templates:
- base64-template-content: Cnt7LSB3aXRoIHNlY3JldCAiMWVkMjk2MWQtNDM5NS00MmNlLTlkNzQtYjk2ZGQwYmYzMDg0IiAiZGV2IiAiLyIgfX0Ke3stIHJhbmdlIC4gfX0Ke3sgLktleSB9fT17eyAuVmFsdWUgfX0Ke3stIGVuZCB9fQp7ey0gZW5kIH19
destination-path: /infisical-agent/secrets
```
Again, we'll need to encode the full configuration file in `base64` so it can be easily delivered via Terraform.
```bash
cat agent-config.yaml | base64
aW5maXNpY2FsOgogIGFkZHJlc3M6IGh0dHBzOi8vYXBwLmluZmlzaWNhbC5jb20KICBleGl0LWFmdGVyLWF1dGg6IHRydWUKYXV0aDoKICB0eXBlOiB1bml2ZXJzYWwtYXV0aAogIGNvbmZpZzoKICAgIHJlbW92ZV9jbGllbnRfc2VjcmV0X29uX3JlYWQ6IGZhbHNlCnNpbmtzOgogIC0gdHlwZTogZmlsZQogICAgY29uZmlnOgogICAgICBwYXRoOiAvaW5maXNpY2FsLWFnZW50L2FjY2Vzcy10b2tlbgp0ZW1wbGF0ZXM6CiAgLSBiYXNlNjQtdGVtcGxhdGUtY29udGVudDogQ250N0xTQjNhWFJvSUhObFkzSmxkQ0FpTVdWa01qazJNV1F0TkRNNU5TMDBNbU5sTFRsa056UXRZamsyWkdRd1ltWXpNRGcwSWlBaVpHVjJJaUFpTHlJZ2ZYMEtlM3N0SUhKaGJtZGxJQzRnZlgwS2Uzc2dMa3RsZVNCOWZUMTdleUF1Vm1Gc2RXVWdmWDBLZTNzdElHVnVaQ0I5ZlFwN2V5MGdaVzVrSUgxOQogICAgZGVzdGluYXRpb24tcGF0aDogL2luZmlzaWNhbC1hZ2VudC9zZWNyZXRzCg==
```
## Add auth credentials & agent config
With the base64 encoded agent config file and Universal Auth credentials in hand, it's time to assign them as values in our Terraform config file.
To configure these values, navigate to the `ecs.tf` file in your preferred code editor and assign values to `auth_client_id`, `auth_client_secret`, and `agent_config`.
```hcl ecs.tf
...snip...
data "template_file" "cb_app" {
template = file("./templates/ecs/cb_app.json.tpl")
vars = {
app_image = var.app_image
sidecar_image = var.sidecar_image
app_port = var.app_port
fargate_cpu = var.fargate_cpu
fargate_memory = var.fargate_memory
aws_region = var.aws_region
auth_client_id = "<paste-client-id-string>"
auth_client_secret = "<paste-client-secret-string>"
agent_config = "<paste-base64-encoded-agent-config-string>"
}
}
...snip...
```
<Warning>
To keep this guide simple, `auth_client_id` and `auth_client_secret` have been added directly into the ECS container definition.
However, in production, you should securely fetch these values from AWS Secrets Manager or AWS Parameter Store and feed them directly to the agent sidecar.
</Warning>
After these values have been set, they will be passed to the Infisical agent during startup through environment variables, as configured in the `infisical-sidecar` container below.
```terraform templates/ecs/cb_app.json.tpl
[
...snip...
{
"name": "infisical-sidecar",
"image": "${sidecar_image}",
"cpu": 1024,
"memory": 1024,
"networkMode": "bridge",
"command": ["agent"],
"essential": false,
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-group": "/ecs/agent",
"awslogs-region": "${aws_region}",
"awslogs-stream-prefix": "ecs"
}
},
"healthCheck": {
"command": ["CMD-SHELL", "agent", "--help"],
"interval": 30,
"timeout": 5,
"retries": 3,
"startPeriod": 0
},
"environment": [
{
"name": "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID",
"value": "${auth_client_id}"
},
{
"name": "INFISICAL_UNIVERSAL_CLIENT_SECRET",
"value": "${auth_client_secret}"
},
{
"name": "INFISICAL_AGENT_CONFIG_BASE64",
"value": "${agent_config}"
}
],
"mountPoints": [
{
"containerPath": "/infisical-agent",
"sourceVolume": "infisical-efs"
}
]
}
]
```
In the above container definition, you'll notice that the Infisical agent has a `mountPoints` entry defined.
This mount point references the EFS volume configured below.
`containerPath` is set to `/infisical-agent` because that is the folder where we have instructed the agent to deposit the credentials.
```hcl terraform/efs.tf
resource "aws_efs_file_system" "infisical_efs" {
tags = {
Name = "INFISICAL-ECS-EFS"
}
}
resource "aws_efs_mount_target" "mount" {
count = length(aws_subnet.private.*.id)
file_system_id = aws_efs_file_system.infisical_efs.id
subnet_id = aws_subnet.private[count.index].id
security_groups = [aws_security_group.efs_sg.id]
}
```
## Configure AWS credentials
Because we'll be deploying the example file browser application to AWS via Terraform, you will need to obtain an AWS Access Key ID and Secret Access Key.
Once you have generated these credentials, export them to your terminal.
1. Export the AWS Access Key ID:
```bash
export AWS_ACCESS_KEY_ID=<your AWS access key ID>
```
2. Export the AWS Secret Access Key:
```bash
export AWS_SECRET_ACCESS_KEY=<your AWS secret access key>
```
## Deploy terraform configuration
With the agent's sidecar configuration complete, we can now deploy our changes to AWS via Terraform.
1. Change your directory to `terraform`
```sh
cd terraform
```
2. Initialize Terraform
```
$ terraform init
```
3. Preview resources that will be created
```
$ terraform plan
```
4. Trigger resource creation
```bash
$ terraform apply
Do you want to perform these actions?
Terraform will perform the actions described above.
Only 'yes' will be accepted to approve.
Enter a value: yes
```
```bash
Apply complete! Resources: 1 added, 1 changed, 1 destroyed.
Outputs:
alb_hostname = "cb-load-balancer-1675475779.us-east-1.elb.amazonaws.com:8080"
```
Once the resources have been successfully deployed, Terraform will output the host address where the file browser application will be accessible.
It may take a few minutes for the application to become fully ready.
## Verify secrets/tokens in EFS volume
To verify that the agent is depositing access tokens and rendering secrets to the paths specified in the agent config, navigate to the web address from the previous step.
Once you visit the address, you'll be prompted to log in. Enter the credentials shown below.
![file browser main login page](/images/guides/agent-with-ecs/file_browser_main.png)
Since our EFS volume is mounted to the path of the file browser application, we should see the access token and rendered secret file we defined via the agent config file.
![file browser dashboard](/images/guides/agent-with-ecs/filebrowser_afterlogin.png)
As expected, two files are present: `access-token` and `secrets`.
The `access-token` file should hold a valid `Bearer` token, which can be used to make HTTP requests to Infisical.
The `secrets` file should contain secrets, formatted according to the specifications in our secret template file (presented in key=value format).
![file browser access token deposit](/images/guides/agent-with-ecs/access-token-deposit.png)
![file browser secrets render](/images/guides/agent-with-ecs/secrets-deposit.png)
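As a closing illustration, here is a minimal sketch, not part of the guide's Terraform assets, of how an application container that mounts the same EFS volume might consume these two files. The paths match the agent config above; the script itself is an assumption.
```typescript
// load-agent-output.ts (sketch): consume the files the agent rendered into the shared EFS volume
import { readFileSync } from "fs";

// Parse the KEY=VALUE lines rendered from our secret template and load them into process.env
const secretsFile = readFileSync("/infisical-agent/secrets", "utf8");
let loaded = 0;
for (const line of secretsFile.split("\n")) {
  const idx = line.indexOf("=");
  if (idx <= 0) continue; // skip blank or malformed lines
  process.env[line.slice(0, idx).trim()] = line.slice(idx + 1).trim();
  loaded += 1;
}

// The access token can be sent as a Bearer token on direct calls to the Infisical API
const accessToken = readFileSync("/infisical-agent/access-token", "utf8").trim();
const authHeader = { Authorization: `Bearer ${accessToken}` };

console.log(`Loaded ${loaded} secrets; auth header prepared:`, Object.keys(authHeader));
```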

View File

@ -1,5 +1,6 @@
{
"name": "Infisical",
"openapi": "https://app.infisical.com/api/docs/json",
"logo": {
"dark": "/logo/dark.svg",
"light": "/logo/light.svg",
@ -233,7 +234,8 @@
},
"integrations/platforms/kubernetes",
"integrations/frameworks/terraform",
"integrations/platforms/ansible"
"integrations/platforms/ansible",
"integrations/platforms/ecs-with-agent"
]
},
{
@ -462,7 +464,9 @@
{
"group": "Contributing to platform",
"pages": [
"contributing/platform/developing"
"contributing/platform/developing",
"contributing/platform/backend/how-to-create-a-feature",
"contributing/platform/backend/folder-structure"
]
},
{

File diff suppressed because it is too large.

View File

@ -58,8 +58,8 @@ export default function DonwloadBackupPDFStep({
return (
<div className="flex flex-col items-center w-full h-full md:px-6 mx-auto mb-36 md:mb-16">
<p className="text-xl text-center font-medium flex justify-center text-transparent bg-clip-text bg-gradient-to-b from-white to-bunker-200">
<FontAwesomeIcon icon={faWarning} className="ml-2 mr-3 pt-1 text-2xl text-bunker-200" />
<p className="text-xl text-center font-medium flex flex-col justify-center items-center text-transparent bg-clip-text bg-gradient-to-b from-white to-bunker-200">
<FontAwesomeIcon icon={faWarning} className="ml-2 mr-3 pt-1 mb-6 text-6xl text-bunker-200" />
{t("signup.step4-message")}
</p>
<div className="flex flex-col pb-2 bg-mineshaft-800 border border-mineshaft-600 items-center justify-center text-center lg:w-1/6 w-full md:min-w-[24rem] mt-8 max-w-md text-bunker-300 text-md rounded-md">

View File

@ -128,10 +128,10 @@ export const SignUpPage = () => {
animate={{ opacity: 1, translateX: 0 }}
exit={{ opacity: 0, translateX: 30 }}
>
<div className="flex flex-col items-center space-y-4 text-center">
<div className="flex flex-col items-center space-y-2 text-center">
<img src="/images/gradientLogo.svg" height={90} width={120} alt="Infisical logo" />
<div className="text-4xl">Welcome to Infisical</div>
<div>Create your first Admin Account</div>
<div className="text-4xl pt-4">Welcome to Infisical</div>
<div className="text-bunker-300 pb-4">Create your first Super Admin Account</div>
</div>
<form onSubmit={handleSubmit(handleFormSubmit)}>
<div className="mt-8">
@ -145,7 +145,7 @@ export const SignUpPage = () => {
errorText={error?.message}
isError={Boolean(error)}
>
<Input isFullWidth size="lg" {...field} />
<Input isFullWidth size="md" {...field} />
</FormControl>
)}
/>
@ -158,7 +158,7 @@ export const SignUpPage = () => {
errorText={error?.message}
isError={Boolean(error)}
>
<Input isFullWidth size="lg" {...field} />
<Input isFullWidth size="md" {...field} />
</FormControl>
)}
/>
@ -168,7 +168,7 @@ export const SignUpPage = () => {
name="email"
render={({ field, fieldState: { error } }) => (
<FormControl label="Email" errorText={error?.message} isError={Boolean(error)}>
<Input isFullWidth size="lg" {...field} />
<Input isFullWidth size="md" {...field} />
</FormControl>
)}
/>
@ -181,7 +181,7 @@ export const SignUpPage = () => {
errorText={error?.message}
isError={Boolean(error)}
>
<Input isFullWidth size="lg" type="password" {...field} />
<Input isFullWidth size="md" type="password" {...field} />
</FormControl>
)}
/>
@ -194,13 +194,13 @@ export const SignUpPage = () => {
errorText={error?.message}
isError={Boolean(error)}
>
<Input isFullWidth size="lg" type="password" {...field} />
<Input isFullWidth size="md" type="password" {...field} />
</FormControl>
)}
/>
</div>
<Button type="submit" isFullWidth className="mt-4" isLoading={isSubmitting}>
Let&apos;s Go
<Button type="submit" colorSchema="primary" variant="outline_bg" isFullWidth className="mt-4" isLoading={isSubmitting}>
Continue
</Button>
</form>
</motion.div>

View File

@ -15,8 +15,8 @@ export const DownloadBackupKeys = ({ onGenerate }: Props): JSX.Element => {
return (
<div className="flex flex-col items-center w-full h-full md:px-6 mx-auto mb-36 md:mb-16">
<p className="text-xl text-center font-medium flex justify-center text-transparent bg-clip-text bg-gradient-to-b from-white to-bunker-200">
<FontAwesomeIcon icon={faWarning} className="ml-2 mr-3 pt-1 text-2xl text-bunker-200" />
<p className="text-xl text-center font-medium flex flex-col justify-center items-center text-transparent bg-clip-text bg-gradient-to-b from-white to-bunker-200">
<FontAwesomeIcon icon={faWarning} className="ml-2 mr-3 pt-1 mb-6 text-6xl text-bunker-200" />
{t("signup.step4-message")}
</p>
<div className="flex flex-col pb-2 bg-mineshaft-800 border border-mineshaft-600 items-center justify-center text-center lg:w-1/6 w-full md:min-w-[24rem] mt-8 max-w-md text-bunker-300 text-md rounded-md">