Compare commits

..

34 Commits

Author SHA1 Message Date
1567239fc2 improvement: use secret path input for create policy modal 2025-07-10 16:05:37 -07:00
aae5831f35 Merge pull request #4001 from Infisical/server-admin-sidebar-improvements
improvement(frontend): Server admin sidebar improvements
2025-07-10 15:44:25 -07:00
6f78a6b4c1 Merge pull request #4000 from Infisical/fix-remove-jim-as-sole-author-of-secret-leaks
fix(secret-scanning-v2): Remove Jim as sole author of all secret leaks
2025-07-10 15:41:24 -07:00
7690d5852b improvement: show icons on server admin sidebar and move "Back to Org" to top 2025-07-10 15:34:28 -07:00
c2e326b95a fix: remove jim as sole author of all secret leaks 2025-07-10 15:02:38 -07:00
b163c74a05 Merge pull request #3998 from Infisical/fix/foldersCommitsTriggeredOnNestedFolder
Fix folder creation commits triggered on new folder instead of the parent
2025-07-10 16:12:43 -04:00
46a4c6b119 Fix create folder commit issue triggering the commit on the created folder and not the parent folder 2025-07-10 17:02:53 -03:00
b03e9b70a2 Merge pull request #3982 from Infisical/audit-log-secret-path-tooltip
improvement(audit-logs): clarify secret key/path filter behavior for audit logs
2025-07-10 11:22:07 -07:00
f6e1808187 Merge pull request #3930 from Infisical/ENG-3016
feat(dynamic-secrets): AWS IRSA auth method
2025-07-10 13:44:59 -04:00
648cb20eb7 Merge pull request #3994 from Infisical/daniel/podman-docs
docs: add podman compose docs
2025-07-10 21:44:51 +04:00
Sid
fedffea8d5 ENG-2595 (#3976)
* feat: implement railway secret sync

* fix: railway sync config

* feat: add documentation on railway

* fix: undo mock on-prem change

* lint: fix

* fix: cleanup railway integration

* fix: retry and doc images

* fix: sync fields

* fix: query typo

* Update docs/docs.json

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-07-10 22:53:18 +05:30
8917629b96 Remove unused env var from docs 2025-07-10 12:36:53 -04:00
7de45ad220 Feedback + small docs update 2025-07-10 12:33:40 -04:00
5eb52edc52 Merge branch 'main' into ENG-3016 2025-07-10 12:28:39 -04:00
Sid
d3d1fb7190 feat: add more admin environment overrides (#3995)
* feat: add more env overrides
* Reorder alphabetically

---------

Co-authored-by: sidwebworks <xodeveloper@gmail.com>
Co-authored-by: x032205 <x032205@gmail.com>
2025-07-10 21:54:52 +05:30
6531e5b942 Merge pull request #3996 from Infisical/misc/remove-concurrently-for-index-creations
misc: remove concurrently for index creations
2025-07-10 11:48:08 -04:00
4164b2f32a misc: remove concurrently for index creations 2025-07-10 23:42:38 +08:00
0ec56c9928 docs: add podman compose docs 2025-07-10 18:57:25 +04:00
35520cfe99 Merge pull request #3989 from Infisical/add-access-token-index
add index for referencing columns in identity access token
2025-07-10 09:48:39 -04:00
Sid
ba0f6e60e2 fix: yaml secret file parsing (#3837) 2025-07-10 15:33:59 +05:30
579c68b2a3 Merge pull request #3991 from Infisical/helm-update-v0.9.4
Update Helm chart to version v0.9.4
2025-07-10 14:03:10 +04:00
f4ea3e1c75 Update Helm chart to version v0.9.4 2025-07-10 10:02:02 +00:00
7d37ea318f Merge pull request #3990 from Infisical/daniel/operator-logs
fix: add request ID to error logs
2025-07-10 13:57:44 +04:00
5cb7ecc354 fix: update go sdk 2025-07-10 13:35:59 +04:00
5e85de3937 fix lint and short index name 2025-07-09 23:36:55 -04:00
8719e3e75e add index for referencing columns in identity access token
This PR addresses an issue with very long identity deletions caused by a sequential scan over ALL identity access token rows during CASCADE deletes
2025-07-09 23:19:01 -04:00
69ece1f3e3 Merge pull request #3986 from Infisical/update-email-reinvite-job
Add jitter and increase window to 12 m
2025-07-09 22:03:02 -04:00
d5cd6f79f9 Merge branch 'main' into update-email-reinvite-job 2025-07-09 19:57:15 -04:00
19c0731166 Add jitter and increase window to 12 m 2025-07-09 19:54:35 -04:00
d2098fda5f Lower perm scope 2025-07-08 23:02:01 -04:00
09d72d6da1 Remove assume role from IRSA 2025-07-08 22:51:43 -04:00
e33a3c281c Merge branch 'main' into ENG-3016 2025-07-08 15:25:15 -04:00
a614b81a7a improvement: clarify secret key/path filter behavior for audit logs 2025-07-08 09:49:22 -07:00
a0e8496256 feat(dynamic-secrets): AWS IRSA auth method 2025-07-05 00:15:54 -04:00
162 changed files with 2790 additions and 1312 deletions

View File

@ -23,7 +23,7 @@ REDIS_URL=redis://redis:6379
# Required
SITE_URL=http://localhost:8080
# Mail/SMTP
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
@ -132,3 +132,6 @@ DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
# kubernetes
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN=false

View File

@ -34,6 +34,7 @@ ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NODE_OPTIONS="--max-old-space-size=8192"
# Build
RUN npm run build
@ -77,6 +78,7 @@ RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
ENV NODE_OPTIONS="--max-old-space-size=8192"
RUN npm run build
# Production stage

View File

@ -0,0 +1,46 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const MIGRATION_TIMEOUT = 30 * 60 * 1000; // 30 minutes
export async function up(knex: Knex): Promise<void> {
const result = await knex.raw("SHOW statement_timeout");
const originalTimeout = result.rows[0].statement_timeout;
try {
await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
// iat means IdentityAccessToken
await knex.raw(`
CREATE INDEX IF NOT EXISTS idx_iat_identity_id
ON ${TableName.IdentityAccessToken} ("identityId")
`);
await knex.raw(`
CREATE INDEX IF NOT EXISTS idx_iat_ua_client_secret_id
ON ${TableName.IdentityAccessToken} ("identityUAClientSecretId")
`);
} finally {
await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
}
}
export async function down(knex: Knex): Promise<void> {
const result = await knex.raw("SHOW statement_timeout");
const originalTimeout = result.rows[0].statement_timeout;
try {
await knex.raw(`SET statement_timeout = ${MIGRATION_TIMEOUT}`);
await knex.raw(`
DROP INDEX IF EXISTS idx_iat_identity_id
`);
await knex.raw(`
DROP INDEX IF EXISTS idx_iat_ua_client_secret_id
`);
} finally {
await knex.raw(`SET statement_timeout = '${originalTimeout}'`);
}
}
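
Note (illustrative, not part of the migration): the indexes above exist because deleting an identity cascades into the access-token table, and without an index on the referencing columns Postgres performs a sequential scan for every cascaded delete. A hedged sketch for checking that the new index is actually used, assuming a knex/pg connection; the physical table name is a guess at what TableName.IdentityAccessToken resolves to.

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL as string });

// Table and column names are assumptions mirroring the migration above.
const plan = await db.raw(`EXPLAIN SELECT 1 FROM identity_access_token WHERE "identityId" = ?`, [
  "00000000-0000-0000-0000-000000000000" // placeholder UUID
]);

// Expect an "Index Scan using idx_iat_identity_id" node instead of a "Seq Scan".
console.log(plan.rows.map((r: { "QUERY PLAN": string }) => r["QUERY PLAN"]).join("\n"));
await db.destroy();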

View File

@ -21,7 +21,7 @@ import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
@ -81,6 +81,21 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
}
if (providerInputs.method === AwsIamAuthType.IRSA) {
// Allow instances to disable automatic service account token fetching (e.g. for shared cloud)
if (!appCfg.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN) {
throw new UnauthorizedError({
message: "Failed to get AWS credentials via IRSA: KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN is not enabled."
});
}
// The SDK will automatically pick up credentials from the environment
const client = new IAMClient({
region: providerInputs.region
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@ -101,7 +116,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
.catch((err) => {
const message = (err as Error)?.message;
if (
providerInputs.method === AwsIamAuthType.AssumeRole &&
(providerInputs.method === AwsIamAuthType.AssumeRole || providerInputs.method === AwsIamAuthType.IRSA) &&
// assume role will throw an error asking to provide a username, but if so this has access in AWS correctly
message.includes("Must specify userName when calling with non-User credentials")
) {
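
Note (illustrative, not from this diff): for the IRSA branch, "the SDK will automatically pick up credentials from the environment" means the default provider chain reads the web-identity token and role ARN that EKS injects into the pod and exchanges them via STS. A minimal sketch, with placeholder region, ARN, and paths:

// Env vars injected by the EKS pod-identity webhook when the service account is
// annotated with an IAM role (values are hypothetical):
//   AWS_ROLE_ARN=arn:aws:iam::123456789012:role/example-dynamic-secrets
//   AWS_WEB_IDENTITY_TOKEN_FILE=/var/run/secrets/eks.amazonaws.com/serviceaccount/token
import { STSClient, GetCallerIdentityCommand } from "@aws-sdk/client-sts";
import { IAMClient } from "@aws-sdk/client-iam";

// No explicit credentials are passed; the default provider chain resolves them from the env above.
const sts = new STSClient({ region: "us-east-1" });
const identity = await sts.send(new GetCallerIdentityCommand({}));
console.log("Resolved caller:", identity.Arn); // the assumed-role ARN when IRSA is wired up

// The provider above then builds its IAM client the same way, passing only a region.
const iam = new IAMClient({ region: "us-east-1" });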

View File

@ -28,7 +28,8 @@ export enum SqlProviders {
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key"
AccessKey = "access-key",
IRSA = "irsa"
}
export enum ElasticSearchAuthTypes {
@ -221,6 +222,16 @@ export const DynamicSecretAwsIamSchema = z.preprocess(
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional(),
tags: ResourceMetadataSchema.optional()
}),
z.object({
method: z.literal(AwsIamAuthType.IRSA),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional(),
tags: ResourceMetadataSchema.optional()
})
])
);
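
For reference, a minimal input that the new IRSA branch of this schema should accept (values are placeholders; the sketch assumes it sits next to models.ts):

import { AwsIamAuthType, DynamicSecretAwsIamSchema } from "./models";

// Only `method` and `region` are required by the IRSA branch; the remaining fields are optional.
const irsaInputs = DynamicSecretAwsIamSchema.parse({
  method: AwsIamAuthType.IRSA,
  region: "us-east-1",
  policyArns: "arn:aws:iam::aws:policy/ReadOnlyAccess" // optional, placeholder ARN
});
console.log(irsaInputs.method);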

View File

@ -5,7 +5,6 @@ import crypto, { KeyObject } from "crypto";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { isValidIp } from "@app/lib/ip";
import { ms } from "@app/lib/ms";
import { OrgServiceActor } from "@app/lib/types";
import { isFQDN } from "@app/lib/validator/validate-url";
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
@ -796,26 +795,6 @@ export const kmipServiceFactory = ({
};
};
const getProjectClientCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const clients = await kmipClientDAL.find(
{
projectId
},
{ count: true }
);
return Number(clients?.[0]?.count ?? 0);
};
return {
createKmipClient,
updateKmipClient,
@ -827,7 +806,6 @@ export const kmipServiceFactory = ({
generateOrgKmipServerCertificate,
getOrgKmip,
getServerCertificateBySerialNumber,
registerServer,
getProjectClientCount
registerServer
};
};

View File

@ -437,7 +437,6 @@ export const secretScanningV2DALFactory = (db: TDbClient) => {
return {
dataSources: {
...dataSourceOrm,
findRaw: dataSourceOrm.find,
find: findDataSource,
findById: findDataSourceById,
findOne: findOneDataSource,

View File

@ -37,7 +37,8 @@ import {
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification,
TSecretScanningDataSourceWithConnection
TSecretScanningDataSourceWithConnection,
TSecretScanningFinding
} from "./secret-scanning-v2-types";
type TSecretRotationV2QueueServiceFactoryDep = {
@ -459,13 +460,16 @@ export const secretScanningV2QueueServiceFactory = async ({
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
if (newFindings.length) {
const finding = newFindings[0] as TSecretScanningFinding;
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Completed,
resourceName: resource.name,
isDiffScan: true,
dataSource,
numberOfSecrets: newFindings.length,
scanId
scanId,
authorName: finding?.details?.author,
authorEmail: finding?.details?.email
});
}
@ -582,8 +586,8 @@ export const secretScanningV2QueueServiceFactory = async ({
substitutions:
payload.status === SecretScanningScanStatus.Completed
? {
authorName: "Jim",
authorEmail: "jim@infisical.com",
authorName: payload.authorName,
authorEmail: payload.authorEmail,
resourceName,
numberOfSecrets: payload.numberOfSecrets,
isDiffScan: payload.isDiffScan,

View File

@ -881,47 +881,6 @@ export const secretScanningV2ServiceFactory = ({
return config;
};
const getProjectResourcesCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const dataSources = await secretScanningV2DAL.dataSources.findRaw(
{
projectId
},
{ count: true }
);
const resources = await secretScanningV2DAL.resources.find(
{
$in: {
dataSourceId: dataSources.map((dataSource) => dataSource.id)
}
},
{ count: true }
);
const findings = await secretScanningV2DAL.findings.find(
{
projectId,
status: SecretScanningFindingStatus.Unresolved
},
{ count: true }
);
return {
dataSourceCount: Number(dataSources?.[0]?.count ?? 0),
resourceCount: Number(resources?.[0]?.count ?? 0),
findingCount: Number(findings?.[0]?.count ?? 0)
};
};
return {
listSecretScanningDataSourceOptions,
listSecretScanningDataSourcesByProjectId,
@ -941,7 +900,6 @@ export const secretScanningV2ServiceFactory = ({
updateSecretScanningFindingById,
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
getProjectResourcesCount,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
};

View File

@ -119,7 +119,14 @@ export type TQueueSecretScanningSendNotification = {
resourceName: string;
} & (
| { status: SecretScanningScanStatus.Failed; errorMessage: string }
| { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
| {
status: SecretScanningScanStatus.Completed;
numberOfSecrets: number;
scanId: string;
isDiffScan: boolean;
authorName?: string;
authorEmail?: string;
}
);
export type TCloneRepository = {

View File

@ -8,7 +8,6 @@ import { TSshHostLoginUserDALFactory } from "@app/ee/services/ssh-host/ssh-login
import { TSshHostGroupDALFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-dal";
import { TSshHostGroupMembershipDALFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-membership-dal";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
@ -415,26 +414,6 @@ export const sshHostGroupServiceFactory = ({
return { sshHostGroup, sshHost };
};
const getProjectHostGroupCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const hostGroups = await sshHostGroupDAL.find(
{
projectId
},
{ count: true }
);
return Number(hostGroups?.[0]?.count ?? 0);
};
return {
createSshHostGroup,
getSshHostGroup,
@ -442,7 +421,6 @@ export const sshHostGroupServiceFactory = ({
updateSshHostGroup,
listSshHostGroupHosts,
addHostToSshHostGroup,
removeHostFromSshHostGroup,
getProjectHostGroupCount
removeHostFromSshHostGroup
};
};

View File

@ -13,7 +13,6 @@ import { TSshHostLoginUserMappingDALFactory } from "@app/ee/services/ssh-host/ss
import { TSshHostLoginUserDALFactory } from "@app/ee/services/ssh-host/ssh-login-user-dal";
import { PgSqlLock } from "@app/keystore/keystore";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
@ -61,7 +60,6 @@ type TSshHostServiceFactoryDep = {
| "findOne"
| "findSshHostByIdWithLoginMappings"
| "findUserAccessibleSshHosts"
| "find"
>;
sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
@ -639,26 +637,6 @@ export const sshHostServiceFactory = ({
return publicKey;
};
const getProjectHostCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const hosts = await sshHostDAL.find(
{
projectId
},
{ count: true }
);
return Number(hosts?.[0]?.count ?? 0);
};
return {
listSshHosts,
createSshHost,
@ -668,7 +646,6 @@ export const sshHostServiceFactory = ({
issueSshHostUserCert,
issueSshHostHostCert,
getSshHostUserCaPk,
getSshHostHostCaPk,
getProjectHostCount
getSshHostHostCaPk
};
};

View File

@ -2279,6 +2279,9 @@ export const AppConnections = {
ZABBIX: {
apiToken: "The API Token used to access Zabbix.",
instanceUrl: "The Zabbix instance URL to connect with."
},
RAILWAY: {
apiToken: "The API token used to authenticate with Railway."
}
}
};
@ -2474,6 +2477,14 @@ export const SecretSyncs = {
hostId: "The ID of the Zabbix host to sync secrets to.",
hostName: "The name of the Zabbix host to sync secrets to.",
macroType: "The type of macro to sync secrets to. (0: Text, 1: Secret)"
},
RAILWAY: {
projectId: "The ID of the Railway project to sync secrets to.",
projectName: "The name of the Railway project to sync secrets to.",
environmentId: "The Railway environment to sync secrets to.",
environmentName: "The Railway environment to sync secrets to.",
serviceId: "The Railway service that secrets should be synced to.",
serviceName: "The Railway service that secrets should be synced to."
}
}
};
@ -2594,7 +2605,9 @@ export const SecretRotations = {
export const SecretScanningDataSources = {
LIST: (type?: SecretScanningDataSource) => ({
projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
projectId: `The ID of the project to list ${
type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"
} Data Sources from.`
}),
GET_BY_ID: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`

View File

@ -28,6 +28,7 @@ const databaseReadReplicaSchema = z
const envSchema = z
.object({
INFISICAL_PLATFORM_VERSION: zpStr(z.string().optional()),
KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: zodStrBool.default("false"),
PORT: z.coerce.number().default(IS_PACKAGED ? 8080 : 4000),
DISABLE_SECRET_SCANNING: z
.enum(["true", "false"])
@ -373,6 +374,19 @@ export const overwriteSchema: {
fields: { key: keyof TEnvConfig; description?: string }[];
};
} = {
aws: {
name: "AWS",
fields: [
{
key: "INF_APP_CONNECTION_AWS_ACCESS_KEY_ID",
description: "The Access Key ID of your AWS account."
},
{
key: "INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY",
description: "The Client Secret of your AWS application."
}
]
},
azure: {
name: "Azure",
fields: [
@ -386,16 +400,79 @@ export const overwriteSchema: {
}
]
},
google_sso: {
name: "Google SSO",
gcp: {
name: "GCP",
fields: [
{
key: "CLIENT_ID_GOOGLE_LOGIN",
description: "The Client ID of your GCP OAuth2 application."
key: "INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL",
description: "The GCP Service Account JSON credentials."
}
]
},
github_app: {
name: "GitHub App",
fields: [
{
key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID",
description: "The Client ID of your GitHub application."
},
{
key: "CLIENT_SECRET_GOOGLE_LOGIN",
description: "The Client Secret of your GCP OAuth2 application."
key: "INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET",
description: "The Client Secret of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_APP_SLUG",
description: "The Slug of your GitHub application. This is the one found in the URL."
},
{
key: "INF_APP_CONNECTION_GITHUB_APP_ID",
description: "The App ID of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY",
description: "The Private Key of your GitHub application."
}
]
},
github_oauth: {
name: "GitHub OAuth",
fields: [
{
key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID",
description: "The Client ID of your GitHub OAuth application."
},
{
key: "INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET",
description: "The Client Secret of your GitHub OAuth application."
}
]
},
github_radar_app: {
name: "GitHub Radar App",
fields: [
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID",
description: "The Client ID of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET",
description: "The Client Secret of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG",
description: "The Slug of your GitHub application. This is the one found in the URL."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_ID",
description: "The App ID of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY",
description: "The Private Key of your GitHub application."
},
{
key: "INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET",
description: "The Webhook Secret of your GitHub application."
}
]
},
@ -412,6 +489,19 @@ export const overwriteSchema: {
}
]
},
gitlab_oauth: {
name: "GitLab OAuth",
fields: [
{
key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID",
description: "The Client ID of your GitLab OAuth application."
},
{
key: "INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET",
description: "The Client Secret of your GitLab OAuth application."
}
]
},
gitlab_sso: {
name: "GitLab SSO",
fields: [
@ -429,6 +519,19 @@ export const overwriteSchema: {
"The URL of your self-hosted instance of GitLab where the OAuth application is registered. If no URL is passed in, this will default to https://gitlab.com."
}
]
},
google_sso: {
name: "Google SSO",
fields: [
{
key: "CLIENT_ID_GOOGLE_LOGIN",
description: "The Client ID of your GCP OAuth2 application."
},
{
key: "CLIENT_SECRET_GOOGLE_LOGIN",
description: "The Client Secret of your GCP OAuth2 application."
}
]
}
};
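
Side note on the KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN entry added near the top of this file: it is parsed as a string boolean, so only the literals "true"/"false" are accepted and coerced. A hedged sketch of that coercion, with zodStrBool re-declared here as an assumed stand-in for the backend helper:

import { z } from "zod";

// Assumed shape of zodStrBool: accept "true"/"false" and coerce to a boolean.
const zodStrBool = z.enum(["true", "false"]).transform((v) => v === "true");

const schema = z.object({
  KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: zodStrBool.default("false")
});

console.log(schema.parse({})); // { KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: false }
console.log(schema.parse({ KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: "true" })); // { KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN: true }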

View File

@ -1,11 +1,18 @@
import axios from "axios";
import axiosRetry from "axios-retry";
import axios, { AxiosInstance, CreateAxiosDefaults } from "axios";
import axiosRetry, { IAxiosRetryConfig } from "axios-retry";
export const request = axios.create();
export function createRequestClient(defaults: CreateAxiosDefaults = {}, retry: IAxiosRetryConfig = {}): AxiosInstance {
const client = axios.create(defaults);
axiosRetry(request, {
retries: 3,
// eslint-disable-next-line
retryDelay: axiosRetry.exponentialDelay,
retryCondition: (err) => axiosRetry.isNetworkError(err) || axiosRetry.isRetryableError(err)
});
axiosRetry(client, {
retries: 3,
// eslint-disable-next-line
retryDelay: axiosRetry.exponentialDelay,
retryCondition: (err) => axiosRetry.isNetworkError(err) || axiosRetry.isRetryableError(err),
...retry
});
return client;
}
export const request = createRequestClient();
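
Usage sketch for the refactored helper (the base URL, headers, and retry count below are illustrative, not taken from this diff):

import { createRequestClient } from "@app/lib/config/request";

// A client with its own axios defaults and a stricter retry policy; options that are
// not overridden still fall back to the exponential-backoff defaults above.
const client = createRequestClient(
  { baseURL: "https://api.example.com", headers: { "Content-Type": "application/json" } },
  { retries: 5 }
);

const res = await client.get("/health");
console.log(res.status);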

View File

@ -12,11 +12,6 @@ type TKnexDynamicPrimitiveOperator<T extends object> =
operator: "notIn";
value: string[];
field: Extract<keyof T, string>;
}
| {
operator: "lte";
value: Date;
field: Extract<keyof T, string>;
};
type TKnexDynamicInOperator<T extends object> = {
@ -87,10 +82,6 @@ export const buildDynamicKnexQuery = <T extends object>(
});
break;
}
case "lte": {
void queryBuilder.where(filterAst.field, "<=", filterAst.value);
break;
}
default:
throw new UnauthorizedError({ message: `Invalid knex dynamic operator: ${filterAst.operator}` });
}

View File

@ -49,7 +49,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgSlug: z.string().nullable(),
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean()
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
})
})
}
@ -61,7 +62,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
config: {
...config,
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
}
};
}

View File

@ -71,6 +71,10 @@ import {
PostgresConnectionListItemSchema,
SanitizedPostgresConnectionSchema
} from "@app/services/app-connection/postgres";
import {
RailwayConnectionListItemSchema,
SanitizedRailwayConnectionSchema
} from "@app/services/app-connection/railway";
import {
RenderConnectionListItemSchema,
SanitizedRenderConnectionSchema
@ -123,7 +127,8 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedGitLabConnectionSchema.options,
...SanitizedCloudflareConnectionSchema.options,
...SanitizedBitbucketConnectionSchema.options,
...SanitizedZabbixConnectionSchema.options
...SanitizedZabbixConnectionSchema.options,
...SanitizedRailwayConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@ -157,7 +162,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
GitLabConnectionListItemSchema,
CloudflareConnectionListItemSchema,
BitbucketConnectionListItemSchema,
ZabbixConnectionListItemSchema
ZabbixConnectionListItemSchema,
RailwayConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

View File

@ -25,6 +25,7 @@ import { registerLdapConnectionRouter } from "./ldap-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerRailwayConnectionRouter } from "./railway-connection-router";
import { registerRenderConnectionRouter } from "./render-connection-router";
import { registerTeamCityConnectionRouter } from "./teamcity-connection-router";
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
@ -66,5 +67,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.GitLab]: registerGitLabConnectionRouter,
[AppConnection.Cloudflare]: registerCloudflareConnectionRouter,
[AppConnection.Bitbucket]: registerBitbucketConnectionRouter,
[AppConnection.Zabbix]: registerZabbixConnectionRouter
[AppConnection.Zabbix]: registerZabbixConnectionRouter,
[AppConnection.Railway]: registerRailwayConnectionRouter
};

View File

@ -0,0 +1,67 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateRailwayConnectionSchema,
SanitizedRailwayConnectionSchema,
UpdateRailwayConnectionSchema
} from "@app/services/app-connection/railway";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerRailwayConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Railway,
server,
sanitizedResponseSchema: SanitizedRailwayConnectionSchema,
createSchema: CreateRailwayConnectionSchema,
updateSchema: UpdateRailwayConnectionSchema
});
// The below endpoints are not exposed and are for Infisical App use only
server.route({
method: "GET",
url: `/:connectionId/projects`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
projects: z
.object({
name: z.string(),
id: z.string(),
services: z.array(
z.object({
name: z.string(),
id: z.string()
})
),
environments: z.array(
z.object({
name: z.string(),
id: z.string()
})
)
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const projects = await server.services.appConnection.railway.listProjects(connectionId, req.permission);
return { projects };
}
});
};
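
A hedged sketch of consuming the internal projects endpoint above from a script; only the relative `/:connectionId/projects` path appears in this file, so the base URL, route prefix, and bearer token are assumptions:

// Placeholders: adjust the base URL, API prefix, and token for a real deployment.
const INFISICAL_URL = "https://app.infisical.com";
const connectionId = "00000000-0000-0000-0000-000000000000";

const res = await fetch(`${INFISICAL_URL}/api/v1/app-connections/railway/${connectionId}/projects`, {
  headers: { Authorization: `Bearer ${process.env.INFISICAL_TOKEN}` }
});

const { projects } = (await res.json()) as {
  projects: {
    id: string;
    name: string;
    services: { id: string; name: string }[];
    environments: { id: string; name: string }[];
  }[];
};

console.log(projects.map((p) => `${p.name}: ${p.environments.length} environment(s)`));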

View File

@ -9,7 +9,7 @@ import { DASHBOARD } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, secretsLimit } from "@app/server/config/rateLimiter";
import { secretsLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { getUserAgentType } from "@app/server/plugins/audit-log";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -1354,128 +1354,4 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
return { secrets };
}
});
server.route({
method: "GET",
url: "/project-overview",
config: {
rateLimit: readLimit
},
schema: {
security: [
{
bearerAuth: []
}
],
querystring: z.object({
projectId: z.string(),
projectSlug: z.string()
}),
response: {
200: z.object({
accessControl: z.object({
userCount: z.number(),
machineIdentityCount: z.number(),
groupCount: z.number()
}),
secretsManagement: z.object({
secretCount: z.number(),
environmentCount: z.number(),
pendingApprovalCount: z.number()
}),
certificateManagement: z.object({
internalCaCount: z.number(),
externalCaCount: z.number(),
expiryCount: z.number()
}),
kms: z.object({
keyCount: z.number(),
kmipClientCount: z.number()
}),
ssh: z.object({
hostCount: z.number(),
hostGroupCount: z.number()
}),
secretScanning: z.object({
dataSourceCount: z.number(),
resourceCount: z.number(),
findingCount: z.number()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId, projectSlug },
permission
} = req;
const userCount = await server.services.projectMembership.getProjectMembershipCount(projectId, permission);
const machineIdentityCount = await server.services.identityProject.getProjectIdentityCount(projectId, permission);
const groupCount = await server.services.groupProject.getProjectGroupCount(projectId, permission);
const secretsManagement = await server.services.secret.getProjectSecretResourcesCount(projectId, permission);
const accessApprovals = await server.services.accessApprovalRequest.getCount({
projectSlug,
actor: permission.type,
actorId: permission.id,
actorOrgId: permission.orgId,
actorAuthMethod: permission.authMethod
});
const secretApprovals = await server.services.secretApprovalRequest.requestCount({
projectId,
actor: permission.type,
actorId: permission.id,
actorOrgId: permission.orgId,
actorAuthMethod: permission.authMethod
});
const certificateAuthorityCount = await server.services.certificateAuthority.getProjectCertificateAuthorityCount(
projectId,
permission
);
const expiryCount = await server.services.certificate.getProjectExpiringCertificates(projectId, permission);
const keyCount = await server.services.cmek.getProjectKeyCount(projectId, permission);
const kmipClientCount = await server.services.kmip.getProjectClientCount(projectId, permission);
const hostCount = await server.services.sshHost.getProjectHostCount(projectId, permission);
const hostGroupCount = await server.services.sshHostGroup.getProjectHostGroupCount(projectId, permission);
const secretScanning = await server.services.secretScanningV2.getProjectResourcesCount(projectId, permission);
return {
accessControl: {
userCount,
machineIdentityCount,
groupCount
},
secretsManagement: {
...secretsManagement,
pendingApprovalCount: accessApprovals.count.pendingCount + secretApprovals.open
},
certificateManagement: {
...certificateAuthorityCount,
expiryCount
},
kms: {
keyCount,
kmipClientCount
},
ssh: {
hostCount,
hostGroupCount
},
secretScanning
};
}
});
};

View File

@ -17,6 +17,7 @@ import { registerGitLabSyncRouter } from "./gitlab-sync-router";
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
import { registerHerokuSyncRouter } from "./heroku-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerRailwaySyncRouter } from "./railway-sync-router";
import { registerRenderSyncRouter } from "./render-sync-router";
import { registerTeamCitySyncRouter } from "./teamcity-sync-router";
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
@ -49,5 +50,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.Flyio]: registerFlyioSyncRouter,
[SecretSync.GitLab]: registerGitLabSyncRouter,
[SecretSync.CloudflarePages]: registerCloudflarePagesSyncRouter,
[SecretSync.Zabbix]: registerZabbixSyncRouter
[SecretSync.Zabbix]: registerZabbixSyncRouter,
[SecretSync.Railway]: registerRailwaySyncRouter
};

View File

@ -0,0 +1,17 @@
import {
CreateRailwaySyncSchema,
RailwaySyncSchema,
UpdateRailwaySyncSchema
} from "@app/services/secret-sync/railway/railway-sync-schemas";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerRailwaySyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Railway,
server,
responseSchema: RailwaySyncSchema,
createSchema: CreateRailwaySyncSchema,
updateSchema: UpdateRailwaySyncSchema
});

View File

@ -34,6 +34,7 @@ import { GitLabSyncListItemSchema, GitLabSyncSchema } from "@app/services/secret
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/secret-sync/teamcity";
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
@ -64,7 +65,8 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
FlyioSyncSchema,
GitLabSyncSchema,
CloudflarePagesSyncSchema,
ZabbixSyncSchema
ZabbixSyncSchema,
RailwaySyncSchema
]);
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@ -90,7 +92,8 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
FlyioSyncListItemSchema,
GitLabSyncListItemSchema,
CloudflarePagesSyncListItemSchema,
ZabbixSyncListItemSchema
ZabbixSyncListItemSchema,
RailwaySyncListItemSchema
]);
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {

View File

@ -28,8 +28,9 @@ export enum AppConnection {
Flyio = "flyio",
GitLab = "gitlab",
Cloudflare = "cloudflare",
Bitbucket = "bitbucket",
Zabbix = "zabbix"
Zabbix = "zabbix",
Railway = "railway",
Bitbucket = "bitbucket"
}
export enum AWSRegion {

View File

@ -91,6 +91,7 @@ import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
import { RenderConnectionMethod } from "./render/render-connection-enums";
import { getRenderConnectionListItem, validateRenderConnectionCredentials } from "./render/render-connection-fns";
import {
@ -143,8 +144,9 @@ export const listAppConnectionOptions = () => {
getFlyioConnectionListItem(),
getGitLabConnectionListItem(),
getCloudflareConnectionListItem(),
getBitbucketConnectionListItem(),
getZabbixConnectionListItem()
getZabbixConnectionListItem(),
getRailwayConnectionListItem(),
getBitbucketConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@ -225,8 +227,9 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Flyio]: validateFlyioConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.GitLab]: validateGitLabConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Cloudflare]: validateCloudflareConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Bitbucket]: validateBitbucketConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.Zabbix]: validateZabbixConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Railway]: validateRailwayConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Bitbucket]: validateBitbucketConnectionCredentials as TAppConnectionCredentialsValidator
};
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection);
@ -345,8 +348,9 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Flyio]: platformManagedCredentialsNotSupported,
[AppConnection.GitLab]: platformManagedCredentialsNotSupported,
[AppConnection.Cloudflare]: platformManagedCredentialsNotSupported,
[AppConnection.Bitbucket]: platformManagedCredentialsNotSupported,
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported
[AppConnection.Zabbix]: platformManagedCredentialsNotSupported,
[AppConnection.Railway]: platformManagedCredentialsNotSupported,
[AppConnection.Bitbucket]: platformManagedCredentialsNotSupported
};
export const enterpriseAppCheck = async (

View File

@ -30,8 +30,9 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Flyio]: "Fly.io",
[AppConnection.GitLab]: "GitLab",
[AppConnection.Cloudflare]: "Cloudflare",
[AppConnection.Bitbucket]: "Bitbucket",
[AppConnection.Zabbix]: "Zabbix"
[AppConnection.Zabbix]: "Zabbix",
[AppConnection.Railway]: "Railway",
[AppConnection.Bitbucket]: "Bitbucket"
};
export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanType> = {
@ -64,6 +65,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.Flyio]: AppConnectionPlanType.Regular,
[AppConnection.GitLab]: AppConnectionPlanType.Regular,
[AppConnection.Cloudflare]: AppConnectionPlanType.Regular,
[AppConnection.Bitbucket]: AppConnectionPlanType.Regular,
[AppConnection.Zabbix]: AppConnectionPlanType.Regular
[AppConnection.Zabbix]: AppConnectionPlanType.Regular,
[AppConnection.Railway]: AppConnectionPlanType.Regular,
[AppConnection.Bitbucket]: AppConnectionPlanType.Regular
};

View File

@ -72,6 +72,8 @@ import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
import { ValidateRailwayConnectionCredentialsSchema } from "./railway";
import { railwayConnectionService } from "./railway/railway-connection-service";
import { ValidateRenderConnectionCredentialsSchema } from "./render/render-connection-schema";
import { renderConnectionService } from "./render/render-connection-service";
import { ValidateTeamCityConnectionCredentialsSchema } from "./teamcity";
@ -124,8 +126,9 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.Flyio]: ValidateFlyioConnectionCredentialsSchema,
[AppConnection.GitLab]: ValidateGitLabConnectionCredentialsSchema,
[AppConnection.Cloudflare]: ValidateCloudflareConnectionCredentialsSchema,
[AppConnection.Bitbucket]: ValidateBitbucketConnectionCredentialsSchema,
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema
[AppConnection.Zabbix]: ValidateZabbixConnectionCredentialsSchema,
[AppConnection.Railway]: ValidateRailwayConnectionCredentialsSchema,
[AppConnection.Bitbucket]: ValidateBitbucketConnectionCredentialsSchema
};
export const appConnectionServiceFactory = ({
@ -536,7 +539,8 @@ export const appConnectionServiceFactory = ({
flyio: flyioConnectionService(connectAppConnectionById),
gitlab: gitlabConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
cloudflare: cloudflareConnectionService(connectAppConnectionById),
bitbucket: bitbucketConnectionService(connectAppConnectionById),
zabbix: zabbixConnectionService(connectAppConnectionById)
zabbix: zabbixConnectionService(connectAppConnectionById),
railway: railwayConnectionService(connectAppConnectionById),
bitbucket: bitbucketConnectionService(connectAppConnectionById)
};
};

View File

@ -141,6 +141,12 @@ import {
TPostgresConnectionInput,
TValidatePostgresConnectionCredentialsSchema
} from "./postgres";
import {
TRailwayConnection,
TRailwayConnectionConfig,
TRailwayConnectionInput,
TValidateRailwayConnectionCredentialsSchema
} from "./railway";
import {
TRenderConnection,
TRenderConnectionConfig,
@ -210,6 +216,7 @@ export type TAppConnection = { id: string } & (
| TCloudflareConnection
| TBitbucketConnection
| TZabbixConnection
| TRailwayConnection
);
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
@ -248,6 +255,7 @@ export type TAppConnectionInput = { id: string } & (
| TCloudflareConnectionInput
| TBitbucketConnectionInput
| TZabbixConnectionInput
| TRailwayConnectionInput
);
export type TSqlConnectionInput =
@ -293,7 +301,8 @@ export type TAppConnectionConfig =
| TGitLabConnectionConfig
| TCloudflareConnectionConfig
| TBitbucketConnectionConfig
| TZabbixConnectionConfig;
| TZabbixConnectionConfig
| TRailwayConnectionConfig;
export type TValidateAppConnectionCredentialsSchema =
| TValidateAwsConnectionCredentialsSchema
@ -326,7 +335,8 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateGitLabConnectionCredentialsSchema
| TValidateCloudflareConnectionCredentialsSchema
| TValidateBitbucketConnectionCredentialsSchema
| TValidateZabbixConnectionCredentialsSchema;
| TValidateZabbixConnectionCredentialsSchema
| TValidateRailwayConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = {
connectionId: string;

View File

@ -0,0 +1,4 @@
export * from "./railway-connection-constants";
export * from "./railway-connection-fns";
export * from "./railway-connection-schemas";
export * from "./railway-connection-types";

View File

@ -0,0 +1,5 @@
export enum RailwayConnectionMethod {
AccountToken = "account-token",
ProjectToken = "project-token",
TeamToken = "team-token"
}

View File

@ -0,0 +1,66 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { RailwayConnectionMethod } from "./railway-connection-constants";
import { RailwayPublicAPI } from "./railway-connection-public-client";
import { TRailwayConnection, TRailwayConnectionConfig } from "./railway-connection-types";
export const getRailwayConnectionListItem = () => {
return {
name: "Railway" as const,
app: AppConnection.Railway as const,
methods: Object.values(RailwayConnectionMethod)
};
};
export const validateRailwayConnectionCredentials = async (config: TRailwayConnectionConfig) => {
const { credentials, method } = config;
try {
await RailwayPublicAPI.healthcheck({
method,
credentials
});
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to validate connection - verify credentials"
});
}
return credentials;
};
export const listProjects = async (appConnection: TRailwayConnection) => {
const { credentials, method } = appConnection;
try {
return await RailwayPublicAPI.listProjects({
method,
credentials
});
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to list projects: ${error.message || "Unknown error"}`
});
}
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: "Unable to list projects",
error
});
}
};

View File

@ -0,0 +1,237 @@
/* eslint-disable class-methods-use-this */
import { AxiosError, AxiosInstance, AxiosResponse } from "axios";
import { createRequestClient } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { RailwayConnectionMethod } from "./railway-connection-constants";
import {
RailwayAccountWorkspaceListSchema,
RailwayGetProjectsByProjectTokenSchema,
RailwayGetSubscriptionTypeSchema,
RailwayProjectsListSchema
} from "./railway-connection-schemas";
import { RailwayProject, TRailwayConnectionConfig, TRailwayResponse } from "./railway-connection-types";
type RailwaySendReqOptions = Pick<TRailwayConnectionConfig, "credentials" | "method">;
export function getRailwayAuthHeaders(method: RailwayConnectionMethod, token: string): Record<string, string> {
switch (method) {
case RailwayConnectionMethod.AccountToken:
case RailwayConnectionMethod.TeamToken:
return {
Authorization: token
};
case RailwayConnectionMethod.ProjectToken:
return {
"Project-Access-Token": token
};
default:
throw new Error(`Unsupported Railway connection method`);
}
}
export function getRailwayRatelimiter(headers: AxiosResponse["headers"]): {
isRatelimited: boolean;
maxAttempts: number;
wait: () => Promise<void>;
} {
const retryAfter: number | undefined = headers["Retry-After"] as number | undefined;
const requestsLeft = parseInt(headers["X-RateLimit-Remaining"] as string, 10);
const limitResetAt = headers["X-RateLimit-Reset"] as string;
const now = +new Date();
const nextReset = +new Date(limitResetAt);
const remaining = Math.max(0, nextReset - now); // clamp to a non-negative wait duration
const wait = () => {
return new Promise<void>((res) => {
setTimeout(res, remaining);
});
};
return {
isRatelimited: Boolean(retryAfter || requestsLeft === 0),
wait,
maxAttempts: 3
};
}
class RailwayPublicClient {
private client: AxiosInstance;
constructor() {
this.client = createRequestClient({
method: "POST",
baseURL: IntegrationUrls.RAILWAY_API_URL,
headers: {
"Content-Type": "application/json"
}
});
}
async send<T extends TRailwayResponse>(
query: string,
options: RailwaySendReqOptions,
variables: Record<string, string | Record<string, string>> = {},
retryAttempt: number = 0
): Promise<T["data"] | undefined> {
const body = {
query,
variables
};
const response = await this.client.request<T>({
data: body,
headers: getRailwayAuthHeaders(options.method, options.credentials.apiToken)
});
const { errors } = response.data;
if (Array.isArray(errors) && errors.length > 0) {
throw new AxiosError(errors[0].message);
}
const limiter = getRailwayRatelimiter(response.headers);
if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
await limiter.wait();
return this.send(query, options, variables, retryAttempt + 1);
}
return response.data.data;
}
healthcheck(config: RailwaySendReqOptions) {
switch (config.method) {
case RailwayConnectionMethod.AccountToken:
return this.send(`{ me { teams { edges { node { id } } } } }`, config);
case RailwayConnectionMethod.ProjectToken:
return this.send(`{ projectToken { projectId environmentId project { id } } }`, config);
case RailwayConnectionMethod.TeamToken:
return this.send(`{ projects { edges { node { id name team { id } } } } }`, config);
default:
throw new Error(`Unsupported Railway connection method`);
}
}
async getSubscriptionType(config: RailwaySendReqOptions & { projectId: string }) {
const res = await this.send(
`query project($projectId: String!) { project(id: $projectId) { subscriptionType }}`,
config,
{
projectId: config.projectId
}
);
const data = await RailwayGetSubscriptionTypeSchema.parseAsync(res);
return data.project.subscriptionType;
}
async listProjects(config: RailwaySendReqOptions): Promise<RailwayProject[]> {
switch (config.method) {
case RailwayConnectionMethod.TeamToken: {
const res = await this.send(
`{ projects { edges { node { id, name, services{ edges{ node { id, name } } } environments { edges { node { name, id } } } } } } }`,
config
);
const data = await RailwayProjectsListSchema.parseAsync(res);
return data.projects.edges.map((p) => ({
id: p.node.id,
name: p.node.name,
environments: p.node.environments.edges.map((e) => e.node),
services: p.node.services.edges.map((s) => s.node)
}));
}
case RailwayConnectionMethod.AccountToken: {
const res = await this.send(
`{ me { workspaces { id, name, team{ projects{ edges{ node{ id, name, services{ edges { node { name, id } } } environments { edges { node { name, id } } } } } } } } } }`,
config
);
const data = await RailwayAccountWorkspaceListSchema.parseAsync(res);
return data.me.workspaces.flatMap((w) =>
w.team.projects.edges.map((p) => ({
id: p.node.id,
name: p.node.name,
environments: p.node.environments.edges.map((e) => e.node),
services: p.node.services.edges.map((s) => s.node)
}))
);
}
case RailwayConnectionMethod.ProjectToken: {
const res = await this.send(
`query { projectToken { project { id, name, services { edges { node { name, id } } } environments { edges { node { name, id } } } } } }`,
config
);
const data = await RailwayGetProjectsByProjectTokenSchema.parseAsync(res);
const p = data.projectToken.project;
return [
{
id: p.id,
name: p.name,
environments: p.environments.edges.map((e) => e.node),
services: p.services.edges.map((s) => s.node)
}
];
}
default:
throw new Error(`Unsupported Railway connection method`);
}
}
async getVariables(
config: RailwaySendReqOptions,
variables: { projectId: string; environmentId: string; serviceId?: string }
) {
const res = await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
`query variables($environmentId: String!, $projectId: String!, $serviceId: String) { variables( projectId: $projectId, environmentId: $environmentId, serviceId: $serviceId ) }`,
config,
variables
);
if (!res?.variables) {
throw new BadRequestError({
message: "Failed to get railway variables - empty response"
});
}
return res.variables;
}
async deleteVariable(
config: RailwaySendReqOptions,
variables: { input: { projectId: string; environmentId: string; name: string; serviceId?: string } }
) {
await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
`mutation variableDelete($input: VariableDeleteInput!) { variableDelete(input: $input) }`,
config,
variables
);
}
async upsertVariable(
config: RailwaySendReqOptions,
variables: { input: { projectId: string; environmentId: string; name: string; value: string; serviceId?: string } }
) {
await this.send<TRailwayResponse<{ variables: Record<string, string> }>>(
`mutation variableUpsert($input: VariableUpsertInput!) { variableUpsert(input: $input) }`,
config,
variables
);
}
}
export const RailwayPublicAPI = new RailwayPublicClient();
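
Usage sketch for the client above; the token env var and IDs are placeholders, and error handling is omitted:

import { RailwayPublicAPI } from "./railway-connection-public-client";
import { RailwayConnectionMethod } from "./railway-connection-constants";

const config = {
  method: RailwayConnectionMethod.ProjectToken,
  credentials: { apiToken: process.env.RAILWAY_PROJECT_TOKEN as string } // placeholder env var
};

// Upsert one variable, then read the environment's variables back.
await RailwayPublicAPI.upsertVariable(config, {
  input: {
    projectId: "<project-id>", // placeholder IDs
    environmentId: "<environment-id>",
    name: "EXAMPLE_KEY",
    value: "example-value"
  }
});

const variables = await RailwayPublicAPI.getVariables(config, {
  projectId: "<project-id>",
  environmentId: "<environment-id>"
});
console.log(Object.keys(variables));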

View File

@ -0,0 +1,117 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { RailwayConnectionMethod } from "./railway-connection-constants";
export const RailwayConnectionMethodSchema = z
.nativeEnum(RailwayConnectionMethod)
.describe(AppConnections.CREATE(AppConnection.Railway).method);
export const RailwayConnectionAccessTokenCredentialsSchema = z.object({
apiToken: z
.string()
.trim()
.min(1, "API Token required")
.max(255)
.describe(AppConnections.CREDENTIALS.RAILWAY.apiToken)
});
const BaseRailwayConnectionSchema = BaseAppConnectionSchema.extend({
app: z.literal(AppConnection.Railway)
});
export const RailwayConnectionSchema = BaseRailwayConnectionSchema.extend({
method: RailwayConnectionMethodSchema,
credentials: RailwayConnectionAccessTokenCredentialsSchema
});
export const SanitizedRailwayConnectionSchema = z.discriminatedUnion("method", [
BaseRailwayConnectionSchema.extend({
method: RailwayConnectionMethodSchema,
credentials: RailwayConnectionAccessTokenCredentialsSchema.pick({})
})
]);
export const ValidateRailwayConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: RailwayConnectionMethodSchema,
credentials: RailwayConnectionAccessTokenCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.Railway).credentials
)
})
]);
export const CreateRailwayConnectionSchema = ValidateRailwayConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Railway)
);
export const UpdateRailwayConnectionSchema = z
.object({
credentials: RailwayConnectionAccessTokenCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.Railway).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Railway));
export const RailwayConnectionListItemSchema = z.object({
name: z.literal("Railway"),
app: z.literal(AppConnection.Railway),
methods: z.nativeEnum(RailwayConnectionMethod).array()
});
export const RailwayResourceSchema = z.object({
node: z.object({
id: z.string(),
name: z.string()
})
});
export const RailwayProjectEdgeSchema = z.object({
node: z.object({
id: z.string(),
name: z.string(),
services: z.object({
edges: z.array(RailwayResourceSchema)
}),
environments: z.object({
edges: z.array(RailwayResourceSchema)
})
})
});
export const RailwayProjectsListSchema = z.object({
projects: z.object({
edges: z.array(RailwayProjectEdgeSchema)
})
});
export const RailwayAccountWorkspaceListSchema = z.object({
me: z.object({
workspaces: z.array(
z.object({
id: z.string(),
name: z.string(),
team: RailwayProjectsListSchema
})
)
})
});
export const RailwayGetProjectsByProjectTokenSchema = z.object({
projectToken: z.object({
project: RailwayProjectEdgeSchema.shape.node
})
});
export const RailwayGetSubscriptionTypeSchema = z.object({
project: z.object({
subscriptionType: z.enum(["free", "hobby", "pro", "trial"])
})
});

View File

@ -0,0 +1,30 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listProjects as getRailwayProjects } from "./railway-connection-fns";
import { TRailwayConnection } from "./railway-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TRailwayConnection>;
export const railwayConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listProjects = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Railway, connectionId, actor);
try {
const projects = await getRailwayProjects(appConnection);
return projects;
} catch (error) {
logger.error(error, "Failed to establish connection with Railway");
return [];
}
};
return {
listProjects
};
};

View File

@ -0,0 +1,79 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateRailwayConnectionSchema,
RailwayConnectionSchema,
ValidateRailwayConnectionCredentialsSchema
} from "./railway-connection-schemas";
export type TRailwayConnection = z.infer<typeof RailwayConnectionSchema>;
export type TRailwayConnectionInput = z.infer<typeof CreateRailwayConnectionSchema> & {
app: AppConnection.Railway;
};
export type TValidateRailwayConnectionCredentialsSchema = typeof ValidateRailwayConnectionCredentialsSchema;
export type TRailwayConnectionConfig = DiscriminativePick<TRailwayConnection, "method" | "app" | "credentials"> & {
orgId: string;
};
export type TRailwayService = {
id: string;
name: string;
};
export type TRailwayEnvironment = {
id: string;
name: string;
};
export type RailwayProject = {
id: string;
name: string;
services: TRailwayService[];
environments: TRailwayEnvironment[];
};
export type TRailwayResponse<T = unknown> = {
data?: T;
errors?: {
message: string;
}[];
};
export type TAccountProjectListResponse = TRailwayResponse<{
projects: {
edges: TProjectEdge[];
};
}>;
export interface TProjectEdge {
node: {
id: string;
name: string;
services: {
edges: TServiceEdge[];
};
environments: {
edges: TEnvironmentEdge[];
};
};
}
type TServiceEdge = {
node: {
id: string;
name: string;
};
};
type TEnvironmentEdge = {
node: {
id: string;
name: string;
};
};

View File

@ -47,9 +47,8 @@ type TCertificateAuthorityServiceFactoryDep = {
| "findByIdWithAssociatedCa"
| "findWithAssociatedCa"
| "findByNameAndProjectIdWithAssociatedCa"
| "find"
>;
externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "create" | "update" | "find">;
externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "create" | "update">;
internalCertificateAuthorityService: TInternalCertificateAuthorityServiceFactory;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug" | "findOne" | "updateById" | "findById" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
@ -383,36 +382,11 @@ export const certificateAuthorityServiceFactory = ({
throw new BadRequestError({ message: "Invalid certificate authority type" });
};
const getProjectCertificateAuthorityCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const cas = await certificateAuthorityDAL.find({ projectId });
const externalCas = await externalCertificateAuthorityDAL.find(
{ $in: { caId: cas.map((ca) => ca.id) } },
{ count: true }
);
const externalCaCount = Number(externalCas?.[0]?.count ?? 0);
return {
externalCaCount,
internalCaCount: cas.length - externalCaCount
};
};
return {
createCertificateAuthority,
findCertificateAuthorityByNameAndProjectId,
listCertificateAuthoritiesByProjectId,
updateCertificateAuthority,
deleteCertificateAuthority,
getProjectCertificateAuthorityCount
deleteCertificateAuthority
};
};

View File

@ -9,7 +9,6 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
@ -601,38 +600,6 @@ export const certificateServiceFactory = ({
};
};
const getProjectExpiringCertificates = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const fourteenDaysFromNow = new Date(new Date().setDate(new Date().getDate() + 14));
const expiringCertificates = await certificateDAL.find(
{
projectId,
$complex: {
operator: "and",
value: [
{
operator: "lte",
field: "notAfter",
value: fourteenDaysFromNow
}
]
}
},
{ count: true }
);
return Number(expiringCertificates?.[0]?.count ?? 0);
};
return {
getCert,
getCertPrivateKey,
@ -640,7 +607,6 @@ export const certificateServiceFactory = ({
revokeCert,
getCertBody,
importCert,
getCertBundle,
getProjectExpiringCertificates
getCertBundle
};
};

View File

@ -375,26 +375,6 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
};
};
const getProjectKeyCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const keys = await kmsDAL.find(
{
projectId
},
{ count: true }
);
return Number(keys?.[0]?.count ?? 0);
};
return {
createCmek,
updateCmekById,
@ -407,7 +387,6 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC
cmekSign,
cmekVerify,
listSigningAlgorithms,
getPublicKey,
getProjectKeyCount
getPublicKey
};
};

View File

@ -13,7 +13,6 @@ import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { OrgServiceActor } from "@app/lib/types";
import { isUuidV4 } from "@app/lib/validator";
import { TGroupDALFactory } from "../../ee/services/group/group-dal";
@ -34,10 +33,7 @@ import {
} from "./group-project-types";
type TGroupProjectServiceFactoryDep = {
groupProjectDAL: Pick<
TGroupProjectDALFactory,
"findOne" | "transaction" | "create" | "delete" | "findByProjectId" | "find"
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "findOne" | "transaction" | "create" | "delete" | "findByProjectId">;
groupProjectMembershipRoleDAL: Pick<
TGroupProjectMembershipRoleDALFactory,
"create" | "transaction" | "insertMany" | "delete"
@ -512,33 +508,12 @@ export const groupProjectServiceFactory = ({
return { users: members, totalCount };
};
const getProjectGroupCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const projectGroups = await groupProjectDAL.find(
{
projectId
},
{ count: true }
);
return Number(projectGroups?.[0]?.count ?? 0);
};
return {
addGroupToProject,
updateGroupInProject,
removeGroupFromProject,
listGroupsInProject,
getGroupInProject,
listProjectGroupUsers,
getProjectGroupCount
listProjectGroupUsers
};
};

View File

@ -10,7 +10,6 @@ import { ProjectPermissionIdentityActions, ProjectPermissionSub } from "@app/ee/
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { OrgServiceActor } from "@app/lib/types";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
import { TProjectDALFactory } from "../project/project-dal";
@ -404,33 +403,12 @@ export const identityProjectServiceFactory = ({
return identityMembership;
};
const getProjectIdentityCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const identityMemberships = await identityProjectDAL.find(
{
projectId
},
{ count: true }
);
return Number(identityMemberships?.[0]?.count ?? 0);
};
return {
createProjectIdentity,
updateProjectIdentity,
deleteProjectIdentity,
listProjectIdentities,
getProjectIdentityByIdentityId,
getProjectIdentityByMembershipId,
getProjectIdentityCount
getProjectIdentityByMembershipId
};
};

View File

@ -108,16 +108,16 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
const now = new Date();
const oneWeekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
const oneMonthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
const threeMonthsAgo = new Date(now.getTime() - 90 * 24 * 60 * 60 * 1000);
const twelveMonthsAgo = new Date(now.getTime() - 360 * 24 * 60 * 60 * 1000);
const memberships = await db
.replicaNode()(TableName.OrgMembership)
.where("status", "invited")
.where((qb) => {
// lastInvitedAt is null AND createdAt is between 1 week and 3 months ago
// lastInvitedAt is null AND createdAt is between 1 week and 12 months ago
void qb
.whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
.whereBetween(`${TableName.OrgMembership}.createdAt`, [threeMonthsAgo, oneWeekAgo]);
.whereBetween(`${TableName.OrgMembership}.createdAt`, [twelveMonthsAgo, oneWeekAgo]);
})
.orWhere((qb) => {
// lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago

View File

@ -36,6 +36,8 @@ import { getConfig } from "@app/lib/config/env";
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
import { generateSymmetricKey, infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { applyJitter } from "@app/lib/dates";
import { delay as delayMs } from "@app/lib/delay";
import {
BadRequestError,
ForbiddenRequestError,
@ -44,9 +46,10 @@ import {
UnauthorizedError
} from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { isDisposableEmail } from "@app/lib/validator";
import { TQueueServiceFactory } from "@app/queue";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@ -1438,6 +1441,8 @@ export const orgServiceFactory = ({
* Re-send emails to users who haven't accepted an invite yet
*/
const notifyInvitedUsers = async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users started`);
const invitedUsers = await orgMembershipDAL.findRecentInvitedMemberships();
const appCfg = getConfig();
@ -1461,24 +1466,32 @@ export const orgServiceFactory = ({
});
if (invitedUser.inviteEmail) {
await smtpService.sendMail({
template: SmtpTemplates.OrgInvite,
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
recipients: [invitedUser.inviteEmail],
substitutions: {
organizationName: org.name,
email: invitedUser.inviteEmail,
organizationId: org.id.toString(),
token,
callback_url: `${appCfg.SITE_URL}/signupinvite`
}
});
notifiedUsers.push(invitedUser.id);
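// Wait a short randomized interval (up to roughly two seconds) before sending each reminder email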
await delayMs(Math.max(0, applyJitter(0, 2000)));
try {
await smtpService.sendMail({
template: SmtpTemplates.OrgInvite,
subjectLine: `Reminder: You have been invited to ${org.name} on Infisical`,
recipients: [invitedUser.inviteEmail],
substitutions: {
organizationName: org.name,
email: invitedUser.inviteEmail,
organizationId: org.id.toString(),
token,
callback_url: `${appCfg.SITE_URL}/signupinvite`
}
});
notifiedUsers.push(invitedUser.id);
} catch (err) {
logger.error(err, `${QueueName.DailyResourceCleanUp}: notify invited users failed to send email`);
}
}
})
);
await orgMembershipDAL.updateLastInvitedAtByIds(notifiedUsers);
logger.info(`${QueueName.DailyResourceCleanUp}: notify invited users completed`);
};
return {

View File

@ -14,7 +14,6 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { OrgServiceActor } from "@app/lib/types";
import { TUserGroupMembershipDALFactory } from "../../ee/services/group/user-group-membership-dal";
import { ActorType } from "../auth/auth-type";
@ -568,35 +567,6 @@ export const projectMembershipServiceFactory = ({
return deletedMembership;
};
const getProjectMembershipCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const projectMemberships = await projectMembershipDAL.find({
projectId
});
const users = await userDAL.find(
{
$in: {
id: projectMemberships.map((membership) => membership.userId)
},
isGhost: false
},
{
count: true
}
);
return Number(users?.[0]?.count ?? 0);
};
return {
getProjectMemberships,
getProjectMembershipByUsername,
@ -605,7 +575,6 @@ export const projectMembershipServiceFactory = ({
deleteProjectMembership, // TODO: Remove this
addUsersToProject,
leaveProject,
getProjectMembershipById,
getProjectMembershipCount
getProjectMembershipById
};
};

View File

@ -214,7 +214,7 @@ export const secretFolderServiceFactory = ({
}
},
message: "Folder created",
folderId: doc.id,
folderId: parentFolder.id,
changes: [
{
type: CommitType.ADD,

View File

@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";
export const RAILWAY_SYNC_LIST_OPTION: TSecretSyncListItem = {
name: "Railway",
destination: SecretSync.Railway,
connection: AppConnection.Railway,
canImportSecrets: true
};

View File

@ -0,0 +1,124 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { RailwayPublicAPI } from "@app/services/app-connection/railway/railway-connection-public-client";
import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns";
import { SecretSyncError } from "../secret-sync-errors";
import { TSecretMap } from "../secret-sync-types";
import { TRailwaySyncWithCredentials } from "./railway-sync-types";
export const RailwaySyncFns = {
async getSecrets(secretSync: TRailwaySyncWithCredentials): Promise<TSecretMap> {
try {
const config = secretSync.destinationConfig;
const variables = await RailwayPublicAPI.getVariables(secretSync.connection, {
projectId: config.projectId,
environmentId: config.environmentId,
serviceId: config.serviceId || undefined
});
const entries = {} as TSecretMap;
for (const [key, value] of Object.entries(variables)) {
// Skip importing private railway variables
// eslint-disable-next-line no-continue
if (key.startsWith("RAILWAY_")) continue;
entries[key] = {
value
};
}
return entries;
} catch (error) {
throw new SecretSyncError({
error,
message: "Failed to import secrets from Railway"
});
}
},
async syncSecrets(secretSync: TRailwaySyncWithCredentials, secretMap: TSecretMap) {
const {
environment,
syncOptions: { disableSecretDeletion, keySchema }
} = secretSync;
const railwaySecrets = await this.getSecrets(secretSync);
const config = secretSync.destinationConfig;
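// Create or update any variable whose value is missing from Railway or differs from the Infisical secret map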
for await (const key of Object.keys(secretMap)) {
try {
const existing = railwaySecrets[key];
if (existing === undefined || existing.value !== secretMap[key].value) {
await RailwayPublicAPI.upsertVariable(secretSync.connection, {
input: {
projectId: config.projectId,
environmentId: config.environmentId,
serviceId: config.serviceId || undefined,
name: key,
value: secretMap[key].value ?? ""
}
});
}
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
}
if (disableSecretDeletion) return;
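// Delete Railway variables that match the key schema but are no longer present in Infisical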
for await (const key of Object.keys(railwaySecrets)) {
try {
// eslint-disable-next-line no-continue
if (!matchesSchema(key, environment?.slug || "", keySchema)) continue;
if (!secretMap[key]) {
await RailwayPublicAPI.deleteVariable(secretSync.connection, {
input: {
projectId: config.projectId,
environmentId: config.environmentId,
serviceId: config.serviceId || undefined,
name: key
}
});
}
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
}
},
async removeSecrets(secretSync: TRailwaySyncWithCredentials, secretMap: TSecretMap) {
const existing = await this.getSecrets(secretSync);
const config = secretSync.destinationConfig;
for await (const secret of Object.keys(existing)) {
try {
if (secret in secretMap) {
await RailwayPublicAPI.deleteVariable(secretSync.connection, {
input: {
projectId: config.projectId,
environmentId: config.environmentId,
serviceId: config.serviceId || undefined,
name: secret
}
});
}
} catch (error) {
throw new SecretSyncError({
error,
secretKey: secret
});
}
}
}
};

View File

@ -0,0 +1,56 @@
import { z } from "zod";
import { SecretSyncs } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
BaseSecretSyncSchema,
GenericCreateSecretSyncFieldsSchema,
GenericUpdateSecretSyncFieldsSchema
} from "@app/services/secret-sync/secret-sync-schemas";
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";
const RailwaySyncDestinationConfigSchema = z.object({
projectId: z
.string()
.trim()
.min(1, "Railway project ID required")
.describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.projectId),
projectName: z.string().trim().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.projectName),
environmentId: z
.string()
.trim()
.min(1, "Railway environment ID required")
.describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.environmentId),
environmentName: z.string().trim().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.environmentName),
serviceId: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.serviceId),
serviceName: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.RAILWAY.serviceName)
});
const RailwaySyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
export const RailwaySyncSchema = BaseSecretSyncSchema(SecretSync.Railway, RailwaySyncOptionsConfig).extend({
destination: z.literal(SecretSync.Railway),
destinationConfig: RailwaySyncDestinationConfigSchema
});
export const CreateRailwaySyncSchema = GenericCreateSecretSyncFieldsSchema(
SecretSync.Railway,
RailwaySyncOptionsConfig
).extend({
destinationConfig: RailwaySyncDestinationConfigSchema
});
export const UpdateRailwaySyncSchema = GenericUpdateSecretSyncFieldsSchema(
SecretSync.Railway,
RailwaySyncOptionsConfig
).extend({
destinationConfig: RailwaySyncDestinationConfigSchema.optional()
});
export const RailwaySyncListItemSchema = z.object({
name: z.literal("Railway"),
connection: z.literal(AppConnection.Railway),
destination: z.literal(SecretSync.Railway),
canImportSecrets: z.literal(true)
});

View File

@ -0,0 +1,31 @@
import z from "zod";
import { TRailwayConnection } from "@app/services/app-connection/railway";
import { CreateRailwaySyncSchema, RailwaySyncListItemSchema, RailwaySyncSchema } from "./railway-sync-schemas";
export type TRailwaySyncListItem = z.infer<typeof RailwaySyncListItemSchema>;
export type TRailwaySync = z.infer<typeof RailwaySyncSchema>;
export type TRailwaySyncInput = z.infer<typeof CreateRailwaySyncSchema>;
export type TRailwaySyncWithCredentials = TRailwaySync & {
connection: TRailwayConnection;
};
export type TRailwaySecret = {
createdAt: string;
environmentId?: string | null;
id: string;
isSealed: boolean;
name: string;
serviceId?: string | null;
updatedAt: string;
};
export type TRailwayVariablesGraphResponse = {
data: {
variables: Record<string, string>;
};
};

View File

@ -21,7 +21,8 @@ export enum SecretSync {
Flyio = "flyio",
GitLab = "gitlab",
CloudflarePages = "cloudflare-pages",
Zabbix = "zabbix"
Zabbix = "zabbix",
Railway = "railway"
}
export enum SecretSyncInitialSyncBehavior {

View File

@ -39,6 +39,8 @@ import { HC_VAULT_SYNC_LIST_OPTION, HCVaultSyncFns } from "./hc-vault";
import { HEROKU_SYNC_LIST_OPTION, HerokuSyncFns } from "./heroku";
import { HUMANITEC_SYNC_LIST_OPTION } from "./humanitec";
import { HumanitecSyncFns } from "./humanitec/humanitec-sync-fns";
import { RAILWAY_SYNC_LIST_OPTION } from "./railway/railway-sync-constants";
import { RailwaySyncFns } from "./railway/railway-sync-fns";
import { RENDER_SYNC_LIST_OPTION, RenderSyncFns } from "./render";
import { SECRET_SYNC_PLAN_MAP } from "./secret-sync-maps";
import { TEAMCITY_SYNC_LIST_OPTION, TeamCitySyncFns } from "./teamcity";
@ -70,7 +72,8 @@ const SECRET_SYNC_LIST_OPTIONS: Record<SecretSync, TSecretSyncListItem> = {
[SecretSync.Flyio]: FLYIO_SYNC_LIST_OPTION,
[SecretSync.GitLab]: GITLAB_SYNC_LIST_OPTION,
[SecretSync.CloudflarePages]: CLOUDFLARE_PAGES_SYNC_LIST_OPTION,
[SecretSync.Zabbix]: ZABBIX_SYNC_LIST_OPTION
[SecretSync.Zabbix]: ZABBIX_SYNC_LIST_OPTION,
[SecretSync.Railway]: RAILWAY_SYNC_LIST_OPTION
};
export const listSecretSyncOptions = () => {
@ -240,6 +243,8 @@ export const SecretSyncFns = {
return CloudflarePagesSyncFns.syncSecrets(secretSync, schemaSecretMap);
case SecretSync.Zabbix:
return ZabbixSyncFns.syncSecrets(secretSync, schemaSecretMap);
case SecretSync.Railway:
return RailwaySyncFns.syncSecrets(secretSync, schemaSecretMap);
default:
throw new Error(
`Unhandled sync destination for sync secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
@ -335,6 +340,9 @@ export const SecretSyncFns = {
case SecretSync.Zabbix:
secretMap = await ZabbixSyncFns.getSecrets(secretSync);
break;
case SecretSync.Railway:
secretMap = await RailwaySyncFns.getSecrets(secretSync);
break;
default:
throw new Error(
`Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`
@ -414,6 +422,8 @@ export const SecretSyncFns = {
return CloudflarePagesSyncFns.removeSecrets(secretSync, schemaSecretMap);
case SecretSync.Zabbix:
return ZabbixSyncFns.removeSecrets(secretSync, schemaSecretMap);
case SecretSync.Railway:
return RailwaySyncFns.removeSecrets(secretSync, schemaSecretMap);
default:
throw new Error(
`Unhandled sync destination for remove secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}`

View File

@ -24,7 +24,8 @@ export const SECRET_SYNC_NAME_MAP: Record<SecretSync, string> = {
[SecretSync.Flyio]: "Fly.io",
[SecretSync.GitLab]: "GitLab",
[SecretSync.CloudflarePages]: "Cloudflare Pages",
[SecretSync.Zabbix]: "Zabbix"
[SecretSync.Zabbix]: "Zabbix",
[SecretSync.Railway]: "Railway"
};
export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
@ -50,7 +51,8 @@ export const SECRET_SYNC_CONNECTION_MAP: Record<SecretSync, AppConnection> = {
[SecretSync.Flyio]: AppConnection.Flyio,
[SecretSync.GitLab]: AppConnection.GitLab,
[SecretSync.CloudflarePages]: AppConnection.Cloudflare,
[SecretSync.Zabbix]: AppConnection.Zabbix
[SecretSync.Zabbix]: AppConnection.Zabbix,
[SecretSync.Railway]: AppConnection.Railway
};
export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
@ -76,5 +78,6 @@ export const SECRET_SYNC_PLAN_MAP: Record<SecretSync, SecretSyncPlanType> = {
[SecretSync.Flyio]: SecretSyncPlanType.Regular,
[SecretSync.GitLab]: SecretSyncPlanType.Regular,
[SecretSync.CloudflarePages]: SecretSyncPlanType.Regular,
[SecretSync.Zabbix]: SecretSyncPlanType.Regular
[SecretSync.Zabbix]: SecretSyncPlanType.Regular,
[SecretSync.Railway]: SecretSyncPlanType.Regular
};

View File

@ -94,6 +94,12 @@ import {
THumanitecSyncListItem,
THumanitecSyncWithCredentials
} from "./humanitec";
import {
TRailwaySync,
TRailwaySyncInput,
TRailwaySyncListItem,
TRailwaySyncWithCredentials
} from "./railway/railway-sync-types";
import {
TRenderSync,
TRenderSyncInput,
@ -138,7 +144,8 @@ export type TSecretSync =
| TFlyioSync
| TGitLabSync
| TCloudflarePagesSync
| TZabbixSync;
| TZabbixSync
| TRailwaySync;
export type TSecretSyncWithCredentials =
| TAwsParameterStoreSyncWithCredentials
@ -163,7 +170,8 @@ export type TSecretSyncWithCredentials =
| TFlyioSyncWithCredentials
| TGitLabSyncWithCredentials
| TCloudflarePagesSyncWithCredentials
| TZabbixSyncWithCredentials;
| TZabbixSyncWithCredentials
| TRailwaySyncWithCredentials;
export type TSecretSyncInput =
| TAwsParameterStoreSyncInput
@ -188,7 +196,8 @@ export type TSecretSyncInput =
| TFlyioSyncInput
| TGitLabSyncInput
| TCloudflarePagesSyncInput
| TZabbixSyncInput;
| TZabbixSyncInput
| TRailwaySyncInput;
export type TSecretSyncListItem =
| TAwsParameterStoreSyncListItem
@ -213,7 +222,8 @@ export type TSecretSyncListItem =
| TFlyioSyncListItem
| TGitLabSyncListItem
| TCloudflarePagesSyncListItem
| TZabbixSyncListItem;
| TZabbixSyncListItem
| TRailwaySyncListItem;
export type TSyncOptionsConfig = {
canImportSecrets: boolean;

View File

@ -811,7 +811,6 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => {
return {
...secretOrm,
rawFind: secretOrm.find,
update,
bulkUpdate,
deleteMany,

View File

@ -26,7 +26,6 @@ import { diff, groupBy } from "@app/lib/fn";
import { setKnexStringValue } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { OrgServiceActor } from "@app/lib/types";
import { ActorType } from "../auth/auth-type";
import { TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
@ -87,7 +86,7 @@ type TSecretV2BridgeServiceFactoryDep = {
secretTagDAL: TSecretTagDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "findBySlugs" | "find">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "findBySlugs">;
folderDAL: Pick<
TSecretFolderDALFactory,
| "findBySecretPath"
@ -2915,39 +2914,6 @@ export const secretV2BridgeServiceFactory = ({
});
};
const getProjectSecretResourcesCount = async (projectId: string, actor: OrgServiceActor) => {
// Anyone in the project should be able to get count.
await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId
});
const environments = await projectEnvDAL.find({
projectId
});
const folders = await folderDAL.find({
isReserved: false,
$in: {
envId: environments.map((env) => env.id)
}
});
const secrets = await secretDAL.rawFind(
{
$in: {
folderId: folders.map((folder) => folder.id)
}
},
{ countDistinct: "key" }
);
return { environmentCount: environments.length, secretCount: Number(secrets?.[0]?.count ?? 0) };
};
return {
createSecret,
deleteSecret,
@ -2967,7 +2933,6 @@ export const secretV2BridgeServiceFactory = ({
getSecretsByFolderMappings,
getSecretById,
getAccessibleSecrets,
getSecretVersionsByIds,
getProjectSecretResourcesCount
getSecretVersionsByIds
};
};

View File

@ -3339,20 +3339,6 @@ export const secretServiceFactory = ({
}));
};
const getProjectSecretResourcesCount = async (projectId: string, actor: OrgServiceActor) => {
const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
if (!shouldUseSecretV2Bridge)
throw new BadRequestError({
message: "Project version does not support pagination",
name: "PaginationNotSupportedError"
});
const count = await secretV2BridgeService.getProjectSecretResourcesCount(projectId, actor);
return count;
};
return {
attachTags,
detachTags,
@ -3385,7 +3371,6 @@ export const secretServiceFactory = ({
getSecretByIdRaw,
getAccessibleSecrets,
getSecretVersionsV2ByIds,
getChangeVersions,
getProjectSecretResourcesCount
getChangeVersions
};
};

View File

@ -0,0 +1,4 @@
---
title: "Available"
openapi: "GET /api/v1/app-connections/railway/available"
---

View File

@ -0,0 +1,8 @@
---
title: "Create"
openapi: "POST /api/v1/app-connections/railway"
---
<Note>
Check out the configuration docs for [Railway Connections](/integrations/app-connections/railway) to learn how to obtain the required credentials.
</Note>

View File

@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/app-connections/railway/{connectionId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/app-connections/railway/{connectionId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/app-connections/railway/connection-name/{connectionName}"
---

View File

@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/app-connections/railway"
---

View File

@ -0,0 +1,8 @@
---
title: "Update"
openapi: "PATCH /api/v1/app-connections/railway/{connectionId}"
---
<Note>
Check out the configuration docs for [Railway Connections](/integrations/app-connections/railway) to learn how to obtain the required credentials.
</Note>

View File

@ -0,0 +1,4 @@
---
title: "Create"
openapi: "POST /api/v1/secret-syncs/railway"
---

View File

@ -0,0 +1,4 @@
---
title: "Delete"
openapi: "DELETE /api/v1/secret-syncs/railway/{syncId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by ID"
openapi: "GET /api/v1/secret-syncs/railway/{syncId}"
---

View File

@ -0,0 +1,4 @@
---
title: "Get by Name"
openapi: "GET /api/v1/secret-syncs/railway/sync-name/{syncName}"
---

View File

@ -0,0 +1,4 @@
---
title: "Import Secrets"
openapi: "POST /api/v1/secret-syncs/railway/{syncId}/import-secrets"
---

View File

@ -0,0 +1,4 @@
---
title: "List"
openapi: "GET /api/v1/secret-syncs/railway"
---

View File

@ -0,0 +1,4 @@
---
title: "Remove Secrets"
openapi: "POST /api/v1/secret-syncs/railway/{syncId}/remove-secrets"
---

View File

@ -0,0 +1,4 @@
---
title: "Sync Secrets"
openapi: "POST /api/v1/secret-syncs/railway/{syncId}/sync-secrets"
---

View File

@ -0,0 +1,4 @@
---
title: "Update"
openapi: "PATCH /api/v1/secret-syncs/railway/{syncId}"
---

View File

@ -78,10 +78,7 @@
},
{
"group": "Infisical SSH",
"pages": [
"documentation/platform/ssh/overview",
"documentation/platform/ssh/host-groups"
]
"pages": ["documentation/platform/ssh/overview", "documentation/platform/ssh/host-groups"]
},
{
"group": "Key Management (KMS)",
@ -378,10 +375,7 @@
},
{
"group": "Architecture",
"pages": [
"internals/architecture/components",
"internals/architecture/cloud"
]
"pages": ["internals/architecture/components", "internals/architecture/cloud"]
},
"internals/security",
"internals/service-tokens"
@ -488,6 +482,7 @@
"integrations/app-connections/oci",
"integrations/app-connections/oracledb",
"integrations/app-connections/postgres",
"integrations/app-connections/railway",
"integrations/app-connections/render",
"integrations/app-connections/teamcity",
"integrations/app-connections/terraform-cloud",
@ -522,6 +517,7 @@
"integrations/secret-syncs/heroku",
"integrations/secret-syncs/humanitec",
"integrations/secret-syncs/oci-vault",
"integrations/secret-syncs/railway",
"integrations/secret-syncs/render",
"integrations/secret-syncs/teamcity",
"integrations/secret-syncs/terraform-cloud",
@ -550,10 +546,7 @@
"integrations/cloud/gcp-secret-manager",
{
"group": "Cloudflare",
"pages": [
"integrations/cloud/cloudflare-pages",
"integrations/cloud/cloudflare-workers"
]
"pages": ["integrations/cloud/cloudflare-pages", "integrations/cloud/cloudflare-workers"]
},
"integrations/cloud/terraform-cloud",
"integrations/cloud/databricks",
@ -563,8 +556,8 @@
"integrations/cloud/digital-ocean-app-platform",
"integrations/cloud/heroku",
"integrations/cloud/netlify",
"integrations/cloud/railway",
"integrations/cloud/flyio",
"integrations/cloud/railway",
"integrations/cloud/render",
"integrations/cloud/laravel-forge",
"integrations/cloud/supabase",
@ -665,11 +658,7 @@
"cli/commands/reset",
{
"group": "infisical scan",
"pages": [
"cli/commands/scan",
"cli/commands/scan-git-changes",
"cli/commands/scan-install"
]
"pages": ["cli/commands/scan", "cli/commands/scan-git-changes", "cli/commands/scan-install"]
}
]
},
@ -853,30 +842,30 @@
{
"group": "Organizations",
"pages": [
{
"group": "OIDC SSO",
"pages": [
"api-reference/endpoints/organizations/oidc-sso/get-oidc-config",
"api-reference/endpoints/organizations/oidc-sso/update-oidc-config",
"api-reference/endpoints/organizations/oidc-sso/create-oidc-config"
]
},
{
"group": "LDAP SSO",
"pages": [
"api-reference/endpoints/organizations/ldap-sso/get-ldap-config",
"api-reference/endpoints/organizations/ldap-sso/update-ldap-config",
"api-reference/endpoints/organizations/ldap-sso/create-ldap-config"
]
},
{
"group": "SAML SSO",
"pages": [
"api-reference/endpoints/organizations/saml-sso/get-saml-config",
"api-reference/endpoints/organizations/saml-sso/update-saml-config",
"api-reference/endpoints/organizations/saml-sso/create-saml-config"
]
},
{
"group": "OIDC SSO",
"pages": [
"api-reference/endpoints/organizations/oidc-sso/get-oidc-config",
"api-reference/endpoints/organizations/oidc-sso/update-oidc-config",
"api-reference/endpoints/organizations/oidc-sso/create-oidc-config"
]
},
{
"group": "LDAP SSO",
"pages": [
"api-reference/endpoints/organizations/ldap-sso/get-ldap-config",
"api-reference/endpoints/organizations/ldap-sso/update-ldap-config",
"api-reference/endpoints/organizations/ldap-sso/create-ldap-config"
]
},
{
"group": "SAML SSO",
"pages": [
"api-reference/endpoints/organizations/saml-sso/get-saml-config",
"api-reference/endpoints/organizations/saml-sso/update-saml-config",
"api-reference/endpoints/organizations/saml-sso/create-saml-config"
]
},
"api-reference/endpoints/organizations/memberships",
"api-reference/endpoints/organizations/update-membership",
"api-reference/endpoints/organizations/delete-membership",
@ -993,9 +982,7 @@
"pages": [
{
"group": "Kubernetes",
"pages": [
"api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"
]
"pages": ["api-reference/endpoints/dynamic-secrets/kubernetes/create-lease"]
},
"api-reference/endpoints/dynamic-secrets/create",
"api-reference/endpoints/dynamic-secrets/update",
@ -1517,6 +1504,18 @@
"api-reference/endpoints/app-connections/postgres/delete"
]
},
{
"group": "Railway",
"pages": [
"api-reference/endpoints/app-connections/railway/list",
"api-reference/endpoints/app-connections/railway/available",
"api-reference/endpoints/app-connections/railway/get-by-id",
"api-reference/endpoints/app-connections/railway/get-by-name",
"api-reference/endpoints/app-connections/railway/create",
"api-reference/endpoints/app-connections/railway/update",
"api-reference/endpoints/app-connections/railway/delete"
]
},
{
"group": "Render",
"pages": [
@ -1826,6 +1825,20 @@
"api-reference/endpoints/secret-syncs/oci-vault/remove-secrets"
]
},
{
"group": "Railway",
"pages": [
"api-reference/endpoints/secret-syncs/railway/list",
"api-reference/endpoints/secret-syncs/railway/get-by-id",
"api-reference/endpoints/secret-syncs/railway/get-by-name",
"api-reference/endpoints/secret-syncs/railway/create",
"api-reference/endpoints/secret-syncs/railway/update",
"api-reference/endpoints/secret-syncs/railway/delete",
"api-reference/endpoints/secret-syncs/railway/sync-secrets",
"api-reference/endpoints/secret-syncs/railway/import-secrets",
"api-reference/endpoints/secret-syncs/railway/remove-secrets"
]
},
{
"group": "Render",
"pages": [
@ -2172,7 +2185,7 @@
"api": {
"openapi": "https://app.infisical.com/api/docs/json",
"mdx": {
"server": ["https://app.infisical.com", "http://localhost:8080"]
"server": ["https://app.infisical.com"]
}
},
"appearance": {

View File

@ -3,13 +3,13 @@ title: "AWS IAM"
description: "Learn how to dynamically generate AWS IAM Users."
---
The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on configured AWS policy.
The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on a configured AWS policy. Infisical supports several authentication methods to connect to your AWS account, including assuming an IAM Role, using IAM Roles for Service Accounts (IRSA) on EKS, or providing static Access Keys.
## Prerequisite
Infisical needs an initial AWS IAM user with the required permissions to create sub IAM users. This IAM user will be responsible for managing the lifecycle of new IAM users.
Infisical needs an AWS IAM principal (a user or a role) with the required permissions to create and manage other IAM users. This principal will be responsible for the lifecycle of the dynamically generated users.
<Accordion title="Managing AWS IAM User minimum permission policy">
<Accordion title="Required IAM Permissions">
```json
{
@ -235,7 +235,169 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
![Provision Lease](/images/platform/dynamic-secrets/lease-values-aws-iam.png)
</Step>
</Steps>
</Tab>
<Tab title="IRSA (EKS)">
This method is recommended for self-hosted Infisical instances running on AWS EKS. It uses [IAM Roles for Service Accounts (IRSA)](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) to securely grant permissions to the Infisical pods without managing static credentials.
<Warning type="warning" title="IRSA Configuration Prerequisite">
In order to use IRSA, the `KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN` environment variable must be set to `true` for your self-hosted Infisical instance.
</Warning>
<Steps>
<Step title="Create an IAM OIDC provider for your cluster">
If you don't already have one, you need to create an IAM OIDC provider for your EKS cluster. This allows IAM to trust authentication tokens from your Kubernetes cluster.
1. Find your cluster's OIDC provider URL from the EKS console or by using the AWS CLI:
`aws eks describe-cluster --name <your-cluster-name> --query "cluster.identity.oidc.issuer" --output text`
2. Navigate to the [IAM Identity Providers](https://console.aws.amazon.com/iam/home#/providers) page in your AWS Console and create a new OpenID Connect provider with the URL and `sts.amazonaws.com` as the audience.
![Create OIDC Provider Placeholder](/images/integrations/aws/irsa-create-oidc-provider.png)
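If you prefer to script this lookup, the following is a minimal sketch using the AWS SDK for JavaScript v3; the cluster name and region are placeholders you would replace with your own values.
```typescript
import { EKSClient, DescribeClusterCommand } from "@aws-sdk/client-eks";

// Fetch the cluster's OIDC issuer URL, equivalent to the CLI command above.
const eks = new EKSClient({ region: "<REGION>" });
const { cluster } = await eks.send(new DescribeClusterCommand({ name: "<your-cluster-name>" }));

// e.g. https://oidc.eks.<REGION>.amazonaws.com/id/<OIDC_ID>
console.log(cluster?.identity?.oidc?.issuer);
```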
</Step>
<Step title="Create the Managing User IAM Role for Infisical">
1. Navigate to the [Create IAM Role](https://console.aws.amazon.com/iamv2/home#/roles/create?step=selectEntities) page in your AWS Console.
2. Select **Web identity** as the **Trusted Entity Type**.
3. Choose the OIDC provider you created in the previous step.
4. For the **Audience**, select `sts.amazonaws.com`.
![IAM Role Creation for IRSA](/images/integrations/aws/irsa-iam-role-creation.png)
5. Attach the permission policy detailed in the **Prerequisite** section at the top of this page.
6. After creating the role, edit its **Trust relationship** to specify the service account Infisical is using in your cluster. This ensures only the Infisical pod can assume this role.
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Federated": "arn:aws:iam::<ACCOUNT_ID>:oidc-provider/oidc.eks.<REGION>.amazonaws.com/id/<OIDC_ID>"
},
"Action": "sts:AssumeRoleWithWebIdentity",
"Condition": {
"StringEquals": {
"oidc.eks.<REGION>.amazonaws.com/id/<OIDC_ID>:sub": "system:serviceaccount:<K8S_NAMESPACE>:<INFISICAL_SERVICE_ACCOUNT_NAME>",
"oidc.eks.<REGION>.amazonaws.com/id/<OIDC_ID>:aud": "sts.amazonaws.com"
}
}
}
]
}
```
Replace `<ACCOUNT_ID>`, `<REGION>`, `<OIDC_ID>`, `<K8S_NAMESPACE>`, and `<INFISICAL_SERVICE_ACCOUNT_NAME>` with your specific values.
</Step>
<Step title="Annotate the Infisical Kubernetes Service Account">
For the IRSA mechanism to work, the Infisical service account in your Kubernetes cluster must be annotated with the ARN of the IAM role you just created.
Run the following command, replacing the placeholders with your values:
```bash
kubectl annotate serviceaccount -n <infisical-namespace> <infisical-service-account> \
eks.amazonaws.com/role-arn=arn:aws:iam::<account-id>:role/<iam-role-name>
```
This annotation tells the EKS Pod Identity Webhook to inject the necessary environment variables and tokens into the Infisical pod, allowing it to assume the specified IAM role.
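For reference, the sketch below shows roughly how a process inside the pod can verify that the injected web identity token resolves to the expected role, using the AWS SDK for JavaScript v3; it is a verification aid, not a description of Infisical's internal implementation.
```typescript
import { STSClient, GetCallerIdentityCommand } from "@aws-sdk/client-sts";
import { fromTokenFile } from "@aws-sdk/credential-providers";

// The webhook injects AWS_ROLE_ARN and AWS_WEB_IDENTITY_TOKEN_FILE into the pod;
// fromTokenFile() reads them and performs AssumeRoleWithWebIdentity under the hood.
const sts = new STSClient({ region: process.env.AWS_REGION, credentials: fromTokenFile() });

// Prints the ARN of the assumed role session when IRSA is wired up correctly.
const { Arn } = await sts.send(new GetCallerIdentityCommand({}));
console.log(Arn);
```
Running a check like this from within the cluster is a quick way to confirm that the service account annotation and the role's trust policy agree before configuring the dynamic secret.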
</Step>
<Step title="Secret Overview Dashboard">
Navigate to the Secret Overview dashboard and select the environment to which you would like to add a dynamic secret.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">
![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
</Step>
<Step title="Select AWS IAM">
![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-aws-iam.png)
</Step>
<Step title="Provide the inputs for dynamic secret parameters">
![Dynamic Secret Setup Modal for IRSA](/images/platform/dynamic-secrets/dynamic-secret-setup-modal-aws-iam-irsa.png)
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (it is possible to modify this value after a secret is generated)
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
- `{{identity.name}}`: Name of the identity that is generating the secret
- `{{random N}}`: Random string of N characters
Allowed template functions are
- `truncate`: Truncates a string to a specified length
- `replace`: Replaces a substring with another value
Examples:
```
{{randomUsername}} // 3POnzeFyK9gW2nioK0q2gMjr6CZqsRiX
{{unixTimestamp}} // 17490641580
{{identity.name}} // testuser
{{random-5}} // x9k2m
{{truncate identity.name 4}} // test
{{replace identity.name 'user' 'replace'}} // testreplace
```
</ParamField>
<ParamField path="Tags" type="map<string, string>[]">
Tags to be added to the created IAM User resource.
</ParamField>
<ParamField path="Method" type="string" required>
Select *IRSA* method.
</ParamField>
<ParamField path="Aws Role ARN" type="string" required>
The ARN of the AWS IAM Role for the service account to assume.
</ParamField>
<ParamField path="AWS IAM Path" type="string">
[IAM AWS Path](https://aws.amazon.com/blogs/security/optimize-aws-administration-with-iam-paths/) to scope created IAM User resource access.
</ParamField>
<ParamField path="AWS Region" type="string" required>
The AWS data center region.
</ParamField>
<ParamField path="IAM User Permission Boundary" type="string" required>
The IAM Policy ARN of the [AWS Permissions Boundary](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to attach to the IAM users created by this role.
</ParamField>
<ParamField path="AWS IAM Groups" type="string">
The AWS IAM groups that should be assigned to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS Policy ARNs" type="string">
The AWS IAM managed policies that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS IAM Policy Document" type="string">
The AWS IAM inline policy that should be attached to the created users.
Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="Username Template" type="string" default="{{randomUsername}}">
Specifies a template for generating usernames. This field allows customization of how usernames are automatically created.
Allowed template variables are
- `{{randomUsername}}`: Random username string
- `{{unixTimestamp}}`: Current Unix timestamp
</ParamField>
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png)
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, click the 'Generate' button that appears when you hover over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This dictates how long the credentials remain valid.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in step 4.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/lease-values-aws-iam.png)
</Step>
</Steps>
</Tab>
<Tab title="Access Key">
Infisical will use the provided **Access Key ID** and **Secret Key** to connect to your AWS instance.
@ -263,9 +425,9 @@ Replace **\<account id\>** with your AWS account id and **\<aws-scope-path\>** w
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="Method" type="string" required>
Select *Access Key* method.
</ParamField>
<ParamField path="Method" type="string" required>
Select *Access Key* method.
</ParamField>
<ParamField path="AWS Access Key" type="string" required>
The managing AWS IAM User Access Key

Binary image files (documentation screenshots) added; contents not shown.

Some files were not shown because too many files have changed in this diff.