Compare commits

1 commit

Author:  github-actions
SHA1:    f985728eea
Message: chore: renamed new migration files to latest timestamp (gh-action)
Date:    2024-05-07 18:10:11 +00:00

552 changed files with 17025 additions and 19032 deletions

@@ -74,21 +74,21 @@ jobs:
         uses: pr-mpt/actions-commit-hash@v2
       - name: Download task definition
         run: |
-          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
+          aws ecs describe-task-definition --task-definition infisical-prod-platform --query taskDefinition > task-definition.json
       - name: Render Amazon ECS task definition
         id: render-web-container
         uses: aws-actions/amazon-ecs-render-task-definition@v1
         with:
           task-definition: task-definition.json
-          container-name: infisical-core-platform
+          container-name: infisical-prod-platform
           image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
           environment-variables: "LOG_LEVEL=info"
       - name: Deploy to Amazon ECS service
         uses: aws-actions/amazon-ecs-deploy-task-definition@v1
         with:
           task-definition: ${{ steps.render-web-container.outputs.task-definition }}
-          service: infisical-core-platform
-          cluster: infisical-core-platform
+          service: infisical-prod-platform
+          cluster: infisical-prod-platform
           wait-for-service-stability: true

   production-postgres-deployment:
@@ -122,19 +122,19 @@ jobs:
         uses: pr-mpt/actions-commit-hash@v2
       - name: Download task definition
         run: |
-          aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
+          aws ecs describe-task-definition --task-definition infisical-prod-platform --query taskDefinition > task-definition.json
       - name: Render Amazon ECS task definition
         id: render-web-container
         uses: aws-actions/amazon-ecs-render-task-definition@v1
         with:
           task-definition: task-definition.json
-          container-name: infisical-core-platform
+          container-name: infisical-prod-platform
           image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
           environment-variables: "LOG_LEVEL=info"
       - name: Deploy to Amazon ECS service
         uses: aws-actions/amazon-ecs-deploy-task-definition@v1
         with:
           task-definition: ${{ steps.render-web-container.outputs.task-definition }}
-          service: infisical-core-platform
-          cluster: infisical-core-platform
+          service: infisical-prod-platform
+          cluster: infisical-prod-platform
           wait-for-service-stability: true

@@ -40,14 +40,13 @@ jobs:
           REDIS_URL: redis://172.17.0.1:6379
           DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
           JWT_AUTH_SECRET: something-random
-          ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
       - uses: actions/setup-go@v5
         with:
           go-version: '1.21.5'
       - name: Wait for container to be stable and check logs
         run: |
           SECONDS=0
-          r HEALTHY=0
+          HEALTHY=0
           while [ $SECONDS -lt 60 ]; do
             if docker ps | grep infisical-api | grep -q healthy; then
               echo "Container is healthy."
@@ -74,4 +73,4 @@ jobs:
         run: |
           docker-compose -f "docker-compose.dev.yml" down
           docker stop infisical-api
           docker remove infisical-api

@@ -38,15 +38,13 @@ jobs:
           rm added_files.txt
           git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"
-      - name: Get PR details
-        id: pr_details
+      - name: Get the username of the person who closed the PR
         run: |
           PR_NUMBER=${{ github.event.pull_request.number }}
-          PR_MERGER=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.merged_by.login')
+          PR_CLOSER=$(curl -s -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" "https://api.github.com/repos/${{ github.repository }}/pulls/$PR_NUMBER" | jq -r '.closed_by.login')
           echo "PR Number: $PR_NUMBER"
-          echo "PR Merger: $PR_MERGER"
-          echo "pr_merger=$PR_MERGER" >> $GITHUB_OUTPUT
+          echo "PR Closer: $PR_CLOSER"
+          echo "pr_closer=$PR_CLOSER" >> $GITHUB_OUTPUT
       - name: Create Pull Request
         if: env.SKIP_RENAME != 'true'
@@ -56,4 +54,3 @@ jobs:
           commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
           title: 'GH Action: rename new migration file timestamp'
           branch-suffix: timestamp
-          reviewers: ${{ steps.pr_details.outputs.pr_merger }}

@@ -1,6 +1,7 @@
 ARG POSTHOG_HOST=https://app.posthog.com
 ARG POSTHOG_API_KEY=posthog-api-key
 ARG INTERCOM_ID=intercom-id
+ARG SAML_ORG_SLUG=saml-org-slug-default

 FROM node:20-alpine AS base
@@ -34,7 +35,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
 ARG INTERCOM_ID
 ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
 ARG INFISICAL_PLATFORM_VERSION
 ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
+ARG SAML_ORG_SLUG
+ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG

 # Build
 RUN npm run build
@@ -52,7 +55,6 @@ VOLUME /app/.next/cache/images
 COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
 COPY --from=frontend-builder /app/public ./public
-RUN chown non-root-user:nodejs ./public/data
 COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
 COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
@@ -91,18 +93,9 @@ RUN mkdir frontend-build
 # Production stage
 FROM base AS production
-RUN apk add --upgrade --no-cache ca-certificates
 RUN addgroup --system --gid 1001 nodejs \
     && adduser --system --uid 1001 non-root-user

-# Give non-root-user permission to update SSL certs
-RUN chown -R non-root-user /etc/ssl/certs
-RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
-RUN chmod -R u+rwx /etc/ssl/certs
-RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
-RUN chown non-root-user /usr/sbin/update-ca-certificates
-RUN chmod u+rx /usr/sbin/update-ca-certificates
 ## set pre baked keys
 ARG POSTHOG_API_KEY
 ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
@@ -110,6 +103,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
 ARG INTERCOM_ID=intercom-id
 ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
     BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
+ARG SAML_ORG_SLUG
+ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
+    BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG

 WORKDIR /

@@ -1,5 +1,4 @@
 import { TKeyStoreFactory } from "@app/keystore/keystore";
-import { Lock } from "@app/lib/red-lock";

 export const mockKeyStore = (): TKeyStoreFactory => {
   const store: Record<string, string | number | Buffer> = {};
@@ -26,12 +25,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
     },
     incrementBy: async () => {
       return 1;
-    },
-    acquireLock: () => {
-      return Promise.resolve({
-        release: () => {}
-      }) as Promise<Lock>;
-    },
-    waitTillReady: async () => {}
+    }
   };
 };

File diff suppressed because it is too large

@@ -95,13 +95,11 @@
     "axios": "^1.6.7",
     "axios-retry": "^4.0.0",
     "bcrypt": "^5.1.1",
-    "bullmq": "^5.4.2",
+    "bullmq": "^5.3.3",
     "cassandra-driver": "^4.7.2",
     "dotenv": "^16.4.1",
     "fastify": "^4.26.0",
     "fastify-plugin": "^4.5.1",
-    "google-auth-library": "^9.9.0",
-    "googleapis": "^137.1.0",
     "handlebars": "^4.7.8",
     "ioredis": "^5.3.2",
     "jmespath": "^0.16.0",
@@ -112,7 +110,7 @@
     "libsodium-wrappers": "^0.7.13",
     "lodash.isequal": "^4.5.0",
     "ms": "^2.1.3",
-    "mysql2": "^3.9.8",
+    "mysql2": "^3.9.7",
     "nanoid": "^5.0.4",
     "nodemailer": "^6.9.9",
     "ora": "^7.0.1",

@@ -35,8 +35,6 @@ const getZodPrimitiveType = (type: string) => {
       return "z.coerce.number()";
     case "text":
       return "z.string()";
-    case "bytea":
-      return "zodBuffer";
     default:
       throw new Error(`Invalid type: ${type}`);
   }
@@ -98,15 +96,10 @@ const main = async () => {
     const columnNames = Object.keys(columns);
     let schema = "";
-    const zodImportSet = new Set<string>();
     for (let colNum = 0; colNum < columnNames.length; colNum++) {
       const columnName = columnNames[colNum];
       const colInfo = columns[columnName];
       let ztype = getZodPrimitiveType(colInfo.type);
-      if (["zodBuffer"].includes(ztype)) {
-        zodImportSet.add(ztype);
-      }
       // don't put optional on id
       if (colInfo.defaultValue && columnName !== "id") {
         const { defaultValue } = colInfo;
@@ -128,8 +121,6 @@ const main = async () => {
       .split("_")
       .reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");

-    const zodImports = Array.from(zodImportSet);
     // the insert and update are changed to zod input type to use default cases
     writeFileSync(
       path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
@@ -140,8 +131,6 @@ const main = async () => {
 import { z } from "zod";

-${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
 import { TImmutableDBKeys } from "./models";

 export const ${pascalCase}Schema = z.object({${schema}});

@@ -32,10 +32,6 @@ import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-se
 import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
 import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
 import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
-import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
-import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
-import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
-import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
 import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
 import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
 import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
@@ -52,8 +48,6 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
 import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
 import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
 import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
-import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
-import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
 import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
 import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
 import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
@@ -109,7 +103,6 @@ declare module "fastify" {
       projectKey: TProjectKeyServiceFactory;
       projectRole: TProjectRoleServiceFactory;
       secret: TSecretServiceFactory;
-      secretReplication: TSecretReplicationServiceFactory;
       secretTag: TSecretTagServiceFactory;
       secretImport: TSecretImportServiceFactory;
       projectBot: TProjectBotServiceFactory;
@@ -122,10 +115,6 @@ declare module "fastify" {
       identityAccessToken: TIdentityAccessTokenServiceFactory;
       identityProject: TIdentityProjectServiceFactory;
       identityUa: TIdentityUaServiceFactory;
-      identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
-      identityGcpAuth: TIdentityGcpAuthServiceFactory;
-      identityAwsAuth: TIdentityAwsAuthServiceFactory;
-      identityAzureAuth: TIdentityAzureAuthServiceFactory;
       accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
       accessApprovalRequest: TAccessApprovalRequestServiceFactory;
       secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@@ -146,7 +135,6 @@ declare module "fastify" {
       dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
       projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
       identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
-      secretSharing: TSecretSharingServiceFactory;
     };
     // this is exclusive use for middlewares in which we need to inject data
     // everywhere else access using service layer

@@ -59,18 +59,6 @@ import {
   TIdentityAccessTokens,
   TIdentityAccessTokensInsert,
   TIdentityAccessTokensUpdate,
-  TIdentityAwsAuths,
-  TIdentityAwsAuthsInsert,
-  TIdentityAwsAuthsUpdate,
-  TIdentityAzureAuths,
-  TIdentityAzureAuthsInsert,
-  TIdentityAzureAuthsUpdate,
-  TIdentityGcpAuths,
-  TIdentityGcpAuthsInsert,
-  TIdentityGcpAuthsUpdate,
-  TIdentityKubernetesAuths,
-  TIdentityKubernetesAuthsInsert,
-  TIdentityKubernetesAuthsUpdate,
   TIdentityOrgMemberships,
   TIdentityOrgMembershipsInsert,
   TIdentityOrgMembershipsUpdate,
@@ -98,15 +86,6 @@ import {
   TIntegrations,
   TIntegrationsInsert,
   TIntegrationsUpdate,
-  TKmsKeys,
-  TKmsKeysInsert,
-  TKmsKeysUpdate,
-  TKmsKeyVersions,
-  TKmsKeyVersionsInsert,
-  TKmsKeyVersionsUpdate,
-  TKmsRootConfig,
-  TKmsRootConfigInsert,
-  TKmsRootConfigUpdate,
   TLdapConfigs,
   TLdapConfigsInsert,
   TLdapConfigsUpdate,
@@ -185,9 +164,6 @@ import {
   TSecretImports,
   TSecretImportsInsert,
   TSecretImportsUpdate,
-  TSecretReferences,
-  TSecretReferencesInsert,
-  TSecretReferencesUpdate,
   TSecretRotationOutputs,
   TSecretRotationOutputsInsert,
   TSecretRotationOutputsUpdate,
@@ -198,9 +174,6 @@ import {
   TSecretScanningGitRisks,
   TSecretScanningGitRisksInsert,
   TSecretScanningGitRisksUpdate,
-  TSecretSharing,
-  TSecretSharingInsert,
-  TSecretSharingUpdate,
   TSecretsInsert,
   TSecretSnapshotFolders,
   TSecretSnapshotFoldersInsert,
@@ -325,11 +298,6 @@ declare module "knex/types/tables" {
     >;
     [TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
     [TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
-    [TableName.SecretReference]: Knex.CompositeTableType<
-      TSecretReferences,
-      TSecretReferencesInsert,
-      TSecretReferencesUpdate
-    >;
     [TableName.SecretBlindIndex]: Knex.CompositeTableType<
       TSecretBlindIndexes,
       TSecretBlindIndexesInsert,
@@ -342,7 +310,6 @@ declare module "knex/types/tables" {
       TSecretFolderVersionsInsert,
       TSecretFolderVersionsUpdate
     >;
-    [TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
     [TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
     [TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
     [TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
@@ -359,26 +326,6 @@ declare module "knex/types/tables" {
       TIdentityUniversalAuthsInsert,
       TIdentityUniversalAuthsUpdate
     >;
-    [TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
-      TIdentityKubernetesAuths,
-      TIdentityKubernetesAuthsInsert,
-      TIdentityKubernetesAuthsUpdate
-    >;
-    [TableName.IdentityGcpAuth]: Knex.CompositeTableType<
-      TIdentityGcpAuths,
-      TIdentityGcpAuthsInsert,
-      TIdentityGcpAuthsUpdate
-    >;
-    [TableName.IdentityAwsAuth]: Knex.CompositeTableType<
-      TIdentityAwsAuths,
-      TIdentityAwsAuthsInsert,
-      TIdentityAwsAuthsUpdate
-    >;
-    [TableName.IdentityAzureAuth]: Knex.CompositeTableType<
-      TIdentityAzureAuths,
-      TIdentityAzureAuthsInsert,
-      TIdentityAzureAuthsUpdate
-    >;
     [TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
       TIdentityUaClientSecrets,
       TIdentityUaClientSecretsInsert,
@@ -525,13 +472,5 @@ declare module "knex/types/tables" {
       TSecretVersionTagJunctionInsert,
       TSecretVersionTagJunctionUpdate
     >;
-    // KMS service
-    [TableName.KmsServerRootConfig]: Knex.CompositeTableType<
-      TKmsRootConfig,
-      TKmsRootConfigInsert,
-      TKmsRootConfigUpdate
-    >;
-    [TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
-    [TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
   }
 }

@@ -0,0 +1 @@

@@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAwsAuth))) {
await knex.schema.createTable(TableName.IdentityAwsAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("type").notNullable();
t.string("stsEndpoint").notNullable();
t.string("allowedPrincipalArns").notNullable();
t.string("allowedAccountIds").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityAwsAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityAwsAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityAwsAuth);
}

@@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityGcpAuth))) {
await knex.schema.createTable(TableName.IdentityGcpAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("type").notNullable();
t.string("allowedServiceAccounts").notNullable();
t.string("allowedProjects").notNullable();
t.string("allowedZones").notNullable(); // GCE only (fully qualified zone names)
});
}
await createOnUpdateTrigger(knex, TableName.IdentityGcpAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityGcpAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityGcpAuth);
}

@@ -1,24 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretReference))) {
await knex.schema.createTable(TableName.SecretReference, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("environment").notNullable();
t.string("secretPath").notNullable();
t.uuid("secretId").notNullable();
t.foreign("secretId").references("id").inTable(TableName.Secret).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretReference);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretReference);
await dropOnUpdateTrigger(knex, TableName.SecretReference);
}

@@ -1,36 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityKubernetesAuth))) {
await knex.schema.createTable(TableName.IdentityKubernetesAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("kubernetesHost").notNullable();
t.text("encryptedCaCert").notNullable();
t.string("caCertIV").notNullable();
t.string("caCertTag").notNullable();
t.text("encryptedTokenReviewerJwt").notNullable();
t.string("tokenReviewerJwtIV").notNullable();
t.string("tokenReviewerJwtTag").notNullable();
t.string("allowedNamespaces").notNullable();
t.string("allowedNames").notNullable();
t.string("allowedAudience").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityKubernetesAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityKubernetesAuth);
}

@@ -1,43 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");
await knex.schema.alterTable(TableName.Integration, (t) => {
if (!hasIsSyncedColumn) {
t.boolean("isSynced").nullable();
}
if (!hasSyncMessageColumn) {
t.text("syncMessage").nullable();
}
if (!hasLastSyncJobId) {
t.string("lastSyncJobId").nullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasIsSyncedColumn = await knex.schema.hasColumn(TableName.Integration, "isSynced");
const hasSyncMessageColumn = await knex.schema.hasColumn(TableName.Integration, "syncMessage");
const hasLastSyncJobId = await knex.schema.hasColumn(TableName.Integration, "lastSyncJobId");
await knex.schema.alterTable(TableName.Integration, (t) => {
if (hasIsSyncedColumn) {
t.dropColumn("isSynced");
}
if (hasSyncMessageColumn) {
t.dropColumn("syncMessage");
}
if (hasLastSyncJobId) {
t.dropColumn("lastSyncJobId");
}
});
}

@@ -1,26 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesProjectIdExist) t.index("projectId");
if (doesOrgIdExist) t.index("orgId");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesProjectIdExist) t.dropIndex("projectId");
if (doesOrgIdExist) t.dropIndex("orgId");
});
}
}

@@ -1,22 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
if (doesEnvIdExist) t.index("envId");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "envId");
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
if (doesEnvIdExist) t.dropIndex("envId");
});
}
}

@@ -1,22 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");
if (await knex.schema.hasTable(TableName.SecretVersion)) {
await knex.schema.alterTable(TableName.SecretVersion, (t) => {
if (doesEnvIdExist) t.index("envId");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesEnvIdExist = await knex.schema.hasColumn(TableName.SecretVersion, "envId");
if (await knex.schema.hasTable(TableName.SecretVersion)) {
await knex.schema.alterTable(TableName.SecretVersion, (t) => {
if (doesEnvIdExist) t.dropIndex("envId");
});
}
}

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
if (doesSnapshotIdExist) t.index("snapshotId");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "snapshotId");
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
if (doesSnapshotIdExist) t.dropIndex("snapshotId");
});
}
}

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
if (doesSnapshotIdExist) t.index("snapshotId");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesSnapshotIdExist = await knex.schema.hasColumn(TableName.SnapshotFolder, "snapshotId");
if (await knex.schema.hasTable(TableName.SnapshotFolder)) {
await knex.schema.alterTable(TableName.SnapshotFolder, (t) => {
if (doesSnapshotIdExist) t.dropIndex("snapshotId");
});
}
}

@@ -1,24 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");
if (await knex.schema.hasTable(TableName.Secret)) {
await knex.schema.alterTable(TableName.Secret, (t) => {
if (doesFolderIdExist && doesUserIdExist) t.index(["folderId", "userId"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesFolderIdExist = await knex.schema.hasColumn(TableName.Secret, "folderId");
const doesUserIdExist = await knex.schema.hasColumn(TableName.Secret, "userId");
if (await knex.schema.hasTable(TableName.Secret)) {
await knex.schema.alterTable(TableName.Secret, (t) => {
if (doesUserIdExist && doesFolderIdExist) t.dropIndex(["folderId", "userId"]);
});
}
}

@@ -1,22 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesExpireAtExist) t.index("expiresAt");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesExpireAtExist = await knex.schema.hasColumn(TableName.AuditLog, "expiresAt");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesExpireAtExist) t.dropIndex("expiresAt");
});
}
}

@@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAzureAuth))) {
await knex.schema.createTable(TableName.IdentityAzureAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("tenantId").notNullable();
t.string("resource").notNullable();
t.string("allowedServicePrincipalIds").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityAzureAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityAzureAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityAzureAuth);
}

@@ -1,43 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConsecutiveFailedMfaAttempts = await knex.schema.hasColumn(TableName.Users, "consecutiveFailedMfaAttempts");
const hasIsLocked = await knex.schema.hasColumn(TableName.Users, "isLocked");
const hasTemporaryLockDateEnd = await knex.schema.hasColumn(TableName.Users, "temporaryLockDateEnd");
await knex.schema.alterTable(TableName.Users, (t) => {
if (!hasConsecutiveFailedMfaAttempts) {
t.integer("consecutiveFailedMfaAttempts").defaultTo(0);
}
if (!hasIsLocked) {
t.boolean("isLocked").defaultTo(false);
}
if (!hasTemporaryLockDateEnd) {
t.dateTime("temporaryLockDateEnd").nullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasConsecutiveFailedMfaAttempts = await knex.schema.hasColumn(TableName.Users, "consecutiveFailedMfaAttempts");
const hasIsLocked = await knex.schema.hasColumn(TableName.Users, "isLocked");
const hasTemporaryLockDateEnd = await knex.schema.hasColumn(TableName.Users, "temporaryLockDateEnd");
await knex.schema.alterTable(TableName.Users, (t) => {
if (hasConsecutiveFailedMfaAttempts) {
t.dropColumn("consecutiveFailedMfaAttempts");
}
if (hasIsLocked) {
t.dropColumn("isLocked");
}
if (hasTemporaryLockDateEnd) {
t.dropColumn("temporaryLockDateEnd");
}
});
}

@@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretSharing))) {
await knex.schema.createTable(TableName.SecretSharing, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.text("encryptedValue").notNullable();
t.text("iv").notNullable();
t.text("tag").notNullable();
t.text("hashedHex").notNullable();
t.timestamp("expiresAt").notNullable();
t.uuid("userId").notNullable();
t.uuid("orgId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretSharing);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretSharing);
}

@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesSecretVersionIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "secretVersionId");
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
if (doesSecretVersionIdExist) t.index("secretVersionId");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesSecretVersionIdExist = await knex.schema.hasColumn(TableName.SnapshotSecret, "secretVersionId");
if (await knex.schema.hasTable(TableName.SnapshotSecret)) {
await knex.schema.alterTable(TableName.SnapshotSecret, (t) => {
if (doesSecretVersionIdExist) t.dropIndex("secretVersionId");
});
}
}

@@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretSharing))) {
await knex.schema.createTable(TableName.SecretSharing, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.text("encryptedValue").notNullable();
t.text("iv").notNullable();
t.text("tag").notNullable();
t.text("hashedHex").notNullable();
t.timestamp("expiresAt").notNullable();
t.uuid("userId").notNullable();
t.uuid("orgId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretSharing);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretSharing);
}

@@ -1,33 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (!hasExpiresAfterViewsColumn) {
t.integer("expiresAfterViews");
}
if (hasSecretNameColumn) {
t.dropColumn("name");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasExpiresAfterViewsColumn) {
t.dropColumn("expiresAfterViews");
}
if (!hasSecretNameColumn) {
t.string("name").notNullable();
}
});
}

@@ -1,85 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
});
}
}

@@ -1,56 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.KmsServerRootConfig))) {
await knex.schema.createTable(TableName.KmsServerRootConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedRootKey").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
if (!(await knex.schema.hasTable(TableName.KmsKey))) {
await knex.schema.createTable(TableName.KmsKey, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.string("encryptionAlgorithm").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.string("description");
t.boolean("isDisabled").defaultTo(false);
t.boolean("isReserved").defaultTo(true);
t.string("projectId");
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKey);
if (!(await knex.schema.hasTable(TableName.KmsKeyVersion))) {
await knex.schema.createTable(TableName.KmsKeyVersion, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.integer("version").notNullable();
t.uuid("kmsKeyId").notNullable();
t.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKeyVersion);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.KmsServerRootConfig);
await dropOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
await knex.schema.dropTableIfExists(TableName.KmsKeyVersion);
await dropOnUpdateTrigger(knex, TableName.KmsKeyVersion);
await knex.schema.dropTableIfExists(TableName.KmsKey);
await dropOnUpdateTrigger(knex, TableName.KmsKey);
}

@@ -11,8 +11,8 @@ export const AccessApprovalPoliciesSchema = z.object({
   id: z.string().uuid(),
   name: z.string(),
   approvals: z.number().default(1),
-  secretPath: z.string().nullable().optional(),
   envId: z.string().uuid(),
+  secretPath: z.string().nullable().optional(),
   createdAt: z.date(),
   updatedAt: z.date()
 });

@@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAwsAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
stsEndpoint: z.string(),
allowedPrincipalArns: z.string(),
allowedAccountIds: z.string()
});
export type TIdentityAwsAuths = z.infer<typeof IdentityAwsAuthsSchema>;
export type TIdentityAwsAuthsInsert = Omit<z.input<typeof IdentityAwsAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAwsAuthsUpdate = Partial<Omit<z.input<typeof IdentityAwsAuthsSchema>, TImmutableDBKeys>>;

@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAzureAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
tenantId: z.string(),
resource: z.string(),
allowedServicePrincipalIds: z.string()
});
export type TIdentityAzureAuths = z.infer<typeof IdentityAzureAuthsSchema>;
export type TIdentityAzureAuthsInsert = Omit<z.input<typeof IdentityAzureAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAzureAuthsUpdate = Partial<Omit<z.input<typeof IdentityAzureAuthsSchema>, TImmutableDBKeys>>;

@@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityGcpAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedServiceAccounts: z.string(),
allowedProjects: z.string(),
allowedZones: z.string()
});
export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;
export type TIdentityGcpAuthsInsert = Omit<z.input<typeof IdentityGcpAuthsSchema>, TImmutableDBKeys>;
export type TIdentityGcpAuthsUpdate = Partial<Omit<z.input<typeof IdentityGcpAuthsSchema>, TImmutableDBKeys>>;

@@ -1,35 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityKubernetesAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
kubernetesHost: z.string(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedTokenReviewerJwt: z.string(),
tokenReviewerJwtIV: z.string(),
tokenReviewerJwtTag: z.string(),
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string()
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
export type TIdentityKubernetesAuthsInsert = Omit<z.input<typeof IdentityKubernetesAuthsSchema>, TImmutableDBKeys>;
export type TIdentityKubernetesAuthsUpdate = Partial<
Omit<z.input<typeof IdentityKubernetesAuthsSchema>, TImmutableDBKeys>
>;

@@ -17,10 +17,6 @@ export * from "./group-project-memberships";
 export * from "./groups";
 export * from "./identities";
 export * from "./identity-access-tokens";
-export * from "./identity-aws-auths";
-export * from "./identity-azure-auths";
-export * from "./identity-gcp-auths";
-export * from "./identity-kubernetes-auths";
 export * from "./identity-org-memberships";
 export * from "./identity-project-additional-privilege";
 export * from "./identity-project-membership-role";
@@ -30,9 +26,6 @@ export * from "./identity-universal-auths";
 export * from "./incident-contacts";
 export * from "./integration-auths";
 export * from "./integrations";
-export * from "./kms-key-versions";
-export * from "./kms-keys";
-export * from "./kms-root-config";
 export * from "./ldap-configs";
 export * from "./ldap-group-maps";
 export * from "./models";
@@ -60,11 +53,9 @@ export * from "./secret-blind-indexes";
 export * from "./secret-folder-versions";
 export * from "./secret-folders";
 export * from "./secret-imports";
-export * from "./secret-references";
 export * from "./secret-rotation-outputs";
 export * from "./secret-rotations";
 export * from "./secret-scanning-git-risks";
-export * from "./secret-sharing";
 export * from "./secret-snapshot-folders";
 export * from "./secret-snapshot-secrets";
 export * from "./secret-snapshots";

@@ -28,10 +28,7 @@ export const IntegrationsSchema = z.object({
   secretPath: z.string().default("/"),
   createdAt: z.date(),
   updatedAt: z.date(),
-  lastUsed: z.date().nullable().optional(),
-  isSynced: z.boolean().nullable().optional(),
-  syncMessage: z.string().nullable().optional(),
-  lastSyncJobId: z.string().nullable().optional()
+  lastUsed: z.date().nullable().optional()
 });

 export type TIntegrations = z.infer<typeof IntegrationsSchema>;

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeyVersionsSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
version: z.number(),
kmsKeyId: z.string().uuid()
});
export type TKmsKeyVersions = z.infer<typeof KmsKeyVersionsSchema>;
export type TKmsKeyVersionsInsert = Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>;
export type TKmsKeyVersionsUpdate = Partial<Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>>;

@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeysSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
encryptionAlgorithm: z.string(),
version: z.number().default(1),
description: z.string().nullable().optional(),
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
projectId: z.string().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
export type TKmsKeysInsert = Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>;
export type TKmsKeysUpdate = Partial<Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>>;

@@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer
});
export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
export type TKmsRootConfigInsert = Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>;
export type TKmsRootConfigUpdate = Partial<Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>>;

View File

@@ -28,8 +28,6 @@ export enum TableName {
ProjectUserMembershipRole = "project_user_membership_roles", ProjectUserMembershipRole = "project_user_membership_roles",
ProjectKeys = "project_keys", ProjectKeys = "project_keys",
Secret = "secrets", Secret = "secrets",
SecretReference = "secret_references",
SecretSharing = "secret_sharing",
SecretBlindIndex = "secret_blind_indexes", SecretBlindIndex = "secret_blind_indexes",
SecretVersion = "secret_versions", SecretVersion = "secret_versions",
SecretFolder = "secret_folders", SecretFolder = "secret_folders",
@@ -46,11 +44,7 @@ export enum TableName {
Identity = "identities", Identity = "identities",
IdentityAccessToken = "identity_access_tokens", IdentityAccessToken = "identity_access_tokens",
IdentityUniversalAuth = "identity_universal_auths", IdentityUniversalAuth = "identity_universal_auths",
IdentityKubernetesAuth = "identity_kubernetes_auths",
IdentityGcpAuth = "identity_gcp_auths",
IdentityAzureAuth = "identity_azure_auths",
IdentityUaClientSecret = "identity_ua_client_secrets", IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAwsAuth = "identity_aws_auths",
IdentityOrgMembership = "identity_org_memberships", IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships", IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role", IdentityProjectMembershipRole = "identity_project_membership_role",
@@ -81,11 +75,7 @@ export enum TableName {
DynamicSecretLease = "dynamic_secret_leases", DynamicSecretLease = "dynamic_secret_leases",
// junction tables with tags // junction tables with tags
JnSecretTag = "secret_tag_junction", JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction", SecretVersionTag = "secret_version_tag_junction"
// KMS Service
KmsServerRootConfig = "kms_root_config",
KmsKey = "kms_keys",
KmsKeyVersion = "kms_key_versions"
} }
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt"; export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -152,9 +142,5 @@ export enum ProjectUpgradeStatus {
} }
export enum IdentityAuthMethod { export enum IdentityAuthMethod {
Univeral = "universal-auth", Univeral = "universal-auth"
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth"
} }

View File

@@ -18,8 +18,7 @@ export const SecretApprovalRequestsSchema = z.object({
statusChangeBy: z.string().uuid().nullable().optional(), statusChangeBy: z.string().uuid().nullable().optional(),
committerId: z.string().uuid(), committerId: z.string().uuid(),
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date()
isReplicated: z.boolean().nullable().optional()
}); });
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>; export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;

View File

@@ -14,8 +14,7 @@ export const SecretFoldersSchema = z.object({
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date(),
envId: z.string().uuid(), envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(), parentId: z.string().uuid().nullable().optional()
isReserved: z.boolean().default(false).nullable().optional()
}); });
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>; export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@@ -15,12 +15,7 @@ export const SecretImportsSchema = z.object({
position: z.number(), position: z.number(),
createdAt: z.date(), createdAt: z.date(),
updatedAt: z.date(), updatedAt: z.date(),
folderId: z.string().uuid(), folderId: z.string().uuid()
isReplication: z.boolean().default(false).nullable().optional(),
isReplicationSuccess: z.boolean().nullable().optional(),
replicationStatus: z.string().nullable().optional(),
lastReplicated: z.date().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
}); });
export type TSecretImports = z.infer<typeof SecretImportsSchema>; export type TSecretImports = z.infer<typeof SecretImportsSchema>;

View File

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretReferencesSchema = z.object({
id: z.string().uuid(),
environment: z.string(),
secretPath: z.string(),
secretId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretReferences = z.infer<typeof SecretReferencesSchema>;
export type TSecretReferencesInsert = Omit<z.input<typeof SecretReferencesSchema>, TImmutableDBKeys>;
export type TSecretReferencesUpdate = Partial<Omit<z.input<typeof SecretReferencesSchema>, TImmutableDBKeys>>;

View File

@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string(),
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z.date(),
userId: z.string().uuid(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
expiresAfterViews: z.number().nullable().optional()
});
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;
export type TSecretSharingInsert = Omit<z.input<typeof SecretSharingSchema>, TImmutableDBKeys>;
export type TSecretSharingUpdate = Partial<Omit<z.input<typeof SecretSharingSchema>, TImmutableDBKeys>>;

View File

@@ -22,10 +22,7 @@ export const UsersSchema = z.object({
updatedAt: z.date(), updatedAt: z.date(),
isGhost: z.boolean().default(false), isGhost: z.boolean().default(false),
username: z.string(), username: z.string(),
isEmailVerified: z.boolean().default(false).nullable().optional(), isEmailVerified: z.boolean().nullable().optional()
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional()
}); });
export type TUsers = z.infer<typeof UsersSchema>; export type TUsers = z.infer<typeof UsersSchema>;

View File

@@ -5,15 +5,10 @@ import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types"; import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs"; import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid"; import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { import { PermissionSchema, SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchemas";
ProjectPermissionSchema,
ProjectSpecificPrivilegePermissionSchema,
SanitizedIdentityPrivilegeSchema
} from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type"; import { AuthMode } from "@app/services/auth/auth-type";
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => { export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@@ -44,12 +39,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
}) })
.optional() .optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug), .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array() permissions: PermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.privilegePermission
).optional()
}), }),
response: { response: {
200: z.object({ 200: z.object({
@@ -59,18 +49,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
}, },
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => { handler: async (req) => {
const { permissions, privilegePermission } = req.body;
if (!permissions && !privilegePermission) {
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
}
const permission = privilegePermission
? privilegePermission.actions.map((action) => ({
action,
subject: privilegePermission.subject,
conditions: privilegePermission.conditions
}))
: permissions!;
const privilege = await server.services.identityProjectAdditionalPrivilege.create({ const privilege = await server.services.identityProjectAdditionalPrivilege.create({
actorId: req.permission.id, actorId: req.permission.id,
actor: req.permission.type, actor: req.permission.type,
@@ -79,7 +57,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
...req.body, ...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)), slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: false, isTemporary: false,
permissions: JSON.stringify(packRules(permission)) permissions: JSON.stringify(packRules(req.body.permissions))
}); });
return { privilege }; return { privilege };
} }
@@ -112,12 +90,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
}) })
.optional() .optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug), .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array() permissions: PermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions),
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.privilegePermission
).optional(),
temporaryMode: z temporaryMode: z
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode) .nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode), .describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
@@ -138,19 +111,6 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
}, },
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => { handler: async (req) => {
const { permissions, privilegePermission } = req.body;
if (!permissions && !privilegePermission) {
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
}
const permission = privilegePermission
? privilegePermission.actions.map((action) => ({
action,
subject: privilegePermission.subject,
conditions: privilegePermission.conditions
}))
: permissions!;
const privilege = await server.services.identityProjectAdditionalPrivilege.create({ const privilege = await server.services.identityProjectAdditionalPrivilege.create({
actorId: req.permission.id, actorId: req.permission.id,
actor: req.permission.type, actor: req.permission.type,
@@ -159,7 +119,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
...req.body, ...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)), slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: true, isTemporary: true,
permissions: JSON.stringify(packRules(permission)) permissions: JSON.stringify(packRules(req.body.permissions))
}); });
return { privilege }; return { privilege };
} }
@@ -195,17 +155,14 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
message: "Slug must be a valid slug" message: "Slug must be a valid slug"
}) })
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug), .describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions), permissions: PermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.privilegePermission
).optional(),
isTemporary: z.boolean().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary), isTemporary: z.boolean().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
temporaryMode: z temporaryMode: z
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode) .nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode), .describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode),
temporaryRange: z temporaryRange: z
.string() .string()
.refine((val) => typeof val === "undefined" || ms(val) > 0, "Temporary range must be a positive number") .refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange), .describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange),
temporaryAccessStartTime: z temporaryAccessStartTime: z
.string() .string()
@@ -222,18 +179,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
}, },
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => { handler: async (req) => {
const { permissions, privilegePermission, ...updatedInfo } = req.body.privilegeDetails; const updatedInfo = req.body.privilegeDetails;
if (!permissions && !privilegePermission) {
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
}
const permission = privilegePermission
? privilegePermission.actions.map((action) => ({
action,
subject: privilegePermission.subject,
conditions: privilegePermission.conditions
}))
: permissions!;
const privilege = await server.services.identityProjectAdditionalPrivilege.updateBySlug({ const privilege = await server.services.identityProjectAdditionalPrivilege.updateBySlug({
actorId: req.permission.id, actorId: req.permission.id,
actor: req.permission.type, actor: req.permission.type,
@@ -244,7 +190,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
projectSlug: req.body.projectSlug, projectSlug: req.body.projectSlug,
data: { data: {
...updatedInfo, ...updatedInfo,
permissions: permission ? JSON.stringify(packRules(permission)) : undefined permissions: updatedInfo?.permissions ? JSON.stringify(packRules(updatedInfo.permissions)) : undefined
} }
}); });
return { privilege }; return { privilege };
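Both variants of these handlers end up serializing the CASL rules with packRules before persisting them as JSON; unpackRules restores them when the privilege is later evaluated. A minimal sketch of that round trip with hypothetical rule data, using only the documented @casl/ability/extra API:

import { packRules, unpackRules } from "@casl/ability/extra";

// Hypothetical permission rules in the shape CASL expects.
const rules = [
  { action: "read", subject: "secrets", conditions: { environment: "dev" } },
  { action: "edit", subject: "secrets" }
];

// packRules compresses verbose rule objects into compact tuples for a JSON column.
const stored = JSON.stringify(packRules(rules));

// Later, the stored value is parsed and unpacked back into full rule objects.
const restored = unpackRules(JSON.parse(stored));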

View File

@@ -23,7 +23,7 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
.min(1) .min(1)
.trim() .trim()
.refine( .refine(
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole), (val) => !Object.keys(OrgMembershipRole).includes(val),
"Please choose a different slug, the slug you have entered is reserved" "Please choose a different slug, the slug you have entered is reserved"
) )
.refine((v) => slugify(v) === v, { .refine((v) => slugify(v) === v, {

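The reserved-slug refinement is the one behavioral difference in this file: for a TypeScript string enum, Object.keys returns the member names while Object.values returns the slug strings users actually submit, so the two checks reject different inputs. A small sketch with an illustrative enum standing in for the real OrgMembershipRole:

// Illustrative enum; the real OrgMembershipRole lives in @app/db/schemas.
enum MembershipRole {
  Admin = "admin",
  Member = "member",
  NoAccess = "no-access"
}

const names = Object.keys(MembershipRole);   // ["Admin", "Member", "NoAccess"]
const slugs = Object.values(MembershipRole); // ["admin", "member", "no-access"]

// Checking against the values rejects the slug a user would actually type:
const isReserved = (slug: string) =>
  Object.values(MembershipRole).includes(slug as MembershipRole);

isReserved("admin"); // true
isReserved("Admin"); // false - only the member name, not a submittable slug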
View File

@@ -1,232 +1,146 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod"; import { z } from "zod";
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas"; import { ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ProjectPermissionSchema, SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type"; import { AuthMode } from "@app/services/auth/auth-type";
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => { export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({ server.route({
method: "POST", method: "POST",
url: "/:projectSlug/roles", url: "/:projectId/roles",
config: { config: {
rateLimit: writeLimit rateLimit: writeLimit
}, },
schema: { schema: {
description: "Create a project role",
security: [
{
bearerAuth: []
}
],
params: z.object({ params: z.object({
projectSlug: z.string().trim().describe(PROJECT_ROLE.CREATE.projectSlug) projectId: z.string().trim()
}), }),
body: z.object({ body: z.object({
slug: z slug: z.string().trim(),
.string() name: z.string().trim(),
.toLowerCase() description: z.string().trim().optional(),
.trim() permissions: z.any().array()
.min(1)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.CREATE.permissions)
}), }),
response: { response: {
200: z.object({ 200: z.object({
role: SanitizedRoleSchema role: ProjectRolesSchema
}) })
} }
}, },
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => { handler: async (req) => {
const role = await server.services.projectRole.createRole({ const role = await server.services.projectRole.createRole(
actorAuthMethod: req.permission.authMethod, req.permission.type,
actorId: req.permission.id, req.permission.id,
actorOrgId: req.permission.orgId, req.params.projectId,
actor: req.permission.type, req.body,
projectSlug: req.params.projectSlug, req.permission.authMethod,
data: { req.permission.orgId
...req.body, );
permissions: JSON.stringify(packRules(req.body.permissions))
}
});
return { role }; return { role };
} }
}); });
server.route({ server.route({
method: "PATCH", method: "PATCH",
url: "/:projectSlug/roles/:roleId", url: "/:projectId/roles/:roleId",
config: { config: {
rateLimit: writeLimit rateLimit: writeLimit
}, },
schema: { schema: {
description: "Update a project role",
security: [
{
bearerAuth: []
}
],
params: z.object({ params: z.object({
projectSlug: z.string().trim().describe(PROJECT_ROLE.UPDATE.projectSlug), projectId: z.string().trim(),
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId) roleId: z.string().trim()
}), }),
body: z.object({ body: z.object({
slug: z slug: z.string().trim().optional(),
.string() name: z.string().trim().optional(),
.toLowerCase() description: z.string().trim().optional(),
.trim() permissions: z.any().array()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
.refine(
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions)
}), }),
response: { response: {
200: z.object({ 200: z.object({
role: SanitizedRoleSchema role: ProjectRolesSchema
}) })
} }
}, },
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => { handler: async (req) => {
const role = await server.services.projectRole.updateRole({ const role = await server.services.projectRole.updateRole(
actorAuthMethod: req.permission.authMethod, req.permission.type,
actorId: req.permission.id, req.permission.id,
actorOrgId: req.permission.orgId, req.params.projectId,
actor: req.permission.type, req.params.roleId,
projectSlug: req.params.projectSlug, req.body,
roleId: req.params.roleId, req.permission.authMethod,
data: { req.permission.orgId
...req.body, );
permissions: JSON.stringify(packRules(req.body.permissions))
}
});
return { role }; return { role };
} }
}); });
server.route({ server.route({
method: "DELETE", method: "DELETE",
url: "/:projectSlug/roles/:roleId", url: "/:projectId/roles/:roleId",
config: { config: {
rateLimit: writeLimit rateLimit: writeLimit
}, },
schema: { schema: {
description: "Delete a project role",
security: [
{
bearerAuth: []
}
],
params: z.object({ params: z.object({
projectSlug: z.string().trim().describe(PROJECT_ROLE.DELETE.projectSlug), projectId: z.string().trim(),
roleId: z.string().trim().describe(PROJECT_ROLE.DELETE.roleId) roleId: z.string().trim()
}), }),
response: { response: {
200: z.object({ 200: z.object({
role: SanitizedRoleSchema role: ProjectRolesSchema
}) })
} }
}, },
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => { handler: async (req) => {
const role = await server.services.projectRole.deleteRole({ const role = await server.services.projectRole.deleteRole(
actorAuthMethod: req.permission.authMethod, req.permission.type,
actorId: req.permission.id, req.permission.id,
actorOrgId: req.permission.orgId, req.params.projectId,
actor: req.permission.type, req.params.roleId,
projectSlug: req.params.projectSlug, req.permission.authMethod,
roleId: req.params.roleId req.permission.orgId
}); );
return { role }; return { role };
} }
}); });
server.route({ server.route({
method: "GET", method: "GET",
url: "/:projectSlug/roles", url: "/:projectId/roles",
config: {
rateLimit: readLimit
},
schema: {
description: "List project role",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectSlug: z.string().trim().describe(PROJECT_ROLE.LIST.projectSlug)
}),
response: {
200: z.object({
roles: ProjectRolesSchema.omit({ permissions: true }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const roles = await server.services.projectRole.listRoles({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
projectSlug: req.params.projectSlug
});
return { roles };
}
});
server.route({
method: "GET",
url: "/:projectSlug/roles/slug/:slug",
config: { config: {
rateLimit: readLimit rateLimit: readLimit
}, },
schema: { schema: {
params: z.object({ params: z.object({
projectSlug: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.projectSlug), projectId: z.string().trim()
slug: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.roleSlug)
}), }),
response: { response: {
200: z.object({ 200: z.object({
role: SanitizedRoleSchema data: z.object({
roles: ProjectRolesSchema.omit({ permissions: true })
.merge(z.object({ permissions: z.unknown() }))
.array()
})
}) })
} }
}, },
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => { handler: async (req) => {
const role = await server.services.projectRole.getRoleBySlug({ const roles = await server.services.projectRole.listRoles(
actorAuthMethod: req.permission.authMethod, req.permission.type,
actorId: req.permission.id, req.permission.id,
actorOrgId: req.permission.orgId, req.params.projectId,
actor: req.permission.type, req.permission.authMethod,
projectSlug: req.params.projectSlug, req.permission.orgId
roleSlug: req.params.slug );
}); return { data: { roles } };
return { role };
} }
}); });
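One side of these project-role routes validates the role slug with two chained refinements: one rejecting reserved role names and one requiring the value to already be in slug form. A standalone sketch of that validator, assuming @sindresorhus/slugify and an illustrative reserved-role enum in place of ProjectMembershipRole:

import slugify from "@sindresorhus/slugify";
import { z } from "zod";

// Illustrative stand-in for ProjectMembershipRole.
enum ReservedRole {
  Admin = "admin",
  Member = "member",
  Viewer = "viewer"
}

const roleSlugSchema = z
  .string()
  .toLowerCase()
  .trim()
  .min(1)
  .refine(
    (val) => !Object.values(ReservedRole).includes(val as ReservedRole),
    "Please choose a different slug, the slug you have entered is reserved"
  )
  // slugify(v) === v holds only when v already contains no spaces or special characters.
  .refine((v) => slugify(v) === v, { message: "Slug must be a valid slug" });

roleSlugSchema.parse("release-manager"); // ok
// roleSlugSchema.parse("Release Manager"); // throws: not already slugified
// roleSlugSchema.parse("admin");           // throws: reserved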

View File

@@ -32,20 +32,22 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}), }),
response: { response: {
200: z.object({ 200: z.object({
approvals: SecretApprovalRequestsSchema.extend({ approvals: SecretApprovalRequestsSchema.merge(
// secretPath: z.string(), z.object({
policy: z.object({ // secretPath: z.string(),
id: z.string(), policy: z.object({
name: z.string(), id: z.string(),
approvals: z.number(), name: z.string(),
approvers: z.string().array(), approvals: z.number(),
secretPath: z.string().optional().nullable() approvers: z.string().array(),
}), secretPath: z.string().optional().nullable()
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(), }),
environment: z.string(), commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(), environment: z.string(),
approvers: z.string().array() reviewers: z.object({ member: z.string(), status: z.string() }).array(),
}).array() approvers: z.string().array()
})
).array()
}) })
} }
}, },

View File
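This hunk switches between SecretApprovalRequestsSchema.extend({ ... }) and .merge(z.object({ ... })): both add fields to the base object schema, with extend taking a raw shape and merge taking another z.object. A minimal sketch of the equivalence on a toy schema:

import { z } from "zod";

const Base = z.object({ id: z.string().uuid(), createdAt: z.date() });

// extend: pass the extra fields as a plain shape.
const WithPolicyA = Base.extend({
  policy: z.object({ id: z.string(), approvals: z.number() })
});

// merge: pass another z.object carrying the same fields.
const WithPolicyB = Base.merge(
  z.object({ policy: z.object({ id: z.string(), approvals: z.number() }) })
);

// Both produce the same inferred type.
type A = z.infer<typeof WithPolicyA>;
type B = z.infer<typeof WithPolicyB>;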

@@ -3,6 +3,7 @@ import { RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas"; import { SecretKeyEncoding } from "@app/db/schemas";
import { request } from "@app/lib/config/request"; import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption"; import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -112,7 +113,35 @@ export const auditLogQueueServiceFactory = ({
); );
}); });
queueService.start(QueueName.AuditLogPrune, async () => {
logger.info(`${QueueName.AuditLogPrune}: queue task started`);
await auditLogDAL.pruneAuditLog();
logger.info(`${QueueName.AuditLogPrune}: queue task completed`);
});
// we do a repeat cron job in utc timezone at 12 Midnight each day
const startAuditLogPruneJob = async () => {
// clear previous job
await queueService.stopRepeatableJob(
QueueName.AuditLogPrune,
QueueJobs.AuditLogPrune,
{ pattern: "0 0 * * *", utc: true },
QueueName.AuditLogPrune // just a job id
);
await queueService.queue(QueueName.AuditLogPrune, QueueJobs.AuditLogPrune, undefined, {
delay: 5000,
jobId: QueueName.AuditLogPrune,
repeat: { pattern: "0 0 * * *", utc: true }
});
};
queueService.listen(QueueName.AuditLogPrune, "failed", (err) => {
logger.error(err?.failedReason, `${QueueName.AuditLogPrune}: log pruning failed`);
});
return { return {
pushToLog pushToLog,
startAuditLogPruneJob
}; };
}; };
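The audit-log prune job here is registered through the internal queueService wrapper, but its shape mirrors an ordinary BullMQ repeatable job: a fixed jobId plus a { pattern, utc } repeat option so the midnight-UTC schedule is not duplicated across replicas. A rough sketch of that pattern against BullMQ directly (the Redis connection details and pruneAuditLog placeholder are assumptions, not the service's real wiring):

import { Queue, Worker } from "bullmq";

const connection = { host: "127.0.0.1", port: 6379 };
const queueName = "audit-log-prune";

const auditLogPruneQueue = new Queue(queueName, { connection });

// Placeholder for auditLogDAL.pruneAuditLog() in the real service.
const pruneAuditLog = async () => {};

const worker = new Worker(
  queueName,
  async () => {
    await pruneAuditLog();
  },
  { connection }
);

worker.on("failed", (_job, err) => {
  console.error(err, `${queueName}: log pruning failed`);
});

// Repeat daily at 00:00 UTC; the fixed jobId keeps multiple app instances
// from registering duplicate repeatable jobs.
export const startAuditLogPruneJob = async () => {
  await auditLogPruneQueue.add(queueName, undefined, {
    delay: 5000,
    jobId: queueName,
    repeat: { pattern: "0 0 * * *", utc: true }
  });
};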

View File

@@ -51,7 +51,6 @@ export enum EventType {
UNAUTHORIZE_INTEGRATION = "unauthorize-integration", UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
CREATE_INTEGRATION = "create-integration", CREATE_INTEGRATION = "create-integration",
DELETE_INTEGRATION = "delete-integration", DELETE_INTEGRATION = "delete-integration",
MANUAL_SYNC_INTEGRATION = "manual-sync-integration",
ADD_TRUSTED_IP = "add-trusted-ip", ADD_TRUSTED_IP = "add-trusted-ip",
UPDATE_TRUSTED_IP = "update-trusted-ip", UPDATE_TRUSTED_IP = "update-trusted-ip",
DELETE_TRUSTED_IP = "delete-trusted-ip", DELETE_TRUSTED_IP = "delete-trusted-ip",
@@ -64,25 +63,9 @@ export enum EventType {
ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth", ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth", UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth", GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret", CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret", REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret", GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
CREATE_ENVIRONMENT = "create-environment", CREATE_ENVIRONMENT = "create-environment",
UPDATE_ENVIRONMENT = "update-environment", UPDATE_ENVIRONMENT = "update-environment",
DELETE_ENVIRONMENT = "delete-environment", DELETE_ENVIRONMENT = "delete-environment",
@@ -286,25 +269,6 @@ interface DeleteIntegrationEvent {
}; };
} }
interface ManualSyncIntegrationEvent {
type: EventType.MANUAL_SYNC_INTEGRATION;
metadata: {
integrationId: string;
integration: string;
environment: string;
secretPath: string;
url?: string;
app?: string;
appId?: string;
targetEnvironment?: string;
targetEnvironmentId?: string;
targetService?: string;
targetServiceId?: string;
path?: string;
region?: string;
};
}
interface AddTrustedIPEvent { interface AddTrustedIPEvent {
type: EventType.ADD_TRUSTED_IP; type: EventType.ADD_TRUSTED_IP;
metadata: { metadata: {
@@ -419,50 +383,6 @@ interface GetIdentityUniversalAuthEvent {
}; };
} }
interface LoginIdentityKubernetesAuthEvent {
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH;
metadata: {
identityId: string;
identityKubernetesAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityKubernetesAuthEvent {
type: EventType.ADD_IDENTITY_KUBERNETES_AUTH;
metadata: {
identityId: string;
kubernetesHost: string;
allowedNamespaces: string;
allowedNames: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityKubernetesAuthEvent {
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH;
metadata: {
identityId: string;
kubernetesHost?: string;
allowedNamespaces?: string;
allowedNames?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityKubernetesAuthEvent {
type: EventType.GET_IDENTITY_KUBERNETES_AUTH;
metadata: {
identityId: string;
};
}
interface CreateIdentityUniversalAuthClientSecretEvent { interface CreateIdentityUniversalAuthClientSecretEvent {
type: EventType.CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET; type: EventType.CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET;
metadata: { metadata: {
@@ -486,138 +406,6 @@ interface RevokeIdentityUniversalAuthClientSecretEvent {
}; };
} }
interface LoginIdentityGcpAuthEvent {
type: EventType.LOGIN_IDENTITY_GCP_AUTH;
metadata: {
identityId: string;
identityGcpAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityGcpAuthEvent {
type: EventType.ADD_IDENTITY_GCP_AUTH;
metadata: {
identityId: string;
type: string;
allowedServiceAccounts: string;
allowedProjects: string;
allowedZones: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityGcpAuthEvent {
type: EventType.UPDATE_IDENTITY_GCP_AUTH;
metadata: {
identityId: string;
type?: string;
allowedServiceAccounts?: string;
allowedProjects?: string;
allowedZones?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityGcpAuthEvent {
type: EventType.GET_IDENTITY_GCP_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityAwsAuthEvent {
type: EventType.LOGIN_IDENTITY_AWS_AUTH;
metadata: {
identityId: string;
identityAwsAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityAwsAuthEvent {
type: EventType.ADD_IDENTITY_AWS_AUTH;
metadata: {
identityId: string;
stsEndpoint: string;
allowedPrincipalArns: string;
allowedAccountIds: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityAwsAuthEvent {
type: EventType.UPDATE_IDENTITY_AWS_AUTH;
metadata: {
identityId: string;
stsEndpoint?: string;
allowedPrincipalArns?: string;
allowedAccountIds?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityAwsAuthEvent {
type: EventType.GET_IDENTITY_AWS_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityAzureAuthEvent {
type: EventType.LOGIN_IDENTITY_AZURE_AUTH;
metadata: {
identityId: string;
identityAzureAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityAzureAuthEvent {
type: EventType.ADD_IDENTITY_AZURE_AUTH;
metadata: {
identityId: string;
tenantId: string;
resource: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityAzureAuthEvent {
type: EventType.UPDATE_IDENTITY_AZURE_AUTH;
metadata: {
identityId: string;
tenantId?: string;
resource?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityAzureAuthEvent {
type: EventType.GET_IDENTITY_AZURE_AUTH;
metadata: {
identityId: string;
};
}
interface CreateEnvironmentEvent { interface CreateEnvironmentEvent {
type: EventType.CREATE_ENVIRONMENT; type: EventType.CREATE_ENVIRONMENT;
metadata: { metadata: {
@@ -857,7 +645,6 @@ export type Event =
| UnauthorizeIntegrationEvent | UnauthorizeIntegrationEvent
| CreateIntegrationEvent | CreateIntegrationEvent
| DeleteIntegrationEvent | DeleteIntegrationEvent
| ManualSyncIntegrationEvent
| AddTrustedIPEvent | AddTrustedIPEvent
| UpdateTrustedIPEvent | UpdateTrustedIPEvent
| DeleteTrustedIPEvent | DeleteTrustedIPEvent
@@ -870,25 +657,9 @@ export type Event =
| AddIdentityUniversalAuthEvent | AddIdentityUniversalAuthEvent
| UpdateIdentityUniversalAuthEvent | UpdateIdentityUniversalAuthEvent
| GetIdentityUniversalAuthEvent | GetIdentityUniversalAuthEvent
| LoginIdentityKubernetesAuthEvent
| AddIdentityKubernetesAuthEvent
| UpdateIdentityKubernetesAuthEvent
| GetIdentityKubernetesAuthEvent
| CreateIdentityUniversalAuthClientSecretEvent | CreateIdentityUniversalAuthClientSecretEvent
| GetIdentityUniversalAuthClientSecretsEvent | GetIdentityUniversalAuthClientSecretsEvent
| RevokeIdentityUniversalAuthClientSecretEvent | RevokeIdentityUniversalAuthClientSecretEvent
| LoginIdentityGcpAuthEvent
| AddIdentityGcpAuthEvent
| UpdateIdentityGcpAuthEvent
| GetIdentityGcpAuthEvent
| LoginIdentityAwsAuthEvent
| AddIdentityAwsAuthEvent
| UpdateIdentityAwsAuthEvent
| GetIdentityAwsAuthEvent
| LoginIdentityAzureAuthEvent
| AddIdentityAzureAuthEvent
| UpdateIdentityAzureAuthEvent
| GetIdentityAzureAuthEvent
| CreateEnvironmentEvent | CreateEnvironmentEvent
| UpdateEnvironmentEvent | UpdateEnvironmentEvent
| DeleteEnvironmentEvent | DeleteEnvironmentEvent

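Every event in this file pairs a literal type with a typed metadata payload, and the exported Event union is what the log writer consumes; narrowing on type then gives strongly typed metadata per case. A tiny self-contained sketch of the same pattern, using an illustrative two-member subset rather than the real enum:

enum EventType {
  CreateIntegration = "create-integration",
  DeleteIntegration = "delete-integration"
}

interface CreateIntegrationEvent {
  type: EventType.CreateIntegration;
  metadata: { integrationId: string; app?: string };
}

interface DeleteIntegrationEvent {
  type: EventType.DeleteIntegration;
  metadata: { integrationId: string };
}

type Event = CreateIntegrationEvent | DeleteIntegrationEvent;

const describe = (event: Event): string => {
  switch (event.type) {
    case EventType.CreateIntegration:
      // Narrowed: metadata.app is available here.
      return `created integration ${event.metadata.integrationId}`;
    case EventType.DeleteIntegration:
      return `deleted integration ${event.metadata.integrationId}`;
    default: {
      // Exhaustiveness check: a new event type without a case fails to compile.
      const _exhaustive: never = event;
      return _exhaustive;
    }
  }
};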
View File

@@ -16,8 +16,6 @@ export const licenseDALFactory = (db: TDbClient) => {
void bd.where({ orgId }); void bd.where({ orgId });
} }
}) })
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count(); .count();
return doc?.[0].count; return doc?.[0].count;
} catch (error) { } catch (error) {

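The seat-count query in this DAL differs only in whether it joins the users table to exclude ghost users before counting memberships. A hedged Knex sketch of the joined variant, with a literal connection string and table names standing in for the TableName enum:

import knex from "knex";

// Illustrative connection; the real DAL receives an already-configured TDbClient.
const db = knex({ client: "pg", connection: "postgres://localhost/infisical" });

// Count non-ghost users that hold a membership in the given org.
const countOrgSeats = async (orgId: string) => {
  const doc = await db("org_memberships")
    .where({ orgId })
    .join("users", "org_memberships.userId", "users.id")
    .where("users.isGhost", false)
    .count();
  // knex returns an array like [{ count: "42" }] on Postgres.
  return doc?.[0].count;
};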
View File

@@ -7,24 +7,14 @@ import {
SecretType, SecretType,
TSecretApprovalRequestsSecretsInsert TSecretApprovalRequestsSecretsInsert
} from "@app/db/schemas"; } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { groupBy, pick, unique } from "@app/lib/fn"; import { groupBy, pick, unique } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid"; import { alphaNumericNanoId } from "@app/lib/nanoid";
import { ActorType } from "@app/services/auth/auth-type"; import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal"; import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import {
fnSecretBlindIndexCheck,
fnSecretBlindIndexCheckV2,
fnSecretBulkDelete,
fnSecretBulkInsert,
fnSecretBulkUpdate,
getAllNestedSecretReferences
} from "@app/services/secret/secret-fns";
import { TSecretQueueFactory } from "@app/services/secret/secret-queue"; import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types"; import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal"; import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal"; import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal"; import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
@@ -39,6 +29,7 @@ import { TSecretApprovalRequestReviewerDALFactory } from "./secret-approval-requ
import { TSecretApprovalRequestSecretDALFactory } from "./secret-approval-request-secret-dal"; import { TSecretApprovalRequestSecretDALFactory } from "./secret-approval-request-secret-dal";
import { import {
ApprovalStatus, ApprovalStatus,
CommitType,
RequestState, RequestState,
TApprovalRequestCountDTO, TApprovalRequestCountDTO,
TGenerateSecretApprovalRequestDTO, TGenerateSecretApprovalRequestDTO,
@@ -51,11 +42,10 @@ import {
type TSecretApprovalRequestServiceFactoryDep = { type TSecretApprovalRequestServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">; permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretApprovalRequestDAL: TSecretApprovalRequestDALFactory; secretApprovalRequestDAL: TSecretApprovalRequestDALFactory;
secretApprovalRequestSecretDAL: TSecretApprovalRequestSecretDALFactory; secretApprovalRequestSecretDAL: TSecretApprovalRequestSecretDALFactory;
secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory; secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findSecretPathByFolderIds">; folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findById" | "findSecretPathByFolderIds">;
secretDAL: TSecretDALFactory; secretDAL: TSecretDALFactory;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret">; secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">; secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
@@ -63,7 +53,15 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">; secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">; secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">; projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "removeSecretReminder">; secretService: Pick<
TSecretServiceFactory,
| "fnSecretBulkInsert"
| "fnSecretBulkUpdate"
| "fnSecretBlindIndexCheck"
| "fnSecretBulkDelete"
| "fnSecretBlindIndexCheckV2"
>;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
}; };
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>; export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -80,9 +78,9 @@ export const secretApprovalRequestServiceFactory = ({
projectDAL, projectDAL,
permissionService, permissionService,
snapshotService, snapshotService,
secretService,
secretVersionDAL, secretVersionDAL,
secretQueueService, secretQueueService
projectBotService
}: TSecretApprovalRequestServiceFactoryDep) => { }: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => { const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" }); if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@@ -299,12 +297,11 @@ export const secretApprovalRequestServiceFactory = ({
const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id); const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
if (!secretApprovalSecrets) throw new BadRequestError({ message: "No secrets found" }); if (!secretApprovalSecrets) throw new BadRequestError({ message: "No secrets found" });
const conflicts: Array<{ secretId: string; op: SecretOperations }> = []; const conflicts: Array<{ secretId: string; op: CommitType }> = [];
let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create); let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Create);
if (secretCreationCommits.length) { if (secretCreationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({ const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId, folderId,
secretDAL,
inputSecrets: secretCreationCommits.map(({ secretBlindIndex }) => { inputSecrets: secretCreationCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) { if (!secretBlindIndex) {
throw new BadRequestError({ throw new BadRequestError({
@@ -317,19 +314,17 @@ export const secretApprovalRequestServiceFactory = ({
secretCreationCommits secretCreationCommits
.filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""]) .filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""])
.forEach((el) => { .forEach((el) => {
conflicts.push({ op: SecretOperations.Create, secretId: el.id }); conflicts.push({ op: CommitType.Create, secretId: el.id });
}); });
secretCreationCommits = secretCreationCommits.filter( secretCreationCommits = secretCreationCommits.filter(
({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""] ({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""]
); );
} }
let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update); let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Update);
if (secretUpdationCommits.length) { if (secretUpdationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({ const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId, folderId,
secretDAL,
userId: "",
inputSecrets: secretUpdationCommits inputSecrets: secretUpdationCommits
.filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex) .filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex)
.map(({ secretBlindIndex }) => { .map(({ secretBlindIndex }) => {
@@ -347,7 +342,7 @@ export const secretApprovalRequestServiceFactory = ({
(secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId (secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId
) )
.forEach((el) => { .forEach((el) => {
conflicts.push({ op: SecretOperations.Update, secretId: el.id }); conflicts.push({ op: CommitType.Update, secretId: el.id });
}); });
secretUpdationCommits = secretUpdationCommits.filter( secretUpdationCommits = secretUpdationCommits.filter(
@@ -356,11 +351,11 @@ export const secretApprovalRequestServiceFactory = ({
); );
} }
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete); const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Delete);
const botKey = await projectBotService.getBotKey(projectId).catch(() => null);
const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => { const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => {
const newSecrets = secretCreationCommits.length const newSecrets = secretCreationCommits.length
? await fnSecretBulkInsert({ ? await secretService.fnSecretBulkInsert({
tx, tx,
folderId, folderId,
inputSecrets: secretCreationCommits.map((el) => ({ inputSecrets: secretCreationCommits.map((el) => ({
@@ -384,17 +379,7 @@ export const secretApprovalRequestServiceFactory = ({
]), ]),
tags: el?.tags.map(({ id }) => id), tags: el?.tags.map(({ id }) => id),
version: 1, version: 1,
type: SecretType.Shared, type: SecretType.Shared
references: botKey
? getAllNestedSecretReferences(
decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
})
)
: undefined
})), })),
secretDAL, secretDAL,
secretVersionDAL, secretVersionDAL,
@@ -403,7 +388,7 @@ export const secretApprovalRequestServiceFactory = ({
}) })
: []; : [];
const updatedSecrets = secretUpdationCommits.length const updatedSecrets = secretUpdationCommits.length
? await fnSecretBulkUpdate({ ? await secretService.fnSecretBulkUpdate({
folderId, folderId,
projectId, projectId,
tx, tx,
@@ -429,17 +414,7 @@ export const secretApprovalRequestServiceFactory = ({
"secretReminderNote", "secretReminderNote",
"secretReminderRepeatDays", "secretReminderRepeatDays",
"secretBlindIndex" "secretBlindIndex"
]), ])
references: botKey
? getAllNestedSecretReferences(
decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
})
)
: undefined
} }
})), })),
secretDAL, secretDAL,
@@ -449,13 +424,11 @@ export const secretApprovalRequestServiceFactory = ({
}) })
: []; : [];
const deletedSecret = secretDeletionCommits.length const deletedSecret = secretDeletionCommits.length
? await fnSecretBulkDelete({ ? await secretService.fnSecretBulkDelete({
projectId, projectId,
folderId, folderId,
tx, tx,
actorId: "", actorId: "",
secretDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ secretBlindIndex }) => { inputSecrets: secretDeletionCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) { if (!secretBlindIndex) {
throw new BadRequestError({ throw new BadRequestError({
@@ -482,14 +455,12 @@ export const secretApprovalRequestServiceFactory = ({
}; };
}); });
await snapshotService.performSnapshot(folderId); await snapshotService.performSnapshot(folderId);
const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]); const folder = await folderDAL.findById(folderId);
if (!folder) throw new BadRequestError({ message: "Folder not found" }); // TODO(akhilmhdh-pg): change query to do secret path from folder
await secretQueueService.syncSecrets({ await secretQueueService.syncSecrets({
projectId, projectId,
secretPath: folder.path, secretPath: "/",
environmentSlug: folder.environmentSlug, environment: folder?.environment.envSlug as string
actorId,
actor
}); });
return mergeStatus; return mergeStatus;
}; };
@@ -537,9 +508,9 @@ export const secretApprovalRequestServiceFactory = ({
const commits: Omit<TSecretApprovalRequestsSecretsInsert, "requestId">[] = []; const commits: Omit<TSecretApprovalRequestsSecretsInsert, "requestId">[] = [];
const commitTagIds: Record<string, string[]> = {}; const commitTagIds: Record<string, string[]> = {};
// for created secret approval change // for created secret approval change
const createdSecrets = data[SecretOperations.Create]; const createdSecrets = data[CommitType.Create];
if (createdSecrets && createdSecrets?.length) { if (createdSecrets && createdSecrets?.length) {
const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({ const { keyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: createdSecrets, inputSecrets: createdSecrets,
folderId, folderId,
isNew: true, isNew: true,
@@ -550,7 +521,7 @@ export const secretApprovalRequestServiceFactory = ({
commits.push( commits.push(
...createdSecrets.map(({ secretName, ...el }) => ({ ...createdSecrets.map(({ secretName, ...el }) => ({
...el, ...el,
op: SecretOperations.Create as const, op: CommitType.Create as const,
version: 1, version: 1,
secretBlindIndex: keyName2BlindIndex[secretName], secretBlindIndex: keyName2BlindIndex[secretName],
algorithm: SecretEncryptionAlgo.AES_256_GCM, algorithm: SecretEncryptionAlgo.AES_256_GCM,
@@ -562,12 +533,12 @@ export const secretApprovalRequestServiceFactory = ({
}); });
} }
// not secret approval for update operations // not secret approval for update operations
const updatedSecrets = data[SecretOperations.Update]; const updatedSecrets = data[CommitType.Update];
if (updatedSecrets && updatedSecrets?.length) { if (updatedSecrets && updatedSecrets?.length) {
// get all blind index // get all blind index
// Find all those secrets // Find all those secrets
// if not throw not found // if not throw not found
const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await fnSecretBlindIndexCheck({ const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: updatedSecrets, inputSecrets: updatedSecrets,
folderId, folderId,
isNew: false, isNew: false,
@@ -578,8 +549,8 @@ export const secretApprovalRequestServiceFactory = ({
// now find any secret that needs to update its name // now find any secret that needs to update its name
// same process as above // same process as above
const nameUpdatedSecrets = updatedSecrets.filter(({ newSecretName }) => Boolean(newSecretName)); const nameUpdatedSecrets = updatedSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
const { keyName2BlindIndex: newKeyName2BlindIndex } = await fnSecretBlindIndexCheck({ const { keyName2BlindIndex: newKeyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: nameUpdatedSecrets.map(({ newSecretName }) => ({ secretName: newSecretName as string })), inputSecrets: nameUpdatedSecrets,
folderId, folderId,
isNew: true, isNew: true,
blindIndexCfg, blindIndexCfg,
@@ -596,14 +567,14 @@ export const secretApprovalRequestServiceFactory = ({
const secretId = secsGroupedByBlindIndex[keyName2BlindIndex[secretName]][0].id; const secretId = secsGroupedByBlindIndex[keyName2BlindIndex[secretName]][0].id;
const secretBlindIndex = const secretBlindIndex =
newSecretName && newKeyName2BlindIndex[newSecretName] newSecretName && newKeyName2BlindIndex[newSecretName]
? newKeyName2BlindIndex?.[newSecretName] ? newKeyName2BlindIndex?.[secretName]
: keyName2BlindIndex[secretName]; : keyName2BlindIndex[secretName];
// add tags // add tags
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds; if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
return { return {
...latestSecretVersions[secretId], ...latestSecretVersions[secretId],
...el, ...el,
op: SecretOperations.Update as const, op: CommitType.Update as const,
secret: secretId, secret: secretId,
secretVersion: latestSecretVersions[secretId].id, secretVersion: latestSecretVersions[secretId].id,
secretBlindIndex, secretBlindIndex,
@@ -613,12 +584,12 @@ export const secretApprovalRequestServiceFactory = ({
); );
} }
// deleted secrets // deleted secrets
const deletedSecrets = data[SecretOperations.Delete]; const deletedSecrets = data[CommitType.Delete];
if (deletedSecrets && deletedSecrets.length) { if (deletedSecrets && deletedSecrets.length) {
// get all blind index // get all blind index
// Find all those secrets // Find all those secrets
// if not throw not found // if not throw not found
const { keyName2BlindIndex, secrets } = await fnSecretBlindIndexCheck({ const { keyName2BlindIndex, secrets } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: deletedSecrets, inputSecrets: deletedSecrets,
folderId, folderId,
isNew: false, isNew: false,
@@ -639,7 +610,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!latestSecretVersions[secretId].secretBlindIndex) if (!latestSecretVersions[secretId].secretBlindIndex)
throw new BadRequestError({ message: "Failed to find secret blind index" }); throw new BadRequestError({ message: "Failed to find secret blind index" });
return { return {
op: SecretOperations.Delete as const, op: CommitType.Delete as const,
...latestSecretVersions[secretId], ...latestSecretVersions[secretId],
secretBlindIndex: latestSecretVersions[secretId].secretBlindIndex as string, secretBlindIndex: latestSecretVersions[secretId].secretBlindIndex as string,
secret: secretId, secret: secretId,

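Throughout this service factory, collaborators are injected with Pick<...> types so the service declares only the DAL and queue methods it actually calls, which keeps call sites honest and test doubles small. A generic sketch of that dependency-narrowing pattern with hypothetical factory names:

// Hypothetical factory types, illustrating the Pick-based dependency narrowing.
type TFolderDALFactory = {
  findById: (id: string) => Promise<{ id: string; path: string }>;
  findBySecretPath: (path: string) => Promise<{ id: string } | undefined>;
  deleteById: (id: string) => Promise<void>;
};

type TSnapshotServiceFactory = {
  performSnapshot: (folderId: string) => Promise<void>;
};

type TMergeServiceDep = {
  // Only the methods this service calls are demanded from callers and mocks.
  folderDAL: Pick<TFolderDALFactory, "findById" | "findBySecretPath">;
  snapshotService: Pick<TSnapshotServiceFactory, "performSnapshot">;
};

export const mergeServiceFactory = ({ folderDAL, snapshotService }: TMergeServiceDep) => {
  const mergeIntoFolder = async (folderId: string) => {
    const folder = await folderDAL.findById(folderId);
    await snapshotService.performSnapshot(folder.id);
    return folder;
  };
  return { mergeIntoFolder };
};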
View File

@@ -1,6 +1,11 @@
import { TImmutableDBKeys, TSecretApprovalPolicies, TSecretApprovalRequestsSecrets } from "@app/db/schemas"; import { TImmutableDBKeys, TSecretApprovalPolicies, TSecretApprovalRequestsSecrets } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types"; import { TProjectPermission } from "@app/lib/types";
import { SecretOperations } from "@app/services/secret/secret-types";
export enum CommitType {
Create = "create",
Update = "update",
Delete = "delete"
}
export enum RequestState { export enum RequestState {
Open = "open", Open = "open",
@@ -13,14 +18,14 @@ export enum ApprovalStatus {
REJECTED = "rejected" REJECTED = "rejected"
} }
export type TApprovalCreateSecret = Omit< type TApprovalCreateSecret = Omit<
TSecretApprovalRequestsSecrets, TSecretApprovalRequestsSecrets,
TImmutableDBKeys | "version" | "algorithm" | "keyEncoding" | "requestId" | "op" | "secretVersion" | "secretBlindIndex" TImmutableDBKeys | "version" | "algorithm" | "keyEncoding" | "requestId" | "op" | "secretVersion" | "secretBlindIndex"
> & { > & {
secretName: string; secretName: string;
tagIds?: string[]; tagIds?: string[];
}; };
export type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & { type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
secretName: string; secretName: string;
newSecretName?: string; newSecretName?: string;
tagIds?: string[]; tagIds?: string[];
@@ -31,9 +36,9 @@ export type TGenerateSecretApprovalRequestDTO = {
secretPath: string; secretPath: string;
policy: TSecretApprovalPolicies; policy: TSecretApprovalPolicies;
data: { data: {
[SecretOperations.Create]?: TApprovalCreateSecret[]; [CommitType.Create]?: TApprovalCreateSecret[];
[SecretOperations.Update]?: TApprovalUpdateSecret[]; [CommitType.Update]?: TApprovalUpdateSecret[];
[SecretOperations.Delete]?: { secretName: string }[]; [CommitType.Delete]?: { secretName: string }[];
}; };
} & TProjectPermission; } & TProjectPermission;
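For orientation, the `data` map in `TGenerateSecretApprovalRequestDTO` is keyed by `CommitType`; a minimal illustrative sketch follows, assuming the types above are in scope (create entries would additionally carry the encrypted key/value fields required by `TApprovalCreateSecret`, which are elided here):

```ts
// Illustrative only — the secret names are placeholders.
const data: TGenerateSecretApprovalRequestDTO["data"] = {
  [CommitType.Update]: [{ secretName: "API_KEY", newSecretName: "API_TOKEN", tagIds: [] }],
  [CommitType.Delete]: [{ secretName: "LEGACY_TOKEN" }]
};
```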

View File

@@ -1 +0,0 @@
export const MAX_REPLICATION_DEPTH = 5;

View File

@@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSecretReplicationDALFactory = ReturnType<typeof secretReplicationDALFactory>;
export const secretReplicationDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.SecretVersion);
return orm;
};

View File

@@ -1,485 +0,0 @@
import { SecretType, TSecrets } from "@app/db/schemas";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { ReservedFolders } from "@app/services/secret-folder/secret-folder-types";
import { TSecretImportDALFactory } from "@app/services/secret-import/secret-import-dal";
import { fnSecretsFromImports } from "@app/services/secret-import/secret-import-fns";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { MAX_REPLICATION_DEPTH } from "./secret-replication-constants";
type TSecretReplicationServiceFactoryDep = {
secretDAL: Pick<
TSecretDALFactory,
"find" | "findByBlindIndexes" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction"
>;
secretVersionDAL: Pick<TSecretVersionDALFactory, "find" | "insertMany" | "update" | "findLatestVersionMany">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds">;
folderDAL: Pick<
TSecretFolderDALFactory,
"findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath"
>;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "find" | "insertMany">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "replicateSecrets">;
queueService: Pick<TQueueServiceFactory, "start" | "listen" | "queue" | "stopJobById">;
secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
secretApprovalRequestSecretDAL: Pick<
TSecretApprovalRequestSecretDALFactory,
"insertMany" | "insertApprovalSecretTags"
>;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
const SECRET_IMPORT_SUCCESS_LOCK = 10;
const keystoreReplicationSuccessKey = (jobId: string, secretImportId: string) => `${jobId}-${secretImportId}`;
const getReplicationKeyLockPrefix = (projectId: string, environmentSlug: string, secretPath: string) =>
`REPLICATION_SECRET_${projectId}-${environmentSlug}-${secretPath}`;
export const getReplicationFolderName = (importId: string) => `${ReservedFolders.SecretReplication}${importId}`;
const getDecryptedKeyValue = (key: string, secret: TSecrets) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key
});
return { key: secretKey, value: secretValue };
};
export const secretReplicationServiceFactory = ({
secretDAL,
queueService,
secretVersionDAL,
secretImportDAL,
keyStore,
secretVersionTagDAL,
secretTagDAL,
folderDAL,
secretApprovalPolicyService,
secretApprovalRequestSecretDAL,
secretApprovalRequestDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
}: TSecretReplicationServiceFactoryDep) => {
const getReplicatedSecrets = (
botKey: string,
localSecrets: TSecrets[],
importedSecrets: { secrets: TSecrets[] }[]
) => {
const deDupe = new Set<string>();
const secrets = localSecrets
.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex))
.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
deDupe.add(decryptedSecret.key);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
importedSecrets[i].secrets.forEach((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
if (deDupe.has(decryptedSecret.key) || !el.secretBlindIndex) {
return;
}
deDupe.add(decryptedSecret.key);
secrets.push({ ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value });
});
}
return secrets;
};
// IMPORTANT NOTE BEFORE READING THE FUNCTION
// SOURCE - Where secrets are copied from
// DESTINATION - Where the replicated imports that point back to SOURCE live
queueService.start(QueueName.SecretReplication, async (job) => {
logger.info(job.data, "Replication started");
const {
secretPath,
environmentSlug,
projectId,
actorId,
actor,
pickOnlyImportIds,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue,
_depth: depth = 0
} = job.data;
if (depth > MAX_REPLICATION_DEPTH) return;
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, secretPath);
if (!folder) return;
// the replicated imports made to the source. These are the destinations
const destinationSecretImports = await secretImportDAL.find({
importPath: secretPath,
importEnv: folder.envId
});
// CASE: normal mode <- link import <- replicated import
const nonReplicatedDestinationImports = destinationSecretImports.filter(({ isReplication }) => !isReplication);
if (nonReplicatedDestinationImports.length) {
// keep calling sync secret for all the imports made
const importedFolderIds = unique(nonReplicatedDestinationImports, (i) => i.folderId).map(
({ folderId }) => folderId
);
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
await Promise.all(
nonReplicatedDestinationImports
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
// filter out already synced ones
.filter(
({ folderId }) =>
!deDupeQueue?.[
uniqueSecretQueueKey(
foldersGroupedById[folderId][0]?.environmentSlug as string,
foldersGroupedById[folderId][0]?.path as string
)
]
)
.map(({ folderId }) =>
secretQueueService.replicateSecrets({
projectId,
secretPath: foldersGroupedById[folderId][0]?.path as string,
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
})
)
);
}
let destinationReplicatedSecretImports = destinationSecretImports.filter(({ isReplication }) =>
Boolean(isReplication)
);
destinationReplicatedSecretImports = pickOnlyImportIds
? destinationReplicatedSecretImports.filter(({ id }) => pickOnlyImportIds?.includes(id))
: destinationReplicatedSecretImports;
if (!destinationReplicatedSecretImports.length) return;
const botKey = await projectBotService.getBotKey(projectId);
// these are the secrets to be added in replicated folders
const sourceLocalSecrets = await secretDAL.find({ folderId: folder.id, type: SecretType.Shared });
const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id });
const sourceImportedSecrets = await fnSecretsFromImports({
allowedImports: sourceSecretImports,
secretDAL,
folderDAL,
secretImportDAL
});
// secrets that get replicated across imports
const sourceSecrets = getReplicatedSecrets(botKey, sourceLocalSecrets, sourceImportedSecrets);
const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string);
const lock = await keyStore.acquireLock(
[getReplicationKeyLockPrefix(projectId, environmentSlug, secretPath)],
5000
);
try {
/* eslint-disable no-await-in-loop */
for (const destinationSecretImport of destinationReplicatedSecretImports) {
try {
const hasJobCompleted = await keyStore.getItem(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
KeyStorePrefixes.SecretReplication
);
if (hasJobCompleted) {
logger.info(
{ jobId: job.id, importId: destinationSecretImport.id },
"Skipping this job as this has been successfully replicated."
);
// eslint-disable-next-line
continue;
}
const [destinationFolder] = await folderDAL.findSecretPathByFolderIds(projectId, [
destinationSecretImport.folderId
]);
if (!destinationFolder) throw new BadRequestError({ message: "Imported folder not found" });
let destinationReplicationFolder = await folderDAL.findOne({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
isReserved: true
});
if (!destinationReplicationFolder) {
destinationReplicationFolder = await folderDAL.create({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
envId: destinationFolder.envId,
isReserved: true
});
}
const destinationReplicationFolderId = destinationReplicationFolder.id;
const destinationLocalSecretsFromDB = await secretDAL.find({
folderId: destinationReplicationFolderId
});
const destinationLocalSecrets = destinationLocalSecretsFromDB.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
const destinationLocalSecretsGroupedByBlindIndex = groupBy(
destinationLocalSecrets.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex)),
(i) => i.secretBlindIndex as string
);
const locallyCreatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex }) => !destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]
)
.map((el) => ({ ...el, operation: SecretOperations.Create })); // rewrite update ops to create
const locallyUpdatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex, secretKey, secretValue }) =>
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0] &&
// if key or value changed
(destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretKey !== secretKey ||
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretValue !==
secretValue)
)
.map((el) => ({ ...el, operation: SecretOperations.Update })); // mark changed secrets as update ops
const locallyDeletedSecrets = destinationLocalSecrets
.filter(({ secretBlindIndex }) => !sourceSecretsGroupByBlindIndex[secretBlindIndex as string]?.[0])
.map((el) => ({ ...el, operation: SecretOperations.Delete }));
const isEmpty =
locallyCreatedSecrets.length + locallyUpdatedSecrets.length + locallyDeletedSecrets.length === 0;
// eslint-disable-next-line
if (isEmpty) continue;
const policy = await secretApprovalPolicyService.getSecretApprovalPolicy(
projectId,
destinationFolder.environmentSlug,
destinationFolder.path
);
// this means it should be an approval request rather than direct replication
if (policy && actor === ActorType.USER) {
const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
if (!membership) {
logger.error("Project membership not found in %s for user %s", projectId, actorId);
return;
}
const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
destinationReplicationFolderId,
localSecretsLatestVersions
);
await secretApprovalRequestDAL.transaction(async (tx) => {
const approvalRequestDoc = await secretApprovalRequestDAL.create(
{
folderId: destinationReplicationFolderId,
slug: alphaNumericNanoId(),
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id,
isReplicated: true
},
tx
);
const commits = locallyCreatedSecrets
.concat(locallyUpdatedSecrets)
.concat(locallyDeletedSecrets)
.map((doc) => {
const { operation } = doc;
const localSecret = destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string]?.[0];
return {
op: operation,
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding,
// except for the create operation, the other two need the secret id and version id
...(operation !== SecretOperations.Create
? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id }
: {})
};
});
const approvalCommits = await secretApprovalRequestSecretDAL.insertMany(commits, tx);
return { ...approvalRequestDoc, commits: approvalCommits };
});
} else {
await secretDAL.transaction(async (tx) => {
if (locallyCreatedSecrets.length) {
await fnSecretBulkInsert({
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
};
})
});
}
if (locallyUpdatedSecrets.length) {
await fnSecretBulkUpdate({
projectId,
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyUpdatedSecrets.map((doc) => {
return {
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
},
data: {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
}
};
})
});
}
if (locallyDeletedSecrets.length) {
await secretDAL.delete(
{
$in: {
id: locallyDeletedSecrets.map(({ id }) => id)
},
folderId: destinationReplicationFolderId
},
tx
);
}
});
await secretQueueService.syncSecrets({
projectId,
secretPath: destinationFolder.path,
environmentSlug: destinationFolder.environmentSlug,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
});
}
// this marker avoids generating the secret approval again when a failed job is retried
await keyStore.setItemWithExpiry(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
SECRET_IMPORT_SUCCESS_LOCK,
1,
KeyStorePrefixes.SecretReplication
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: null,
isReplicationSuccess: true
});
} catch (err) {
logger.error(
err,
`Failed to replicate secret with import id=[${destinationSecretImport.id}] env=[${destinationSecretImport.importEnv.slug}] path=[${destinationSecretImport.importPath}]`
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: (err as Error)?.message.slice(0, 500),
isReplicationSuccess: false
});
}
}
/* eslint-enable no-await-in-loop */
} finally {
await lock.release();
logger.info(job.data, "Replication finished");
}
});
queueService.listen(QueueName.SecretReplication, "failed", (job, err) => {
logger.error(err, "Failed to replicate secret", job?.data);
});
};
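For reference, a sketch of the payload this worker consumes, mirroring the fields destructured from `job.data` above (all values are placeholders; the queue typings themselves are not shown in this diff):

```ts
// Placeholder values; the shape follows the destructuring at the top of the worker.
const replicationJobData = {
  projectId: "proj_123",
  environmentSlug: "dev",
  secretPath: "/",
  actorId: "user_123",
  actor: "user", // ActorType.USER in the service above
  pickOnlyImportIds: undefined as string[] | undefined,
  _depth: 0, // the worker bails out once _depth exceeds MAX_REPLICATION_DEPTH (5)
  _deDupeQueue: {} as Record<string, boolean>,
  _deDupeReplicationQueue: {} as Record<string, boolean>
};
```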

View File

@@ -1,3 +0,0 @@
export type TSyncSecretReplicationDTO = {
id: string;
};

View File

@@ -90,17 +90,15 @@ export const secretScanningServiceFactory = ({
const { const {
data: { repositories } data: { repositories }
} = await octokit.apps.listReposAccessibleToInstallation(); } = await octokit.apps.listReposAccessibleToInstallation();
if (!appCfg.DISABLE_SECRET_SCANNING) { await Promise.all(
await Promise.all( repositories.map(({ id, full_name }) =>
repositories.map(({ id, full_name }) => secretScanningQueue.startFullRepoScan({
secretScanningQueue.startFullRepoScan({ organizationId: session.orgId,
organizationId: session.orgId, installationId,
installationId, repository: { id, fullName: full_name }
repository: { id, fullName: full_name } })
}) )
) );
);
}
return { installatedApp }; return { installatedApp };
}; };
@@ -153,7 +151,6 @@ export const secretScanningServiceFactory = ({
}; };
const handleRepoPushEvent = async (payload: WebhookEventMap["push"]) => { const handleRepoPushEvent = async (payload: WebhookEventMap["push"]) => {
const appCfg = getConfig();
const { commits, repository, installation, pusher } = payload; const { commits, repository, installation, pusher } = payload;
if (!commits || !repository || !installation || !pusher) { if (!commits || !repository || !installation || !pusher) {
return; return;
@@ -164,15 +161,13 @@ export const secretScanningServiceFactory = ({
}); });
if (!installationLink) return; if (!installationLink) return;
if (!appCfg.DISABLE_SECRET_SCANNING) { await secretScanningQueue.startPushEventScan({
await secretScanningQueue.startPushEventScan({ commits,
commits, pusher: { name: pusher.name, email: pusher.email },
pusher: { name: pusher.name, email: pusher.email }, repository: { fullName: repository.full_name, id: repository.id },
repository: { fullName: repository.full_name, id: repository.id }, organizationId: installationLink.orgId,
organizationId: installationLink.orgId, installationId: String(installation?.id)
installationId: String(installation?.id) });
});
}
}; };
const handleRepoDeleteEvent = async (installationId: string, repositoryIds: string[]) => { const handleRepoDeleteEvent = async (installationId: string, repositoryIds: string[]) => {

View File

@@ -220,7 +220,7 @@ export const secretSnapshotServiceFactory = ({
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id); const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
// this will remove all secrets and folders on the child folders // this will remove all secrets and folders on the child folders
// due to the sql foreign key and linked-list connection, removing the folders removes everything below too // due to the sql foreign key and linked-list connection, removing the folders removes everything below too
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx); const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId }, tx);
const deletedTopLevelFolders = groupBy( const deletedTopLevelFolders = groupBy(
deletedFolders.filter(({ parentId }) => parentId === snapshot.folderId), deletedFolders.filter(({ parentId }) => parentId === snapshot.folderId),
(item) => item.id (item) => item.id

View File

@@ -1,75 +1,20 @@
import { Redis } from "ioredis"; import { Redis } from "ioredis";
import { Redlock, Settings } from "@app/lib/red-lock";
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>; export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
// all the key prefixes used must be set here to avoid conflict
export enum KeyStorePrefixes {
SecretReplication = "secret-replication-import-lock"
}
type TWaitTillReady = {
key: string;
waitingCb?: () => void;
keyCheckCb: (val: string | null) => boolean;
waitIteration?: number;
delay?: number;
jitter?: number;
};
export const keyStoreFactory = (redisUrl: string) => { export const keyStoreFactory = (redisUrl: string) => {
const redis = new Redis(redisUrl); const redis = new Redis(redisUrl);
const redisLock = new Redlock([redis], { retryCount: 2, retryDelay: 200 });
const setItem = async (key: string, value: string | number | Buffer, prefix?: string) => const setItem = async (key: string, value: string | number | Buffer) => redis.set(key, value);
redis.set(prefix ? `${prefix}:${key}` : key, value);
const getItem = async (key: string, prefix?: string) => redis.get(prefix ? `${prefix}:${key}` : key); const getItem = async (key: string) => redis.get(key);
const setItemWithExpiry = async ( const setItemWithExpiry = async (key: string, exp: number | string, value: string | number | Buffer) =>
key: string, redis.setex(key, exp, value);
exp: number | string,
value: string | number | Buffer,
prefix?: string
) => redis.setex(prefix ? `${prefix}:${key}` : key, exp, value);
const deleteItem = async (key: string) => redis.del(key); const deleteItem = async (key: string) => redis.del(key);
const incrementBy = async (key: string, value: number) => redis.incrby(key, value); const incrementBy = async (key: string, value: number) => redis.incrby(key, value);
const waitTillReady = async ({ return { setItem, getItem, setItemWithExpiry, deleteItem, incrementBy };
key,
waitingCb,
keyCheckCb,
waitIteration = 10,
delay = 1000,
jitter = 200
}: TWaitTillReady) => {
let attempts = 0;
let isReady = keyCheckCb(await getItem(key));
while (!isReady) {
if (attempts > waitIteration) return;
// eslint-disable-next-line
await new Promise((resolve) => {
waitingCb?.();
setTimeout(resolve, Math.max(0, delay + Math.floor((Math.random() * 2 - 1) * jitter)));
});
attempts += 1;
// eslint-disable-next-line
isReady = keyCheckCb(await getItem(key, "wait_till_ready"));
}
};
return {
setItem,
getItem,
setItemWithExpiry,
deleteItem,
incrementBy,
acquireLock(resources: string[], duration: number, settings?: Partial<Settings>) {
return redisLock.acquire(resources, duration, settings);
},
waitTillReady
};
}; };
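A minimal usage sketch of the fuller keystore variant in this hunk (the one exposing `acquireLock`, prefixed keys and `waitTillReady`), assuming `keyStoreFactory` is exported from the same module; the Redis URL and key names are placeholders:

```ts
import { keyStoreFactory, KeyStorePrefixes } from "@app/keystore/keystore";

const keyStore = keyStoreFactory("redis://localhost:6379");

export const markImportReplicated = async (jobId: string, importId: string) => {
  // acquireLock wraps redisLock.acquire; the lock auto-expires after 5000 ms.
  const lock = await keyStore.acquireLock([`REPLICATION_SECRET_${jobId}`], 5000);
  try {
    // With a prefix, the value is stored under `${prefix}:${key}`.
    await keyStore.setItemWithExpiry(`${jobId}-${importId}`, 10, 1, KeyStorePrefixes.SecretReplication);
    return await keyStore.getItem(`${jobId}-${importId}`, KeyStorePrefixes.SecretReplication);
  } finally {
    await lock.release();
  }
};
```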

View File

@@ -89,21 +89,6 @@ export const UNIVERSAL_AUTH = {
}, },
RENEW_ACCESS_TOKEN: { RENEW_ACCESS_TOKEN: {
accessToken: "The access token to renew." accessToken: "The access token to renew."
},
REVOKE_ACCESS_TOKEN: {
accessToken: "The access token to revoke."
}
} as const;
export const AWS_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
iamHttpRequestMethod: "The HTTP request method used in the signed request.",
iamRequestUrl:
"The base64-encoded HTTP URL used in the signed request. Most likely, the base64-encoding of https://sts.amazonaws.com/",
iamRequestBody:
"The base64-encoded body of the signed request. Most likely, the base64-encoding of Action=GetCallerIdentity&Version=2011-06-15.",
iamRequestHeaders: "The base64-encoded headers of the sts:GetCallerIdentity signed request."
} }
} as const; } as const;
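As a small illustration of the base64-encoded fields described above, using the exact URL and body the descriptions suggest (the header set shown is an assumption; a real signed request also carries the SigV4 `Authorization` header):

```ts
const iamRequestUrl = Buffer.from("https://sts.amazonaws.com/").toString("base64");
const iamRequestBody = Buffer.from("Action=GetCallerIdentity&Version=2011-06-15").toString("base64");
// Illustrative header set only — the signed request includes more headers than this.
const iamRequestHeaders = Buffer.from(
  JSON.stringify({ "Content-Type": "application/x-www-form-urlencoded" })
).toString("base64");
```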
@@ -148,6 +133,36 @@ export const PROJECTS = {
name: "The new name of the project.", name: "The new name of the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project." autoCapitalization: "Disable or enable auto-capitalization for the project."
}, },
INVITE_MEMBER: {
projectId: "The ID of the project to invite the member to.",
emails: "A list of organization member emails to invite to the project.",
usernames: "A list of usernames to invite to the project."
},
REMOVE_MEMBER: {
projectId: "The ID of the project to remove the member from.",
emails: "A list of organization member emails to remove from the project.",
usernames: "A list of usernames to remove from the project."
},
GET_USER_MEMBERSHIPS: {
workspaceId: "The ID of the project to get memberships from."
},
UPDATE_USER_MEMBERSHIP: {
workspaceId: "The ID of the project to update the membership for.",
membershipId: "The ID of the membership to update.",
roles: "A list of roles to update the membership to."
},
LIST_IDENTITY_MEMBERSHIPS: {
projectId: "The ID of the project to get identity memberships from."
},
UPDATE_IDENTITY_MEMBERSHIP: {
projectId: "The ID of the project to update the identity membership for.",
identityId: "The ID of the identity to update the membership for.",
roles: "A list of roles to update the membership to."
},
DELETE_IDENTITY_MEMBERSHIP: {
projectId: "The ID of the project to delete the identity membership from.",
identityId: "The ID of the identity to delete the membership from."
},
GET_KEY: { GET_KEY: {
workspaceId: "The ID of the project to get the key from." workspaceId: "The ID of the project to get the key from."
}, },
@@ -186,72 +201,6 @@ export const PROJECTS = {
} }
} as const; } as const;
export const PROJECT_USERS = {
INVITE_MEMBER: {
projectId: "The ID of the project to invite the member to.",
emails: "A list of organization member emails to invite to the project.",
usernames: "A list of usernames to invite to the project."
},
REMOVE_MEMBER: {
projectId: "The ID of the project to remove the member from.",
emails: "A list of organization member emails to remove from the project.",
usernames: "A list of usernames to remove from the project."
},
GET_USER_MEMBERSHIPS: {
workspaceId: "The ID of the project to get memberships from."
},
GET_USER_MEMBERSHIP: {
workspaceId: "The ID of the project to get memberships from.",
username: "The username to get project membership of. Email is the default username."
},
UPDATE_USER_MEMBERSHIP: {
workspaceId: "The ID of the project to update the membership for.",
membershipId: "The ID of the membership to update.",
roles: "A list of roles to update the membership to."
}
};
export const PROJECT_IDENTITIES = {
LIST_IDENTITY_MEMBERSHIPS: {
projectId: "The ID of the project to get identity memberships from."
},
GET_IDENTITY_MEMBERSHIP_BY_ID: {
identityId: "The ID of the identity to get the membership for.",
projectId: "The ID of the project to get the identity membership for."
},
UPDATE_IDENTITY_MEMBERSHIP: {
projectId: "The ID of the project to update the identity membership for.",
identityId: "The ID of the identity to update the membership for.",
roles: {
description: "A list of role slugs to assign to the identity project membership.",
role: "The role slug to assign to the newly created identity project membership.",
isTemporary:
"Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
temporaryMode: "Type of temporary expiry.",
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
temporaryAccessStartTime: "Time at which the temporary access starts"
}
},
DELETE_IDENTITY_MEMBERSHIP: {
projectId: "The ID of the project to delete the identity membership from.",
identityId: "The ID of the identity to delete the membership from."
},
CREATE_IDENTITY_MEMBERSHIP: {
projectId: "The ID of the project to create the identity membership from.",
identityId: "The ID of the identity to create the membership from.",
role: "The role slug to assign to the newly created identity project membership.",
roles: {
description: "A list of role slugs to assign to the newly created identity project membership.",
role: "The role slug to assign to the newly created identity project membership.",
isTemporary:
"Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
temporaryMode: "Type of temporary expiry.",
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
temporaryAccessStartTime: "Time at which the temporary access starts"
}
}
};
export const ENVIRONMENTS = { export const ENVIRONMENTS = {
CREATE: { CREATE: {
workspaceId: "The ID of the project to create the environment in.", workspaceId: "The ID of the project to create the environment in.",
@@ -291,7 +240,6 @@ export const FOLDERS = {
name: "The new name of the folder.", name: "The new name of the folder.",
path: "The path of the folder to update.", path: "The path of the folder to update.",
directory: "The new directory of the folder to update. (Deprecated in favor of path)", directory: "The new directory of the folder to update. (Deprecated in favor of path)",
projectSlug: "The slug of the project where the folder is located.",
workspaceId: "The ID of the project where the folder is located." workspaceId: "The ID of the project where the folder is located."
}, },
DELETE: { DELETE: {
@@ -328,8 +276,7 @@ export const RAW_SECRETS = {
recursive: recursive:
"Whether or not to fetch all secrets from the specified base path, and all of its subdirectories. Note, the max depth is 20 deep.", "Whether or not to fetch all secrets from the specified base path, and all of its subdirectories. Note, the max depth is 20 deep.",
workspaceId: "The ID of the project to list secrets from.", workspaceId: "The ID of the project to list secrets from.",
workspaceSlug: workspaceSlug: "The slug of the project to list secrets from. This parameter is only usable by machine identities.",
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
environment: "The slug of the environment to list secrets from.", environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.", secretPath: "The secret path to list secrets from.",
includeImports: "Whether to include imported secrets or not." includeImports: "Whether to include imported secrets or not."
@@ -348,7 +295,6 @@ export const RAW_SECRETS = {
GET: { GET: {
secretName: "The name of the secret to get.", secretName: "The name of the secret to get.",
workspaceId: "The ID of the project to get the secret from.", workspaceId: "The ID of the project to get the secret from.",
workspaceSlug: "The slug of the project to get the secret from.",
environment: "The slug of the environment to get the secret from.", environment: "The slug of the environment to get the secret from.",
secretPath: "The path of the secret to get.", secretPath: "The path of the secret to get.",
version: "The version of the secret to get.", version: "The version of the secret to get.",
@@ -521,8 +467,7 @@ export const IDENTITY_ADDITIONAL_PRIVILEGE = {
projectSlug: "The slug of the project of the identity in.", projectSlug: "The slug of the project of the identity in.",
identityId: "The ID of the identity to create.", identityId: "The ID of the identity to create.",
slug: "The slug of the privilege to create.", slug: "The slug of the privilege to create.",
permissions: `@deprecated - use privilegePermission permissions: `The permission object for the privilege.
The permission object for the privilege.
- Read secrets - Read secrets
\`\`\` \`\`\`
{ "permissions": [{"action": "read", "subject": "secrets"]} { "permissions": [{"action": "read", "subject": "secrets"]}
@@ -536,7 +481,6 @@ The permission object for the privilege.
- { "permissions": [{"action": "read", "subject": "secrets", "conditions": { "environment": "dev", "secretPath": { "$glob": "/" } }}] } - { "permissions": [{"action": "read", "subject": "secrets", "conditions": { "environment": "dev", "secretPath": { "$glob": "/" } }}] }
\`\`\` \`\`\`
`, `,
privilegePermission: "The permission object for the privilege.",
isPackPermission: "Whether the server should pack(compact) the permission object.", isPackPermission: "Whether the server should pack(compact) the permission object.",
isTemporary: "Whether the privilege is temporary.", isTemporary: "Whether the privilege is temporary.",
temporaryMode: "Type of temporary access given. Types: relative", temporaryMode: "Type of temporary access given. Types: relative",
@@ -548,8 +492,7 @@ The permission object for the privilege.
identityId: "The ID of the identity to update.", identityId: "The ID of the identity to update.",
slug: "The slug of the privilege to update.", slug: "The slug of the privilege to update.",
newSlug: "The new slug of the privilege to update.", newSlug: "The new slug of the privilege to update.",
permissions: `@deprecated - use privilegePermission permissions: `The permission object for the privilege.
The permission object for the privilege.
- Read secrets - Read secrets
\`\`\` \`\`\`
{ "permissions": [{"action": "read", "subject": "secrets"]} { "permissions": [{"action": "read", "subject": "secrets"]}
@@ -563,7 +506,6 @@ The permission object for the privilege.
- { "permissions": [{"action": "read", "subject": "secrets", "conditions": { "environment": "dev", "secretPath": { "$glob": "/" } }}] } - { "permissions": [{"action": "read", "subject": "secrets", "conditions": { "environment": "dev", "secretPath": { "$glob": "/" } }}] }
\`\`\` \`\`\`
`, `,
privilegePermission: "The permission object for the privilege.",
isTemporary: "Whether the privilege is temporary.", isTemporary: "Whether the privilege is temporary.",
temporaryMode: "Type of temporary access given. Types: relative", temporaryMode: "Type of temporary access given. Types: relative",
temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d", temporaryRange: "TTL for the temporay time. Eg: 1m, 1h, 1d",
@@ -661,7 +603,6 @@ export const INTEGRATION = {
targetServiceId: targetServiceId:
"The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank", "The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank",
owner: "External integration providers service entity owner. Used in Github.", owner: "External integration providers service entity owner. Used in Github.",
url: "The self-hosted URL of the platform to integrate with",
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault", path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault",
region: "AWS region to sync secrets to.", region: "AWS region to sync secrets to.",
scope: "Scope of the provider. Used by Github, Qovery", scope: "Scope of the provider. Used by Github, Qovery",
@@ -669,12 +610,10 @@ export const INTEGRATION = {
secretPrefix: "The prefix for the saved secret. Used by GCP.", secretPrefix: "The prefix for the saved secret. Used by GCP.",
secretSuffix: "The suffix for the saved secret. Used by GCP.", secretSuffix: "The suffix for the saved secret. Used by GCP.",
initialSyncBehavoir: "Type of syncing behavior with the integration.", initialSyncBehavoir: "Type of syncing behavior with the integration.",
mappingBehavior: "The mapping behavior of the integration.",
shouldAutoRedeploy: "Used by Render to trigger auto deploy.", shouldAutoRedeploy: "Used by Render to trigger auto deploy.",
secretGCPLabel: "The label for GCP secrets.", secretGCPLabel: "The label for GCP secrets.",
secretAWSTag: "The tags for AWS secrets.", secretAWSTag: "The tags for AWS secrets.",
kmsKeyId: "The ID of the encryption key from AWS KMS.", kmsKeyId: "The ID of the encryption key from AWS KMS."
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store."
} }
}, },
UPDATE: { UPDATE: {
@@ -691,9 +630,6 @@ export const INTEGRATION = {
}, },
DELETE: { DELETE: {
integrationId: "The ID of the integration object." integrationId: "The ID of the integration object."
},
SYNC: {
integrationId: "The ID of the integration object to manually sync"
} }
}; };
@@ -722,32 +658,3 @@ export const AUDIT_LOG_STREAMS = {
id: "The ID of the audit log stream to get details." id: "The ID of the audit log stream to get details."
} }
}; };
export const PROJECT_ROLE = {
CREATE: {
projectSlug: "Slug of the project to create the role for.",
slug: "The slug of the role.",
name: "The name of the role.",
description: "The description for the role.",
permissions: "The permissions assigned to the role."
},
UPDATE: {
projectSlug: "Slug of the project to update the role for.",
roleId: "The ID of the role to update",
slug: "The slug of the role.",
name: "The name of the role.",
description: "The description for the role.",
permissions: "The permissions assigned to the role."
},
DELETE: {
projectSlug: "Slug of the project to delete this role for.",
roleId: "The ID of the role to update"
},
GET_ROLE_BY_SLUG: {
projectSlug: "The slug of the project.",
roleSlug: "The slug of the role to get details"
},
LIST: {
projectSlug: "The slug of the project to list the roles of."
}
};

View File

@@ -13,10 +13,6 @@ const zodStrBool = z
const envSchema = z const envSchema = z
.object({ .object({
PORT: z.coerce.number().default(4000), PORT: z.coerce.number().default(4000),
DISABLE_SECRET_SCANNING: z
.enum(["true", "false"])
.default("false")
.transform((el) => el === "true"),
REDIS_URL: zpStr(z.string()), REDIS_URL: zpStr(z.string()),
HOST: zpStr(z.string().default("localhost")), HOST: zpStr(z.string().default("localhost")),
DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default( DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
@@ -75,7 +71,6 @@ const envSchema = z
.optional() .optional()
.default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL) .default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL)
), // fallback since URL_GITLAB_LOGIN has been renamed ), // fallback since URL_GITLAB_LOGIN has been renamed
DEFAULT_SAML_ORG_SLUG: zpStr(z.string().optional()).default(process.env.NEXT_PUBLIC_SAML_ORG_SLUG),
// integration client secrets // integration client secrets
// heroku // heroku
CLIENT_ID_HEROKU: zpStr(z.string().optional()), CLIENT_ID_HEROKU: zpStr(z.string().optional()),
@@ -132,8 +127,7 @@ const envSchema = z
isSecretScanningConfigured: isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) && Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) && Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET), Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET)
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
})); }));
let envCfg: Readonly<z.infer<typeof envSchema>>; let envCfg: Readonly<z.infer<typeof envSchema>>;
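The removed `DISABLE_SECRET_SCANNING` entry uses the same string-to-boolean pattern seen elsewhere in this schema; a standalone sketch of that pattern:

```ts
import { z } from "zod";

// "true"/"false" strings in, real booleans out — the same chain as the removed flag.
const boolFlag = z
  .enum(["true", "false"])
  .default("false")
  .transform((el) => el === "true");

console.log(boolFlag.parse(undefined)); // false (default applied)
console.log(boolFlag.parse("true")); // true
```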

View File

@@ -1,49 +0,0 @@
import crypto from "crypto";
import { SymmetricEncryption, TSymmetricEncryptionFns } from "./types";
const getIvLength = () => {
return 12;
};
const getTagLength = () => {
return 16;
};
export const symmetricCipherService = (type: SymmetricEncryption): TSymmetricEncryptionFns => {
const IV_LENGTH = getIvLength();
const TAG_LENGTH = getTagLength();
const encrypt = (text: Buffer, key: Buffer) => {
const iv = crypto.randomBytes(IV_LENGTH);
const cipher = crypto.createCipheriv(type, key, iv);
let encrypted = cipher.update(text);
encrypted = Buffer.concat([encrypted, cipher.final()]);
// Get the authentication tag
const tag = cipher.getAuthTag();
// Concatenate IV, encrypted text, and tag into a single buffer
const ciphertextBlob = Buffer.concat([iv, encrypted, tag]);
return ciphertextBlob;
};
const decrypt = (ciphertextBlob: Buffer, key: Buffer) => {
// Extract the IV, encrypted text, and tag from the buffer
const iv = ciphertextBlob.subarray(0, IV_LENGTH);
const tag = ciphertextBlob.subarray(-TAG_LENGTH);
const encrypted = ciphertextBlob.subarray(IV_LENGTH, -TAG_LENGTH);
const decipher = crypto.createDecipheriv(type, key, iv);
decipher.setAuthTag(tag);
const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
return decrypted;
};
return {
encrypt,
decrypt
};
};

View File

@@ -1,2 +0,0 @@
export { symmetricCipherService } from "./cipher";
export { SymmetricEncryption } from "./types";

View File

@@ -1,9 +0,0 @@
export enum SymmetricEncryption {
AES_GCM_256 = "aes-256-gcm",
AES_GCM_128 = "aes-128-gcm"
}
export type TSymmetricEncryptionFns = {
encrypt: (text: Buffer, key: Buffer) => Buffer;
decrypt: (blob: Buffer, key: Buffer) => Buffer;
};
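A round-trip sketch for the cipher module removed above (the import path is an assumption; AES-256-GCM requires a 32-byte key):

```ts
import crypto from "crypto";

import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";

const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const key = crypto.randomBytes(32); // 32 bytes for AES-256-GCM

// encrypt returns a single buffer laid out as [iv | ciphertext | auth tag]
const blob = cipher.encrypt(Buffer.from("super-secret"), key);
const plaintext = cipher.decrypt(blob, key);
console.log(plaintext.toString()); // "super-secret"
```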

View File

@@ -11,8 +11,6 @@ import { getConfig } from "../config/env";
export const decodeBase64 = (s: string) => naclUtils.decodeBase64(s); export const decodeBase64 = (s: string) => naclUtils.decodeBase64(s);
export const encodeBase64 = (u: Uint8Array) => naclUtils.encodeBase64(u); export const encodeBase64 = (u: Uint8Array) => naclUtils.encodeBase64(u);
export const randomSecureBytes = (length = 32) => crypto.randomBytes(length);
export type TDecryptSymmetricInput = { export type TDecryptSymmetricInput = {
ciphertext: string; ciphertext: string;
iv: string; iv: string;

View File

@@ -9,8 +9,7 @@ export {
encryptAsymmetric, encryptAsymmetric,
encryptSymmetric, encryptSymmetric,
encryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8,
generateAsymmetricKeyPair, generateAsymmetricKeyPair
randomSecureBytes
} from "./encryption"; } from "./encryption";
export { export {
decryptIntegrationAuths, decryptIntegrationAuths,

View File

@@ -104,68 +104,24 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
throw new DatabaseError({ error, name: "Create" }); throw new DatabaseError({ error, name: "Create" });
} }
}, },
updateById: async ( updateById: async (id: string, data: Tables[Tname]["update"], tx?: Knex) => {
id: string,
{
$incr,
$decr,
...data
}: Tables[Tname]["update"] & {
$incr?: { [x in keyof Partial<Tables[Tname]["base"]>]: number };
$decr?: { [x in keyof Partial<Tables[Tname]["base"]>]: number };
},
tx?: Knex
) => {
try { try {
const query = (tx || db)(tableName) const [res] = await (tx || db)(tableName)
.where({ id } as never) .where({ id } as never)
.update(data as never) .update(data as never)
.returning("*"); .returning("*");
if ($incr) { return res;
Object.entries($incr).forEach(([incrementField, incrementValue]) => {
void query.increment(incrementField, incrementValue);
});
}
if ($decr) {
Object.entries($decr).forEach(([incrementField, incrementValue]) => {
void query.decrement(incrementField, incrementValue);
});
}
const [docs] = await query;
return docs;
} catch (error) { } catch (error) {
throw new DatabaseError({ error, name: "Update by id" }); throw new DatabaseError({ error, name: "Update by id" });
} }
}, },
update: async ( update: async (filter: TFindFilter<Tables[Tname]["base"]>, data: Tables[Tname]["update"], tx?: Knex) => {
filter: TFindFilter<Tables[Tname]["base"]>,
{
$incr,
$decr,
...data
}: Tables[Tname]["update"] & {
$incr?: { [x in keyof Partial<Tables[Tname]["base"]>]: number };
$decr?: { [x in keyof Partial<Tables[Tname]["base"]>]: number };
},
tx?: Knex
) => {
try { try {
const query = (tx || db)(tableName) const res = await (tx || db)(tableName)
.where(buildFindFilter(filter)) .where(buildFindFilter(filter))
.update(data as never) .update(data as never)
.returning("*"); .returning("*");
// increment and decrement operation in update return res;
if ($incr) {
Object.entries($incr).forEach(([incrementField, incrementValue]) => {
void query.increment(incrementField, incrementValue);
});
}
if ($decr) {
Object.entries($decr).forEach(([incrementField, incrementValue]) => {
void query.decrement(incrementField, incrementValue);
});
}
return await query;
} catch (error) { } catch (error) {
throw new DatabaseError({ error, name: "Update" }); throw new DatabaseError({ error, name: "Update" });
} }
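To show what callers gain from the fuller `updateById`/`update` signatures in this hunk, a sketch of an atomic counter bump (the table, column and ids are placeholders):

```ts
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export const renameAndBumpVersion = async (db: TDbClient, folderId: string) => {
  const exampleOrm = ormify(db, TableName.SecretFolder);

  // Plain field update — supported by both signatures in the hunk:
  await exampleOrm.updateById(folderId, { name: "staging" });

  // Atomic increment — only the variant accepting $incr/$decr supports this:
  await exampleOrm.updateById(folderId, { name: "staging", $incr: { version: 1 } });
};
```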

View File

@@ -30,37 +30,6 @@ const loggerConfig = z.object({
NODE_ENV: z.enum(["development", "test", "production"]).default("production") NODE_ENV: z.enum(["development", "test", "production"]).default("production")
}); });
const redactedKeys = [
"accessToken",
"authToken",
"serviceToken",
"identityAccessToken",
"token",
"privateKey",
"serverPrivateKey",
"plainPrivateKey",
"plainProjectKey",
"encryptedPrivateKey",
"userPrivateKey",
"protectedKey",
"decryptKey",
"encryptedProjectKey",
"encryptedSymmetricKey",
"encryptedPrivateKey",
"backupPrivateKey",
"secretKey",
"SecretKey",
"botPrivateKey",
"encryptedKey",
"plaintextProjectKey",
"accessKey",
"botKey",
"decryptedSecret",
"secrets",
"key",
"password"
];
export const initLogger = async () => { export const initLogger = async () => {
const cfg = loggerConfig.parse(process.env); const cfg = loggerConfig.parse(process.env);
const targets: pino.TransportMultiOptions["targets"][number][] = [ const targets: pino.TransportMultiOptions["targets"][number][] = [
@@ -105,9 +74,7 @@ export const initLogger = async () => {
hostname: bindings.hostname hostname: bindings.hostname
// node_version: process.version // node_version: process.version
}) })
}, }
// redact until depth of three
redact: [...redactedKeys, ...redactedKeys.map((key) => `*.${key}`), ...redactedKeys.map((key) => `*.*.${key}`)]
}, },
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
transport transport
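The removed `redact` wiring relies on pino's path-based redaction; a compact standalone sketch of the same pattern (key list trimmed to three entries):

```ts
import pino from "pino";

const redactedKeys = ["authToken", "secretKey", "password"];

// Redact matching fields at the top level and up to two levels deep,
// mirroring the `*.${key}` / `*.*.${key}` expansion above.
const logger = pino({
  redact: [...redactedKeys, ...redactedKeys.map((k) => `*.${k}`), ...redactedKeys.map((k) => `*.*.${k}`)]
});

logger.info({ user: { password: "hunter2" } }, "login attempt"); // password prints as "[Redacted]"
```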

View File

@@ -1,682 +0,0 @@
/* eslint-disable */
// Source code credits: https://github.com/mike-marcacci/node-redlock
// Taken to avoid external dependency
import { randomBytes, createHash } from "crypto";
import { EventEmitter } from "events";
// AbortController became available as a global in node version 16. Once version
// 14 reaches its end-of-life, this can be removed.
import { Redis as IORedisClient, Cluster as IORedisCluster } from "ioredis";
type Client = IORedisClient | IORedisCluster;
// Define script constants.
const ACQUIRE_SCRIPT = `
-- Return 0 if an entry already exists.
for i, key in ipairs(KEYS) do
if redis.call("exists", key) == 1 then
return 0
end
end
-- Create an entry for each provided key.
for i, key in ipairs(KEYS) do
redis.call("set", key, ARGV[1], "PX", ARGV[2])
end
-- Return the number of entries added.
return #KEYS
`;
const EXTEND_SCRIPT = `
-- Return 0 if an entry exists with a *different* lock value.
for i, key in ipairs(KEYS) do
if redis.call("get", key) ~= ARGV[1] then
return 0
end
end
-- Update the entry for each provided key.
for i, key in ipairs(KEYS) do
redis.call("set", key, ARGV[1], "PX", ARGV[2])
end
-- Return the number of entries updated.
return #KEYS
`;
const RELEASE_SCRIPT = `
local count = 0
for i, key in ipairs(KEYS) do
-- Only remove entries for *this* lock value.
if redis.call("get", key) == ARGV[1] then
redis.pcall("del", key)
count = count + 1
end
end
-- Return the number of entries removed.
return count
`;
export type ClientExecutionResult =
| {
client: Client;
vote: "for";
value: number;
}
| {
client: Client;
vote: "against";
error: Error;
};
/*
* This object contains a summary of results.
*/
export type ExecutionStats = {
readonly membershipSize: number;
readonly quorumSize: number;
readonly votesFor: Set<Client>;
readonly votesAgainst: Map<Client, Error>;
};
/*
* This object contains a summary of results. Because the result of an attempt
* can sometimes be determined before all requests are finished, each attempt
* contains a Promise that will resolve ExecutionStats once all requests are
* finished. A rejection of these promises should be considered undefined
* behavior and should cause a crash.
*/
export type ExecutionResult = {
attempts: ReadonlyArray<Promise<ExecutionStats>>;
start: number;
};
/**
*
*/
export interface Settings {
readonly driftFactor: number;
readonly retryCount: number;
readonly retryDelay: number;
readonly retryJitter: number;
readonly automaticExtensionThreshold: number;
}
// Define default settings.
const defaultSettings: Readonly<Settings> = {
driftFactor: 0.01,
retryCount: 10,
retryDelay: 200,
retryJitter: 100,
automaticExtensionThreshold: 500
};
// Modifying this object is forbidden.
Object.freeze(defaultSettings);
/*
* This error indicates a failure due to the existence of another lock for one
* or more of the requested resources.
*/
export class ResourceLockedError extends Error {
constructor(public readonly message: string) {
super();
this.name = "ResourceLockedError";
}
}
/*
* This error indicates a failure of an operation to pass with a quorum.
*/
export class ExecutionError extends Error {
constructor(
public readonly message: string,
public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>
) {
super();
this.name = "ExecutionError";
}
}
/*
* An object of this type is returned when a resource is successfully locked. It
* contains convenience methods `release` and `extend` which perform the
* associated Redlock method on itself.
*/
export class Lock {
constructor(
public readonly redlock: Redlock,
public readonly resources: string[],
public readonly value: string,
public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>,
public expiration: number
) {}
async release(): Promise<ExecutionResult> {
return this.redlock.release(this);
}
async extend(duration: number): Promise<Lock> {
return this.redlock.extend(this, duration);
}
}
export type RedlockAbortSignal = AbortSignal & { error?: Error };
/**
* A redlock object is instantiated with an array of at least one redis client
* and an optional `options` object. Properties of the Redlock object should NOT
* be changed after it is first used, as doing so could have unintended
* consequences for live locks.
*/
export class Redlock extends EventEmitter {
public readonly clients: Set<Client>;
public readonly settings: Settings;
public readonly scripts: {
readonly acquireScript: { value: string; hash: string };
readonly extendScript: { value: string; hash: string };
readonly releaseScript: { value: string; hash: string };
};
public constructor(
clients: Iterable<Client>,
settings: Partial<Settings> = {},
scripts: {
readonly acquireScript?: string | ((script: string) => string);
readonly extendScript?: string | ((script: string) => string);
readonly releaseScript?: string | ((script: string) => string);
} = {}
) {
super();
// Prevent crashes on error events.
this.on("error", () => {
// Because redlock is designed for high availability, it does not care if
// a minority of redis instances/clusters fail at an operation.
//
// However, it can be helpful to monitor and log such cases. Redlock emits
// an "error" event whenever it encounters an error, even if the error is
// ignored in its normal operation.
//
// This function serves to prevent node's default behavior of crashing
// when an "error" event is emitted in the absence of listeners.
});
// Copy the clients into a new set to ensure no accidental mutation.
this.clients = new Set(clients);
if (this.clients.size === 0) {
throw new Error("Redlock must be instantiated with at least one redis client.");
}
// Customize the settings for this instance.
this.settings = {
driftFactor: typeof settings.driftFactor === "number" ? settings.driftFactor : defaultSettings.driftFactor,
retryCount: typeof settings.retryCount === "number" ? settings.retryCount : defaultSettings.retryCount,
retryDelay: typeof settings.retryDelay === "number" ? settings.retryDelay : defaultSettings.retryDelay,
retryJitter: typeof settings.retryJitter === "number" ? settings.retryJitter : defaultSettings.retryJitter,
automaticExtensionThreshold:
typeof settings.automaticExtensionThreshold === "number"
? settings.automaticExtensionThreshold
: defaultSettings.automaticExtensionThreshold
};
// Use custom scripts and script modifiers.
const acquireScript =
typeof scripts.acquireScript === "function" ? scripts.acquireScript(ACQUIRE_SCRIPT) : ACQUIRE_SCRIPT;
const extendScript =
typeof scripts.extendScript === "function" ? scripts.extendScript(EXTEND_SCRIPT) : EXTEND_SCRIPT;
const releaseScript =
typeof scripts.releaseScript === "function" ? scripts.releaseScript(RELEASE_SCRIPT) : RELEASE_SCRIPT;
this.scripts = {
acquireScript: {
value: acquireScript,
hash: this._hash(acquireScript)
},
extendScript: {
value: extendScript,
hash: this._hash(extendScript)
},
releaseScript: {
value: releaseScript,
hash: this._hash(releaseScript)
}
};
}
/**
* Generate a sha1 hash compatible with redis evalsha.
*/
private _hash(value: string): string {
return createHash("sha1").update(value).digest("hex");
}
/**
* Generate a cryptographically random string.
*/
private _random(): string {
return randomBytes(16).toString("hex");
}
/**
* This method runs `.quit()` on all client connections.
*/
public async quit(): Promise<void> {
const results = [];
for (const client of this.clients) {
results.push(client.quit());
}
await Promise.all(results);
}
/**
 * This method acquires a lock on the resources for the duration specified by
 * `duration`.
*/
public async acquire(resources: string[], duration: number, settings?: Partial<Settings>): Promise<Lock> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
const value = this._random();
try {
const { attempts, start } = await this._execute(
this.scripts.acquireScript,
resources,
[value, duration],
settings
);
// Add 2 milliseconds to the drift to account for Redis expires precision,
// which is 1 ms, plus the configured allowable drift factor.
const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;
return new Lock(this, resources, value, attempts, start + duration - drift);
} catch (error) {
// If there was an error acquiring the lock, release any partial lock
// state that may exist on a minority of clients.
await this._execute(this.scripts.releaseScript, resources, [value], {
retryCount: 0
}).catch(() => {
// Any error here will be ignored.
});
throw error;
}
}
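  /*
   * Usage sketch: acquiring and releasing a lock with this class, assuming a single
   * ioredis client named `redis` (resource name and retry settings are illustrative).
   *
   *   const redlock = new Redlock([redis], { retryCount: 3 });
   *   const lock = await redlock.acquire(["locks:replication:project-1"], 5000);
   *   try {
   *     // ...critical section...
   *   } finally {
   *     await lock.release();
   *   }
   */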
/**
* This method unlocks the provided lock from all servers still persisting it.
* It will fail with an error if it is unable to release the lock on a quorum
* of nodes, but will make no attempt to restore the lock in the case of a
* failure to release. It is safe to re-attempt a release or to ignore the
* error, as the lock will automatically expire after its timeout.
*/
public async release(lock: Lock, settings?: Partial<Settings>): Promise<ExecutionResult> {
// Immediately invalidate the lock.
lock.expiration = 0;
// Attempt to release the lock.
return this._execute(this.scripts.releaseScript, lock.resources, [lock.value], settings);
}
/**
* This method extends a valid lock by the provided `duration`.
*/
public async extend(existing: Lock, duration: number, settings?: Partial<Settings>): Promise<Lock> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
// The lock has already expired.
if (existing.expiration < Date.now()) {
throw new ExecutionError("Cannot extend an already-expired lock.", []);
}
const { attempts, start } = await this._execute(
this.scripts.extendScript,
existing.resources,
[existing.value, duration],
settings
);
// Invalidate the existing lock.
existing.expiration = 0;
// Add 2 milliseconds to the drift to account for Redis expires precision,
// which is 1 ms, plus the configured allowable drift factor.
const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;
const replacement = new Lock(this, existing.resources, existing.value, attempts, start + duration - drift);
return replacement;
}
/**
* Execute a script on all clients. The resulting promise is resolved or
* rejected as soon as a quorum is reached; the result carries a `stats`
* promise that resolves once all votes are in.
*/
private async _execute(
script: { value: string; hash: string },
keys: string[],
args: (string | number)[],
_settings?: Partial<Settings>
): Promise<ExecutionResult> {
const settings = _settings
? {
...this.settings,
..._settings
}
: this.settings;
// For the purpose of easy config serialization, we treat a retryCount of
// -1 as equivalent to Infinity.
const maxAttempts = settings.retryCount === -1 ? Infinity : settings.retryCount + 1;
const attempts: Promise<ExecutionStats>[] = [];
while (true) {
const { vote, stats, start } = await this._attemptOperation(script, keys, args);
attempts.push(stats);
// The operation achieved a quorum in favor.
if (vote === "for") {
return { attempts, start };
}
// Wait before reattempting.
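// The delay is retryDelay plus a uniform jitter in [-retryJitter, retryJitter),
// clamped at zero; e.g. with retryDelay 200 ms and retryJitter 100 ms
// (illustrative values), the wait lands roughly in the [100, 300) ms range.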
if (attempts.length < maxAttempts) {
await new Promise((resolve) => {
setTimeout(
resolve,
Math.max(0, settings.retryDelay + Math.floor((Math.random() * 2 - 1) * settings.retryJitter)),
undefined
);
});
} else {
throw new ExecutionError("The operation was unable to achieve a quorum during its retry window.", attempts);
}
}
}
private async _attemptOperation(
script: { value: string; hash: string },
keys: string[],
args: (string | number)[]
): Promise<
| { vote: "for"; stats: Promise<ExecutionStats>; start: number }
| { vote: "against"; stats: Promise<ExecutionStats>; start: number }
> {
const start = Date.now();
return await new Promise((resolve) => {
const clientResults = [];
for (const client of this.clients) {
clientResults.push(this._attemptOperationOnClient(client, script, keys, args));
}
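// A strict majority of clients must agree before the operation settles either
// way; e.g. 3 clients require 2 matching votes, 5 clients require 3.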
const stats: ExecutionStats = {
membershipSize: clientResults.length,
quorumSize: Math.floor(clientResults.length / 2) + 1,
votesFor: new Set<Client>(),
votesAgainst: new Map<Client, Error>()
};
let done: () => void;
const statsPromise = new Promise<typeof stats>((resolve) => {
done = () => resolve(stats);
});
// This is the expected flow for all successful and unsuccessful requests.
const onResultResolve = (clientResult: ClientExecutionResult): void => {
switch (clientResult.vote) {
case "for":
stats.votesFor.add(clientResult.client);
break;
case "against":
stats.votesAgainst.set(clientResult.client, clientResult.error);
break;
}
// A quorum has determined a success.
if (stats.votesFor.size === stats.quorumSize) {
resolve({
vote: "for",
stats: statsPromise,
start
});
}
// A quorum has determined a failure.
if (stats.votesAgainst.size === stats.quorumSize) {
resolve({
vote: "against",
stats: statsPromise,
start
});
}
// All votes are in.
if (stats.votesFor.size + stats.votesAgainst.size === stats.membershipSize) {
done();
}
};
// This is unexpected and should crash to prevent undefined behavior.
const onResultReject = (error: Error): void => {
throw error;
};
for (const result of clientResults) {
result.then(onResultResolve, onResultReject);
}
});
}
private async _attemptOperationOnClient(
client: Client,
script: { value: string; hash: string },
keys: string[],
args: (string | number)[]
): Promise<ClientExecutionResult> {
try {
let result: number;
try {
// Attempt to evaluate the script by its hash.
// @ts-expect-error
const shaResult = (await client.evalsha(script.hash, keys.length, [...keys, ...args])) as unknown;
if (typeof shaResult !== "number") {
throw new Error(`Unexpected result of type ${typeof shaResult} returned from redis.`);
}
result = shaResult;
} catch (error) {
// If the redis server does not already have the script cached,
// reattempt the request with the script's raw text.
if (!(error instanceof Error) || !error.message.startsWith("NOSCRIPT")) {
throw error;
}
// @ts-expect-error
const rawResult = (await client.eval(script.value, keys.length, [...keys, ...args])) as unknown;
if (typeof rawResult !== "number") {
throw new Error(`Unexpected result of type ${typeof rawResult} returned from redis.`);
}
result = rawResult;
}
// One or more of the resources was already locked.
if (result !== keys.length) {
throw new ResourceLockedError(
`The operation was applied to: ${result} of the ${keys.length} requested resources.`
);
}
return {
vote: "for",
client,
value: result
};
} catch (error) {
if (!(error instanceof Error)) {
throw new Error(`Unexpected type ${typeof error} thrown with value: ${error}`);
}
// Emit the error on the redlock instance for observability.
this.emit("error", error);
return {
vote: "against",
client,
error
};
}
}
/**
* Wrap and execute a routine in the context of an auto-extending lock,
* returning a promise of the routine's value. If automatic extension fails,
* the supplied AbortSignal is aborted and the encountered error is made
* available at `signal.error`.
*
* @example
* ```ts
* await redlock.using([senderId, recipientId], 5000, { retryCount: 5 }, async (signal) => {
* const senderBalance = await getBalance(senderId);
* const recipientBalance = await getBalance(recipientId);
*
* if (senderBalance < amountToSend) {
* throw new Error("Insufficient balance.");
* }
*
* // `signal.aborted` will be true if:
* // 1. the above took long enough that the lock needed to be extended
* // 2. redlock was unable to extend the lock
* //
* // In such a case, exclusivity can no longer be guaranteed for further
* // operations, and should be handled as an exceptional case.
* if (signal.aborted) {
* throw signal.error;
* }
*
* await setBalances([
* {id: senderId, balance: senderBalance - amountToSend},
* {id: recipientId, balance: recipientBalance + amountToSend},
* ]);
* });
* ```
*/
public async using<T>(
resources: string[],
duration: number,
settings: Partial<Settings>,
routine?: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T>;
public async using<T>(
resources: string[],
duration: number,
routine: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T>;
public async using<T>(
resources: string[],
duration: number,
settingsOrRoutine: undefined | Partial<Settings> | ((signal: RedlockAbortSignal) => Promise<T>),
optionalRoutine?: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
const settings =
settingsOrRoutine && typeof settingsOrRoutine !== "function"
? {
...this.settings,
...settingsOrRoutine
}
: this.settings;
const routine = optionalRoutine ?? settingsOrRoutine;
if (typeof routine !== "function") {
throw new Error("INVARIANT: routine is not a function.");
}
if (settings.automaticExtensionThreshold > duration - 100) {
throw new Error(
"A lock `duration` must be at least 100ms greater than the `automaticExtensionThreshold` setting."
);
}
// The AbortController/AbortSignal pattern allows the routine to be notified
// of a failure to extend the lock, and subsequent expiration. In the event
// of an abort, the error object will be made available at `signal.error`.
const controller = new AbortController();
const signal = controller.signal as RedlockAbortSignal;
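// The extension timer fires `automaticExtensionThreshold` ms before the lock
// expires; e.g. with a 500 ms threshold (an assumed value) and a lock that
// expires in 10 s, the first extension attempt happens after roughly 9.5 s.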
function queue(): void {
timeout = setTimeout(
() => (extension = extend()),
lock.expiration - Date.now() - settings.automaticExtensionThreshold
);
}
async function extend(): Promise<void> {
timeout = undefined;
try {
lock = await lock.extend(duration);
queue();
} catch (error) {
if (!(error instanceof Error)) {
throw new Error(`Unexpected thrown ${typeof error}: ${error}.`);
}
if (lock.expiration > Date.now()) {
return (extension = extend());
}
signal.error = error instanceof Error ? error : new Error(`${error}`);
controller.abort();
}
}
let timeout: undefined | NodeJS.Timeout;
let extension: undefined | Promise<void>;
let lock = await this.acquire(resources, duration, settings);
queue();
try {
return await routine(signal);
} finally {
// Clean up the timer.
if (timeout) {
clearTimeout(timeout);
timeout = undefined;
}
// Wait for an in-flight extension to finish.
if (extension) {
await extension.catch(() => {
// An error here doesn't matter at all, because the routine has
// already completed, and a release will be attempted regardless. The
// only reason for waiting here is to prevent possible contention
// between the extension and release.
});
}
await lock.release();
}
}
}

View File

@@ -7,7 +7,3 @@ export const zpStr = <T extends ZodTypeAny>(schema: T, opt: { stripNull: boolean
if (typeof val !== "string") return val; if (typeof val !== "string") return val;
return val.trim() || undefined; return val.trim() || undefined;
}, schema); }, schema);
export const zodBuffer = z.custom<Buffer>((data) => Buffer.isBuffer(data) || data instanceof Uint8Array, {
message: "Expected binary data (Buffer Or Uint8Array)"
});

View File

@@ -7,42 +7,33 @@ import {
TScanFullRepoEventPayload, TScanFullRepoEventPayload,
TScanPushEventPayload TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types"; } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { TSyncSecretsDTO } from "@app/services/secret/secret-types";
export enum QueueName { export enum QueueName {
SecretRotation = "secret-rotation", SecretRotation = "secret-rotation",
SecretReminder = "secret-reminder", SecretReminder = "secret-reminder",
AuditLog = "audit-log", AuditLog = "audit-log",
// TODO(akhilmhdh): This will get removed later. For now this is kept to stop the repeatable queue
AuditLogPrune = "audit-log-prune", AuditLogPrune = "audit-log-prune",
DailyResourceCleanUp = "daily-resource-cleanup",
TelemetryInstanceStats = "telemtry-self-hosted-stats", TelemetryInstanceStats = "telemtry-self-hosted-stats",
IntegrationSync = "sync-integrations", IntegrationSync = "sync-integrations",
SecretWebhook = "secret-webhook", SecretWebhook = "secret-webhook",
SecretFullRepoScan = "secret-full-repo-scan", SecretFullRepoScan = "secret-full-repo-scan",
SecretPushEventScan = "secret-push-event-scan", SecretPushEventScan = "secret-push-event-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost", UpgradeProjectToGhost = "upgrade-project-to-ghost",
DynamicSecretRevocation = "dynamic-secret-revocation", DynamicSecretRevocation = "dynamic-secret-revocation"
SecretReplication = "secret-replication",
SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
} }
export enum QueueJobs { export enum QueueJobs {
SecretReminder = "secret-reminder-job", SecretReminder = "secret-reminder-job",
SecretRotation = "secret-rotation-job", SecretRotation = "secret-rotation-job",
AuditLog = "audit-log-job", AuditLog = "audit-log-job",
// TODO(akhilmhdh): This will get removed later. For now this is kept to stop the repeatable queue
AuditLogPrune = "audit-log-prune-job", AuditLogPrune = "audit-log-prune-job",
DailyResourceCleanUp = "daily-resource-cleanup-job",
SecWebhook = "secret-webhook-trigger", SecWebhook = "secret-webhook-trigger",
TelemetryInstanceStats = "telemetry-self-hosted-stats", TelemetryInstanceStats = "telemetry-self-hosted-stats",
IntegrationSync = "secret-integration-pull", IntegrationSync = "secret-integration-pull",
SecretScan = "secret-scan", SecretScan = "secret-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost-job", UpgradeProjectToGhost = "upgrade-project-to-ghost-job",
DynamicSecretRevocation = "dynamic-secret-revocation", DynamicSecretRevocation = "dynamic-secret-revocation",
DynamicSecretPruning = "dynamic-secret-pruning", DynamicSecretPruning = "dynamic-secret-pruning"
SecretReplication = "secret-replication",
SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
} }
export type TQueueJobTypes = { export type TQueueJobTypes = {
@@ -64,10 +55,6 @@ export type TQueueJobTypes = {
name: QueueJobs.AuditLog; name: QueueJobs.AuditLog;
payload: TCreateAuditLogDTO; payload: TCreateAuditLogDTO;
}; };
[QueueName.DailyResourceCleanUp]: {
name: QueueJobs.DailyResourceCleanUp;
payload: undefined;
};
[QueueName.AuditLogPrune]: { [QueueName.AuditLogPrune]: {
name: QueueJobs.AuditLogPrune; name: QueueJobs.AuditLogPrune;
payload: undefined; payload: undefined;
@@ -78,13 +65,7 @@ export type TQueueJobTypes = {
}; };
[QueueName.IntegrationSync]: { [QueueName.IntegrationSync]: {
name: QueueJobs.IntegrationSync; name: QueueJobs.IntegrationSync;
payload: { payload: { projectId: string; environment: string; secretPath: string; depth?: number };
projectId: string;
environment: string;
secretPath: string;
depth?: number;
deDupeQueue?: Record<string, boolean>;
};
}; };
[QueueName.SecretFullRepoScan]: { [QueueName.SecretFullRepoScan]: {
name: QueueJobs.SecretScan; name: QueueJobs.SecretScan;
@@ -121,14 +102,6 @@ export type TQueueJobTypes = {
dynamicSecretCfgId: string; dynamicSecretCfgId: string;
}; };
}; };
[QueueName.SecretReplication]: {
name: QueueJobs.SecretReplication;
payload: TSyncSecretsDTO;
};
[QueueName.SecretSync]: {
name: QueueJobs.SecretSync;
payload: TSyncSecretsDTO;
};
}; };
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>; export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
@@ -145,7 +118,7 @@ export const queueServiceFactory = (redisUrl: string) => {
const start = <T extends QueueName>( const start = <T extends QueueName>(
name: T, name: T,
jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>, jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>) => Promise<void>,
queueSettings: Omit<QueueOptions, "connection"> = {} queueSettings: Omit<QueueOptions, "connection"> = {}
) => { ) => {
if (queueContainer[name]) { if (queueContainer[name]) {
@@ -179,7 +152,7 @@ export const queueServiceFactory = (redisUrl: string) => {
name: T, name: T,
job: TQueueJobTypes[T]["name"], job: TQueueJobTypes[T]["name"],
data: TQueueJobTypes[T]["payload"], data: TQueueJobTypes[T]["payload"],
opts?: JobsOptions & { jobId?: string } opts: JobsOptions & { jobId?: string }
) => { ) => {
const q = queueContainer[name]; const q = queueContainer[name];
@@ -193,9 +166,7 @@ export const queueServiceFactory = (redisUrl: string) => {
jobId?: string jobId?: string
) => { ) => {
const q = queueContainer[name]; const q = queueContainer[name];
if (q) { return q.removeRepeatable(job, repeatOpt, jobId);
return q.removeRepeatable(job, repeatOpt, jobId);
}
}; };
const stopRepeatableJobByJobId = async <T extends QueueName>(name: T, jobId: string) => { const stopRepeatableJobByJobId = async <T extends QueueName>(name: T, jobId: string) => {

View File

@@ -28,7 +28,7 @@ export const readLimit: RateLimitOptions = {
// POST, PATCH, PUT, DELETE endpoints // POST, PATCH, PUT, DELETE endpoints
export const writeLimit: RateLimitOptions = { export const writeLimit: RateLimitOptions = {
timeWindow: 60 * 1000, timeWindow: 60 * 1000,
max: 200, // (too low, FA having issues so increasing it - maidul) max: 50,
keyGenerator: (req) => req.realIp keyGenerator: (req) => req.realIp
}; };
@@ -36,7 +36,7 @@ export const writeLimit: RateLimitOptions = {
export const secretsLimit: RateLimitOptions = { export const secretsLimit: RateLimitOptions = {
// secrets, folders, secret imports // secrets, folders, secret imports
timeWindow: 60 * 1000, timeWindow: 60 * 1000,
max: 60, max: 1000,
keyGenerator: (req) => req.realIp keyGenerator: (req) => req.realIp
}; };
@@ -52,25 +52,9 @@ export const inviteUserRateLimit: RateLimitOptions = {
keyGenerator: (req) => req.realIp keyGenerator: (req) => req.realIp
}; };
export const mfaRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: 20,
keyGenerator: (req) => {
return req.headers.authorization?.split(" ")[1] || req.realIp;
}
};
export const creationLimit: RateLimitOptions = { export const creationLimit: RateLimitOptions = {
// identity, project, org // identity, project, org
timeWindow: 60 * 1000, timeWindow: 60 * 1000,
max: 30, max: 30,
keyGenerator: (req) => req.realIp keyGenerator: (req) => req.realIp
}; };
// Public endpoints to avoid brute force attacks
export const publicEndpointLimit: RateLimitOptions = {
// Shared Secrets
timeWindow: 60 * 1000,
max: 30,
keyGenerator: (req) => req.realIp
};

View File

@@ -1,6 +1,5 @@
import fp from "fastify-plugin"; import fp from "fastify-plugin";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type"; import { ActorType } from "@app/services/auth/auth-type";
// inject permission type needed based on auth extracted // inject permission type needed based on auth extracted
@@ -16,10 +15,6 @@ export const injectPermission = fp(async (server) => {
orgId: req.auth.orgId, // if the req.auth.authMode is AuthMode.API_KEY, the orgId will be "API_KEY" orgId: req.auth.orgId, // if the req.auth.authMode is AuthMode.API_KEY, the orgId will be "API_KEY"
authMethod: req.auth.authMethod // if the req.auth.authMode is AuthMode.API_KEY, the authMethod will be null authMethod: req.auth.authMethod // if the req.auth.authMode is AuthMode.API_KEY, the authMethod will be null
}; };
logger.info(
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.userId}] [type=${ActorType.USER}]`
);
} else if (req.auth.actor === ActorType.IDENTITY) { } else if (req.auth.actor === ActorType.IDENTITY) {
req.permission = { req.permission = {
type: ActorType.IDENTITY, type: ActorType.IDENTITY,
@@ -27,10 +22,6 @@ export const injectPermission = fp(async (server) => {
orgId: req.auth.orgId, orgId: req.auth.orgId,
authMethod: null authMethod: null
}; };
logger.info(
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.identityId}] [type=${ActorType.IDENTITY}]`
);
} else if (req.auth.actor === ActorType.SERVICE) { } else if (req.auth.actor === ActorType.SERVICE) {
req.permission = { req.permission = {
type: ActorType.SERVICE, type: ActorType.SERVICE,
@@ -38,10 +29,6 @@ export const injectPermission = fp(async (server) => {
orgId: req.auth.orgId, orgId: req.auth.orgId,
authMethod: null authMethod: null
}; };
logger.info(
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.serviceTokenId}] [type=${ActorType.SERVICE}]`
);
} else if (req.auth.actor === ActorType.SCIM_CLIENT) { } else if (req.auth.actor === ActorType.SCIM_CLIENT) {
req.permission = { req.permission = {
type: ActorType.SCIM_CLIENT, type: ActorType.SCIM_CLIENT,
@@ -49,10 +36,6 @@ export const injectPermission = fp(async (server) => {
orgId: req.auth.orgId, orgId: req.auth.orgId,
authMethod: null authMethod: null
}; };
logger.info(
`injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.scimTokenId}] [type=${ActorType.SCIM_CLIENT}]`
);
} }
}); });
}); });

View File

@@ -6,7 +6,6 @@ const headersOrder = [
"cf-connecting-ip", // Cloudflare "cf-connecting-ip", // Cloudflare
"Cf-Pseudo-IPv4", // Cloudflare "Cf-Pseudo-IPv4", // Cloudflare
"x-client-ip", // Most common "x-client-ip", // Most common
"x-envoy-external-address", // for envoy
"x-forwarded-for", // Mostly used by proxies "x-forwarded-for", // Mostly used by proxies
"fastly-client-ip", "fastly-client-ip",
"true-client-ip", // Akamai and Cloudflare "true-client-ip", // Akamai and Cloudflare
@@ -24,21 +23,7 @@ export const fastifyIp = fp(async (fastify) => {
const forwardedIpHeader = headersOrder.find((header) => Boolean(req.headers[header])); const forwardedIpHeader = headersOrder.find((header) => Boolean(req.headers[header]));
const forwardedIp = forwardedIpHeader ? req.headers[forwardedIpHeader] : undefined; const forwardedIp = forwardedIpHeader ? req.headers[forwardedIpHeader] : undefined;
if (forwardedIp) { if (forwardedIp) {
if (Array.isArray(forwardedIp)) { req.realIp = Array.isArray(forwardedIp) ? forwardedIp[0] : forwardedIp;
// eslint-disable-next-line
req.realIp = forwardedIp[0];
return;
}
if (forwardedIp.includes(",")) {
// the ip header when placed with load balancers that proxy request
// will attach the internal ips to header by appending with comma
// https://github.com/go-chi/chi/blob/master/middleware/realip.go
const clientIPFromProxy = forwardedIp.slice(0, forwardedIp.indexOf(",")).trim();
req.realIp = clientIPFromProxy;
return;
}
req.realIp = forwardedIp;
} else { } else {
req.realIp = req.ip; req.realIp = req.ip;
} }

View File

@@ -5,13 +5,8 @@ import { getConfig } from "@app/lib/config/env";
export const maintenanceMode = fp(async (fastify) => { export const maintenanceMode = fp(async (fastify) => {
fastify.addHook("onRequest", async (req) => { fastify.addHook("onRequest", async (req) => {
const serverEnvs = getConfig(); const serverEnvs = getConfig();
if (serverEnvs.MAINTENANCE_MODE) { if (req.url !== "/api/v1/auth/checkAuth" && req.method !== "GET" && serverEnvs.MAINTENANCE_MODE) {
// skip if its universal auth login or renew throw new Error("Infisical is in maintenance mode. Please try again later.");
if (req.url === "/api/v1/auth/universal-auth/login" && req.method === "POST") return;
if (req.url === "/api/v1/auth/token/renew" && req.method === "POST") return;
if (req.url !== "/api/v1/auth/checkAuth" && req.method !== "GET") {
throw new Error("Infisical is in maintenance mode. Please try again later.");
}
} }
}); });
}); });

View File

@@ -44,7 +44,6 @@ import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approva
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal"; import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";
import { secretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal"; import { secretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { secretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service"; import { secretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { secretReplicationServiceFactory } from "@app/ee/services/secret-replication/secret-replication-service";
import { secretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal"; import { secretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal";
import { secretRotationQueueFactory } from "@app/ee/services/secret-rotation/secret-rotation-queue"; import { secretRotationQueueFactory } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { secretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service"; import { secretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
@@ -79,14 +78,6 @@ import { identityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { identityServiceFactory } from "@app/services/identity/identity-service"; import { identityServiceFactory } from "@app/services/identity/identity-service";
import { identityAccessTokenDALFactory } from "@app/services/identity-access-token/identity-access-token-dal"; import { identityAccessTokenDALFactory } from "@app/services/identity-access-token/identity-access-token-dal";
import { identityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service"; import { identityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { identityAwsAuthDALFactory } from "@app/services/identity-aws-auth/identity-aws-auth-dal";
import { identityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { identityAzureAuthDALFactory } from "@app/services/identity-azure-auth/identity-azure-auth-dal";
import { identityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { identityGcpAuthDALFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-dal";
import { identityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { identityKubernetesAuthDALFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-dal";
import { identityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { identityProjectDALFactory } from "@app/services/identity-project/identity-project-dal"; import { identityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { identityProjectMembershipRoleDALFactory } from "@app/services/identity-project/identity-project-membership-role-dal"; import { identityProjectMembershipRoleDALFactory } from "@app/services/identity-project/identity-project-membership-role-dal";
import { identityProjectServiceFactory } from "@app/services/identity-project/identity-project-service"; import { identityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@@ -97,9 +88,6 @@ import { integrationDALFactory } from "@app/services/integration/integration-dal
import { integrationServiceFactory } from "@app/services/integration/integration-service"; import { integrationServiceFactory } from "@app/services/integration/integration-service";
import { integrationAuthDALFactory } from "@app/services/integration-auth/integration-auth-dal"; import { integrationAuthDALFactory } from "@app/services/integration-auth/integration-auth-dal";
import { integrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service"; import { integrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { kmsDALFactory } from "@app/services/kms/kms-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { incidentContactDALFactory } from "@app/services/org/incident-contacts-dal"; import { incidentContactDALFactory } from "@app/services/org/incident-contacts-dal";
import { orgBotDALFactory } from "@app/services/org/org-bot-dal"; import { orgBotDALFactory } from "@app/services/org/org-bot-dal";
import { orgDALFactory } from "@app/services/org/org-dal"; import { orgDALFactory } from "@app/services/org/org-dal";
@@ -121,7 +109,6 @@ import { projectMembershipServiceFactory } from "@app/services/project-membershi
import { projectUserMembershipRoleDALFactory } from "@app/services/project-membership/project-user-membership-role-dal"; import { projectUserMembershipRoleDALFactory } from "@app/services/project-membership/project-user-membership-role-dal";
import { projectRoleDALFactory } from "@app/services/project-role/project-role-dal"; import { projectRoleDALFactory } from "@app/services/project-role/project-role-dal";
import { projectRoleServiceFactory } from "@app/services/project-role/project-role-service"; import { projectRoleServiceFactory } from "@app/services/project-role/project-role-service";
import { dailyResourceCleanUpQueueServiceFactory } from "@app/services/resource-cleanup/resource-cleanup-queue";
import { secretDALFactory } from "@app/services/secret/secret-dal"; import { secretDALFactory } from "@app/services/secret/secret-dal";
import { secretQueueFactory } from "@app/services/secret/secret-queue"; import { secretQueueFactory } from "@app/services/secret/secret-queue";
import { secretServiceFactory } from "@app/services/secret/secret-service"; import { secretServiceFactory } from "@app/services/secret/secret-service";
@@ -134,8 +121,6 @@ import { secretFolderServiceFactory } from "@app/services/secret-folder/secret-f
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal"; import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretImportDALFactory } from "@app/services/secret-import/secret-import-dal"; import { secretImportDALFactory } from "@app/services/secret-import/secret-import-dal";
import { secretImportServiceFactory } from "@app/services/secret-import/secret-import-service"; import { secretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { secretSharingDALFactory } from "@app/services/secret-sharing/secret-sharing-dal";
import { secretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal"; import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service"; import { secretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { serviceTokenDALFactory } from "@app/services/service-token/service-token-dal"; import { serviceTokenDALFactory } from "@app/services/service-token/service-token-dal";
@@ -169,10 +154,7 @@ export const registerRoutes = async (
keyStore keyStore
}: { db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory } }: { db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory }
) => { ) => {
const appCfg = getConfig(); await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
if (!appCfg.DISABLE_SECRET_SCANNING) {
await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
}
// db layers // db layers
const userDAL = userDALFactory(db); const userDAL = userDALFactory(db);
@@ -218,11 +200,7 @@ export const registerRoutes = async (
const identityProjectAdditionalPrivilegeDAL = identityProjectAdditionalPrivilegeDALFactory(db); const identityProjectAdditionalPrivilegeDAL = identityProjectAdditionalPrivilegeDALFactory(db);
const identityUaDAL = identityUaDALFactory(db); const identityUaDAL = identityUaDALFactory(db);
const identityKubernetesAuthDAL = identityKubernetesAuthDALFactory(db);
const identityUaClientSecretDAL = identityUaClientSecretDALFactory(db); const identityUaClientSecretDAL = identityUaClientSecretDALFactory(db);
const identityAwsAuthDAL = identityAwsAuthDALFactory(db);
const identityGcpAuthDAL = identityGcpAuthDALFactory(db);
const identityAzureAuthDAL = identityAzureAuthDALFactory(db);
const auditLogDAL = auditLogDALFactory(db); const auditLogDAL = auditLogDALFactory(db);
const auditLogStreamDAL = auditLogStreamDALFactory(db); const auditLogStreamDAL = auditLogStreamDALFactory(db);
@@ -244,8 +222,8 @@ export const registerRoutes = async (
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db); const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db); const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db); const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db); const sarReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
const secretApprovalRequestSecretDAL = secretApprovalRequestSecretDALFactory(db); const sarSecretDAL = secretApprovalRequestSecretDALFactory(db);
const secretRotationDAL = secretRotationDALFactory(db); const secretRotationDAL = secretRotationDALFactory(db);
const snapshotDAL = snapshotDALFactory(db); const snapshotDAL = snapshotDALFactory(db);
@@ -259,14 +237,10 @@ export const registerRoutes = async (
const groupProjectMembershipRoleDAL = groupProjectMembershipRoleDALFactory(db); const groupProjectMembershipRoleDAL = groupProjectMembershipRoleDALFactory(db);
const userGroupMembershipDAL = userGroupMembershipDALFactory(db); const userGroupMembershipDAL = userGroupMembershipDALFactory(db);
const secretScanningDAL = secretScanningDALFactory(db); const secretScanningDAL = secretScanningDALFactory(db);
const secretSharingDAL = secretSharingDALFactory(db);
const licenseDAL = licenseDALFactory(db); const licenseDAL = licenseDALFactory(db);
const dynamicSecretDAL = dynamicSecretDALFactory(db); const dynamicSecretDAL = dynamicSecretDALFactory(db);
const dynamicSecretLeaseDAL = dynamicSecretLeaseDALFactory(db); const dynamicSecretLeaseDAL = dynamicSecretLeaseDALFactory(db);
const kmsDAL = kmsDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const permissionService = permissionServiceFactory({ const permissionService = permissionServiceFactory({
permissionDAL, permissionDAL,
orgRoleDAL, orgRoleDAL,
@@ -275,12 +249,6 @@ export const registerRoutes = async (
projectDAL projectDAL
}); });
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore }); const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL
});
const trustedIpService = trustedIpServiceFactory({ const trustedIpService = trustedIpServiceFactory({
licenseService, licenseService,
projectDAL, projectDAL,
@@ -301,7 +269,7 @@ export const registerRoutes = async (
permissionService, permissionService,
auditLogStreamDAL auditLogStreamDAL
}); });
const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({ const sapService = secretApprovalPolicyServiceFactory({
projectMembershipDAL, projectMembershipDAL,
projectEnvDAL, projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL, secretApprovalPolicyApproverDAL: sapApproverDAL,
@@ -502,7 +470,7 @@ export const registerRoutes = async (
projectBotDAL, projectBotDAL,
projectMembershipDAL, projectMembershipDAL,
secretApprovalRequestDAL, secretApprovalRequestDAL,
secretApprovalSecretDAL: secretApprovalRequestSecretDAL, secretApprovalSecretDAL: sarSecretDAL,
projectUserMembershipRoleDAL projectUserMembershipRoleDAL
}); });
@@ -539,8 +507,7 @@ export const registerRoutes = async (
permissionService, permissionService,
projectRoleDAL, projectRoleDAL,
projectUserMembershipRoleDAL, projectUserMembershipRoleDAL,
identityProjectMembershipRoleDAL, identityProjectMembershipRoleDAL
projectDAL
}); });
const snapshotService = secretSnapshotServiceFactory({ const snapshotService = secretSnapshotServiceFactory({
@@ -568,10 +535,8 @@ export const registerRoutes = async (
folderDAL, folderDAL,
folderVersionDAL, folderVersionDAL,
projectEnvDAL, projectEnvDAL,
snapshotService, snapshotService
projectDAL
}); });
const integrationAuthService = integrationAuthServiceFactory({ const integrationAuthService = integrationAuthServiceFactory({
integrationAuthDAL, integrationAuthDAL,
integrationDAL, integrationDAL,
@@ -600,7 +565,6 @@ export const registerRoutes = async (
secretVersionTagDAL secretVersionTagDAL
}); });
const secretImportService = secretImportServiceFactory({ const secretImportService = secretImportServiceFactory({
licenseService,
projectEnvDAL, projectEnvDAL,
folderDAL, folderDAL,
permissionService, permissionService,
@@ -629,24 +593,18 @@ export const registerRoutes = async (
projectEnvDAL, projectEnvDAL,
projectBotService projectBotService
}); });
const sarService = secretApprovalRequestServiceFactory({
const secretSharingService = secretSharingServiceFactory({
permissionService, permissionService,
secretSharingDAL
});
const secretApprovalRequestService = secretApprovalRequestServiceFactory({
permissionService,
projectBotService,
folderDAL, folderDAL,
secretDAL, secretDAL,
secretTagDAL, secretTagDAL,
secretApprovalRequestSecretDAL, secretApprovalRequestSecretDAL: sarSecretDAL,
secretApprovalRequestReviewerDAL, secretApprovalRequestReviewerDAL: sarReviewerDAL,
projectDAL, projectDAL,
secretVersionDAL, secretVersionDAL,
secretBlindIndexDAL, secretBlindIndexDAL,
secretApprovalRequestDAL, secretApprovalRequestDAL,
secretService,
snapshotService, snapshotService,
secretVersionTagDAL, secretVersionTagDAL,
secretQueueService secretQueueService
@@ -675,23 +633,6 @@ export const registerRoutes = async (
accessApprovalPolicyApproverDAL accessApprovalPolicyApproverDAL
}); });
const secretReplicationService = secretReplicationServiceFactory({
secretTagDAL,
secretVersionTagDAL,
secretDAL,
secretVersionDAL,
secretImportDAL,
keyStore,
queueService,
folderDAL,
secretApprovalPolicyService,
secretBlindIndexDAL,
secretApprovalRequestDAL,
secretApprovalRequestSecretDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
});
const secretRotationQueue = secretRotationQueueFactory({ const secretRotationQueue = secretRotationQueueFactory({
telemetryService, telemetryService,
secretRotationDAL, secretRotationDAL,
@@ -758,41 +699,6 @@ export const registerRoutes = async (
identityUaDAL, identityUaDAL,
licenseService licenseService
}); });
const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
identityKubernetesAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
orgBotDAL,
permissionService,
licenseService
});
const identityGcpAuthService = identityGcpAuthServiceFactory({
identityGcpAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
permissionService,
licenseService
});
const identityAwsAuthService = identityAwsAuthServiceFactory({
identityAccessTokenDAL,
identityAwsAuthDAL,
identityOrgMembershipDAL,
identityDAL,
licenseService,
permissionService
});
const identityAzureAuthService = identityAzureAuthServiceFactory({
identityAzureAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
identityDAL,
permissionService,
licenseService
});
const dynamicSecretProviders = buildDynamicSecretProviders(); const dynamicSecretProviders = buildDynamicSecretProviders();
const dynamicSecretQueueService = dynamicSecretLeaseQueueServiceFactory({ const dynamicSecretQueueService = dynamicSecretLeaseQueueServiceFactory({
@@ -821,21 +727,14 @@ export const registerRoutes = async (
folderDAL, folderDAL,
licenseService licenseService
}); });
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
queueService,
identityAccessTokenDAL,
secretSharingDAL
});
await superAdminService.initServerCfg(); await superAdminService.initServerCfg();
// //
// setup the communication with license key server // setup the communication with license key server
await licenseService.init(); await licenseService.init();
await auditLogQueue.startAuditLogPruneJob();
await telemetryQueue.startTelemetryCheck(); await telemetryQueue.startTelemetryCheck();
await dailyResourceCleanUp.startCleanUp();
await kmsService.startService();
// inject all services // inject all services
server.decorate<FastifyZodProvider["services"]>("services", { server.decorate<FastifyZodProvider["services"]>("services", {
@@ -857,7 +756,6 @@ export const registerRoutes = async (
projectEnv: projectEnvService, projectEnv: projectEnvService,
projectRole: projectRoleService, projectRole: projectRoleService,
secret: secretService, secret: secretService,
secretReplication: secretReplicationService,
secretTag: secretTagService, secretTag: secretTagService,
folder: folderService, folder: folderService,
secretImport: secretImportService, secretImport: secretImportService,
@@ -870,14 +768,10 @@ export const registerRoutes = async (
identityAccessToken: identityAccessTokenService, identityAccessToken: identityAccessTokenService,
identityProject: identityProjectService, identityProject: identityProjectService,
identityUa: identityUaService, identityUa: identityUaService,
identityKubernetesAuth: identityKubernetesAuthService, secretApprovalPolicy: sapService,
identityGcpAuth: identityGcpAuthService,
identityAwsAuth: identityAwsAuthService,
identityAzureAuth: identityAzureAuthService,
accessApprovalPolicy: accessApprovalPolicyService, accessApprovalPolicy: accessApprovalPolicyService,
accessApprovalRequest: accessApprovalRequestService, accessApprovalRequest: accessApprovalRequestService,
secretApprovalPolicy: secretApprovalPolicyService, secretApprovalRequest: sarService,
secretApprovalRequest: secretApprovalRequestService,
secretRotation: secretRotationService, secretRotation: secretRotationService,
dynamicSecret: dynamicSecretService, dynamicSecret: dynamicSecretService,
dynamicSecretLease: dynamicSecretLeaseService, dynamicSecretLease: dynamicSecretLeaseService,
@@ -893,8 +787,7 @@ export const registerRoutes = async (
secretBlindIndex: secretBlindIndexService, secretBlindIndex: secretBlindIndexService,
telemetry: telemetryService, telemetry: telemetryService,
projectUserAdditionalPrivilege: projectUserAdditionalPrivilegeService, projectUserAdditionalPrivilege: projectUserAdditionalPrivilegeService,
identityProjectAdditionalPrivilege: identityProjectAdditionalPrivilegeService, identityProjectAdditionalPrivilege: identityProjectAdditionalPrivilegeService
secretSharing: secretSharingService
}); });
server.decorate<FastifyZodProvider["store"]>("store", { server.decorate<FastifyZodProvider["store"]>("store", {
@@ -919,8 +812,7 @@ export const registerRoutes = async (
emailConfigured: z.boolean().optional(), emailConfigured: z.boolean().optional(),
inviteOnlySignup: z.boolean().optional(), inviteOnlySignup: z.boolean().optional(),
redisConfigured: z.boolean().optional(), redisConfigured: z.boolean().optional(),
secretScanningConfigured: z.boolean().optional(), secretScanningConfigured: z.boolean().optional()
samlDefaultOrgSlug: z.string().optional()
}) })
} }
}, },
@@ -933,8 +825,7 @@ export const registerRoutes = async (
emailConfigured: cfg.isSmtpConfigured, emailConfigured: cfg.isSmtpConfigured,
inviteOnlySignup: Boolean(serverCfg.allowSignUp), inviteOnlySignup: Boolean(serverCfg.allowSignUp),
redisConfigured: cfg.isRedisConfigured, redisConfigured: cfg.isRedisConfigured,
secretScanningConfigured: cfg.isSecretScanningConfigured, secretScanningConfigured: cfg.isSecretScanningConfigured
samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
}; };
} }
}); });

View File

@@ -4,12 +4,10 @@ import {
DynamicSecretsSchema, DynamicSecretsSchema,
IdentityProjectAdditionalPrivilegeSchema, IdentityProjectAdditionalPrivilegeSchema,
IntegrationAuthsSchema, IntegrationAuthsSchema,
ProjectRolesSchema,
SecretApprovalPoliciesSchema, SecretApprovalPoliciesSchema,
UsersSchema UsersSchema
} from "@app/db/schemas"; } from "@app/db/schemas";
import { UnpackedPermissionSchema } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service"; import { UnpackedPermissionSchema } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
// sometimes the return data must be santizied to avoid leaking important values // sometimes the return data must be santizied to avoid leaking important values
// always prefer pick over omit in zod // always prefer pick over omit in zod
@@ -66,12 +64,14 @@ export const secretRawSchema = z.object({
secretComment: z.string().optional() secretComment: z.string().optional()
}); });
export const ProjectPermissionSchema = z.object({ export const PermissionSchema = z.object({
action: z action: z
.nativeEnum(ProjectPermissionActions) .string()
.min(1)
.describe("Describe what action an entity can take. Possible actions: create, edit, delete, and read"), .describe("Describe what action an entity can take. Possible actions: create, edit, delete, and read"),
subject: z subject: z
.nativeEnum(ProjectPermissionSub) .string()
.min(1)
.describe("The entity this permission pertains to. Possible options: secrets, environments"), .describe("The entity this permission pertains to. Possible options: secrets, environments"),
conditions: z conditions: z
.object({ .object({
@@ -89,38 +89,10 @@ export const ProjectPermissionSchema = z.object({
.optional() .optional()
}); });
export const ProjectSpecificPrivilegePermissionSchema = z.object({
actions: z
.nativeEnum(ProjectPermissionActions)
.describe("Describe what action an entity can take. Possible actions: create, edit, delete, and read")
.array()
.min(1),
subject: z
.enum([ProjectPermissionSub.Secrets])
.describe("The entity this permission pertains to. Possible options: secrets, environments"),
conditions: z
.object({
environment: z.string().describe("The environment slug this permission should allow."),
secretPath: z
.object({
$glob: z
.string()
.min(1)
.describe("The secret path this permission should allow. Can be a glob pattern such as /folder-name/*/** ")
})
.optional()
})
.describe("When specified, only matching conditions will be allowed to access given resource.")
});
export const SanitizedIdentityPrivilegeSchema = IdentityProjectAdditionalPrivilegeSchema.extend({ export const SanitizedIdentityPrivilegeSchema = IdentityProjectAdditionalPrivilegeSchema.extend({
permissions: UnpackedPermissionSchema.array() permissions: UnpackedPermissionSchema.array()
}); });
export const SanitizedRoleSchema = ProjectRolesSchema.extend({
permissions: UnpackedPermissionSchema.array()
});
export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({ export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
inputIV: true, inputIV: true,
inputTag: true, inputTag: true,

View File

@@ -20,23 +20,16 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
schema: { schema: {
response: { response: {
200: z.object({ 200: z.object({
config: SuperAdminSchema.omit({ createdAt: true, updatedAt: true }).extend({ config: SuperAdminSchema.omit({ createdAt: true, updatedAt: true }).merge(
isMigrationModeOn: z.boolean(), z.object({ isMigrationModeOn: z.boolean() })
isSecretScanningDisabled: z.boolean() )
})
}) })
} }
}, },
handler: async () => { handler: async () => {
const config = await getServerCfg(); const config = await getServerCfg();
const serverEnvs = getConfig(); const serverEnvs = getConfig();
return { return { config: { ...config, isMigrationModeOn: serverEnvs.MAINTENANCE_MODE } };
config: {
...config,
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING
}
};
} }
}); });

View File

@@ -36,29 +36,4 @@ export const registerIdentityAccessTokenRouter = async (server: FastifyZodProvid
}; };
} }
}); });
server.route({
url: "/token/revoke",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
description: "Revoke access token",
body: z.object({
accessToken: z.string().trim().describe(UNIVERSAL_AUTH.REVOKE_ACCESS_TOKEN.accessToken)
}),
response: {
200: z.object({
message: z.string()
})
}
},
handler: async (req) => {
await server.services.identityAccessToken.revokeAccessToken(req.body.accessToken);
return {
message: "Successfully revoked access token"
};
}
});
}; };

View File

@@ -1,269 +0,0 @@
import { z } from "zod";
import { IdentityAwsAuthsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { AWS_AUTH } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import {
validateAccountIds,
validatePrincipalArns
} from "@app/services/identity-aws-auth/identity-aws-auth-validators";
export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/aws-auth/login",
config: {
rateLimit: writeLimit
},
schema: {
description: "Login with AWS Auth",
body: z.object({
identityId: z.string().describe(AWS_AUTH.LOGIN.identityId),
iamHttpRequestMethod: z.string().default("POST").describe(AWS_AUTH.LOGIN.iamHttpRequestMethod),
iamRequestBody: z.string().describe(AWS_AUTH.LOGIN.iamRequestBody),
iamRequestHeaders: z.string().describe(AWS_AUTH.LOGIN.iamRequestHeaders)
}),
response: {
200: z.object({
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
})
}
},
handler: async (req) => {
const { identityAwsAuth, accessToken, identityAccessToken, identityMembershipOrg } =
await server.services.identityAwsAuth.login(req.body);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg?.orgId,
event: {
type: EventType.LOGIN_IDENTITY_AWS_AUTH,
metadata: {
identityId: identityAwsAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
identityAwsAuthId: identityAwsAuth.id
}
}
});
return {
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityAwsAuth.accessTokenTTL,
accessTokenMaxTTL: identityAwsAuth.accessTokenMaxTTL
};
}
});
server.route({
method: "POST",
url: "/aws-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Attach AWS Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim()
}),
body: z.object({
stsEndpoint: z.string().trim().min(1).default("https://sts.amazonaws.com/"),
allowedPrincipalArns: validatePrincipalArns,
allowedAccountIds: validateAccountIds,
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]),
accessTokenTTL: z
.number()
.int()
.min(1)
.refine((value) => value !== 0, {
message: "accessTokenTTL must have a non zero number"
})
.default(2592000),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.default(2592000),
accessTokenNumUsesLimit: z.number().int().min(0).default(0)
}),
response: {
200: z.object({
identityAwsAuth: IdentityAwsAuthsSchema
})
}
},
handler: async (req) => {
const identityAwsAuth = await server.services.identityAwsAuth.attachAwsAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAwsAuth.orgId,
event: {
type: EventType.ADD_IDENTITY_AWS_AUTH,
metadata: {
identityId: identityAwsAuth.identityId,
stsEndpoint: identityAwsAuth.stsEndpoint,
allowedPrincipalArns: identityAwsAuth.allowedPrincipalArns,
allowedAccountIds: identityAwsAuth.allowedAccountIds,
accessTokenTTL: identityAwsAuth.accessTokenTTL,
accessTokenMaxTTL: identityAwsAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityAwsAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit
}
}
});
return { identityAwsAuth };
}
});
server.route({
method: "PATCH",
url: "/aws-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update AWS Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
body: z.object({
stsEndpoint: z.string().trim().min(1).optional(),
allowedPrincipalArns: validatePrincipalArns,
allowedAccountIds: validateAccountIds,
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional(),
accessTokenTTL: z.number().int().min(0).optional(),
accessTokenNumUsesLimit: z.number().int().min(0).optional(),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.optional()
}),
response: {
200: z.object({
identityAwsAuth: IdentityAwsAuthsSchema
})
}
},
handler: async (req) => {
const identityAwsAuth = await server.services.identityAwsAuth.updateAwsAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAwsAuth.orgId,
event: {
type: EventType.UPDATE_IDENTITY_AWS_AUTH,
metadata: {
identityId: identityAwsAuth.identityId,
stsEndpoint: identityAwsAuth.stsEndpoint,
allowedPrincipalArns: identityAwsAuth.allowedPrincipalArns,
allowedAccountIds: identityAwsAuth.allowedAccountIds,
accessTokenTTL: identityAwsAuth.accessTokenTTL,
accessTokenMaxTTL: identityAwsAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityAwsAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityAwsAuth.accessTokenNumUsesLimit
}
}
});
return { identityAwsAuth };
}
});
server.route({
method: "GET",
url: "/aws-auth/identities/:identityId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Retrieve AWS Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
response: {
200: z.object({
identityAwsAuth: IdentityAwsAuthsSchema
})
}
},
handler: async (req) => {
const identityAwsAuth = await server.services.identityAwsAuth.getAwsAuth({
identityId: req.params.identityId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAwsAuth.orgId,
event: {
type: EventType.GET_IDENTITY_AWS_AUTH,
metadata: {
identityId: identityAwsAuth.identityId
}
}
});
return { identityAwsAuth };
}
});
};

View File

@@ -1,262 +0,0 @@
import { z } from "zod";
import { IdentityAzureAuthsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { validateAzureAuthField } from "@app/services/identity-azure-auth/identity-azure-auth-validators";
export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/azure-auth/login",
config: {
rateLimit: writeLimit
},
schema: {
description: "Login with Azure Auth",
body: z.object({
identityId: z.string(),
jwt: z.string()
}),
response: {
200: z.object({
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
})
}
},
handler: async (req) => {
const { identityAzureAuth, accessToken, identityAccessToken, identityMembershipOrg } =
await server.services.identityAzureAuth.login(req.body);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg.orgId,
event: {
type: EventType.LOGIN_IDENTITY_AZURE_AUTH,
metadata: {
identityId: identityAzureAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
identityAzureAuthId: identityAzureAuth.id
}
}
});
return {
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityAzureAuth.accessTokenTTL,
accessTokenMaxTTL: identityAzureAuth.accessTokenMaxTTL
};
}
});
server.route({
method: "POST",
url: "/azure-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Attach Azure Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim()
}),
body: z.object({
tenantId: z.string().trim(),
resource: z.string().trim(),
allowedServicePrincipalIds: validateAzureAuthField,
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]),
accessTokenTTL: z
.number()
.int()
.min(1)
.refine((value) => value !== 0, {
message: "accessTokenTTL must have a non zero number"
})
.default(2592000),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.default(2592000),
accessTokenNumUsesLimit: z.number().int().min(0).default(0)
}),
response: {
200: z.object({
identityAzureAuth: IdentityAzureAuthsSchema
})
}
},
handler: async (req) => {
const identityAzureAuth = await server.services.identityAzureAuth.attachAzureAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAzureAuth.orgId,
event: {
type: EventType.ADD_IDENTITY_AZURE_AUTH,
metadata: {
identityId: identityAzureAuth.identityId,
tenantId: identityAzureAuth.tenantId,
resource: identityAzureAuth.resource,
accessTokenTTL: identityAzureAuth.accessTokenTTL,
accessTokenMaxTTL: identityAzureAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityAzureAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityAzureAuth.accessTokenNumUsesLimit
}
}
});
return { identityAzureAuth };
}
});
server.route({
method: "PATCH",
url: "/azure-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update Azure Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim()
}),
body: z.object({
tenantId: z.string().trim().optional(),
resource: z.string().trim().optional(),
allowedServicePrincipalIds: validateAzureAuthField.optional(),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional(),
accessTokenTTL: z.number().int().min(0).optional(),
accessTokenNumUsesLimit: z.number().int().min(0).optional(),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.optional()
}),
response: {
200: z.object({
identityAzureAuth: IdentityAzureAuthsSchema
})
}
},
handler: async (req) => {
const identityAzureAuth = await server.services.identityAzureAuth.updateAzureAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAzureAuth.orgId,
event: {
type: EventType.UPDATE_IDENTITY_AZURE_AUTH,
metadata: {
identityId: identityAzureAuth.identityId,
tenantId: identityAzureAuth.tenantId,
resource: identityAzureAuth.resource,
accessTokenTTL: identityAzureAuth.accessTokenTTL,
accessTokenMaxTTL: identityAzureAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityAzureAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityAzureAuth.accessTokenNumUsesLimit
}
}
});
return { identityAzureAuth };
}
});
server.route({
method: "GET",
url: "/azure-auth/identities/:identityId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Retrieve Azure Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
response: {
200: z.object({
identityAzureAuth: IdentityAzureAuthsSchema
})
}
},
handler: async (req) => {
const identityAzureAuth = await server.services.identityAzureAuth.getAzureAuth({
identityId: req.params.identityId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAzureAuth.orgId,
event: {
type: EventType.GET_IDENTITY_AZURE_AUTH,
metadata: {
identityId: identityAzureAuth.identityId
}
}
});
return { identityAzureAuth };
}
});
};
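Editor's note: a hedged sketch of how a workload might exchange an Azure AD JWT for an access token against the "/azure-auth/login" route above. BASE_URL and the /api/v1/auth prefix are assumptions; the request and response shapes follow the Zod schemas in the route.

// Hypothetical login call; BASE_URL and the /api/v1/auth prefix are assumptions.
const BASE_URL = process.env.INFISICAL_URL ?? "https://app.example.com";

export async function azureAuthLogin(identityId: string, azureJwt: string) {
  const res = await fetch(`${BASE_URL}/api/v1/auth/azure-auth/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ identityId, jwt: azureJwt }) // matches the route's body schema
  });
  if (!res.ok) throw new Error(`Azure auth login failed with status ${res.status}`);
  // Shape follows the 200 response schema: accessToken, expiresIn, accessTokenMaxTTL, tokenType.
  return (await res.json()) as {
    accessToken: string;
    expiresIn: number;
    accessTokenMaxTTL: number;
    tokenType: "Bearer";
  };
}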

View File

@@ -1,268 +0,0 @@
import { z } from "zod";
import { IdentityGcpAuthsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { validateGcpAuthField } from "@app/services/identity-gcp-auth/identity-gcp-auth-validators";
export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/gcp-auth/login",
config: {
rateLimit: writeLimit
},
schema: {
description: "Login with GCP Auth",
body: z.object({
identityId: z.string(),
jwt: z.string()
}),
response: {
200: z.object({
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
})
}
},
handler: async (req) => {
const { identityGcpAuth, accessToken, identityAccessToken, identityMembershipOrg } =
await server.services.identityGcpAuth.login(req.body);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg?.orgId,
event: {
type: EventType.LOGIN_IDENTITY_GCP_AUTH,
metadata: {
identityId: identityGcpAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
identityGcpAuthId: identityGcpAuth.id
}
}
});
return {
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityGcpAuth.accessTokenTTL,
accessTokenMaxTTL: identityGcpAuth.accessTokenMaxTTL
};
}
});
server.route({
method: "POST",
url: "/gcp-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Attach GCP Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim()
}),
body: z.object({
type: z.enum(["iam", "gce"]),
allowedServiceAccounts: validateGcpAuthField,
allowedProjects: validateGcpAuthField,
allowedZones: validateGcpAuthField,
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]),
accessTokenTTL: z
.number()
.int()
.min(1)
.refine((value) => value !== 0, {
message: "accessTokenTTL must have a non zero number"
})
.default(2592000),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.default(2592000),
accessTokenNumUsesLimit: z.number().int().min(0).default(0)
}),
response: {
200: z.object({
identityGcpAuth: IdentityGcpAuthsSchema
})
}
},
handler: async (req) => {
const identityGcpAuth = await server.services.identityGcpAuth.attachGcpAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityGcpAuth.orgId,
event: {
type: EventType.ADD_IDENTITY_GCP_AUTH,
metadata: {
identityId: identityGcpAuth.identityId,
type: identityGcpAuth.type,
allowedServiceAccounts: identityGcpAuth.allowedServiceAccounts,
allowedProjects: identityGcpAuth.allowedProjects,
allowedZones: identityGcpAuth.allowedZones,
accessTokenTTL: identityGcpAuth.accessTokenTTL,
accessTokenMaxTTL: identityGcpAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityGcpAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityGcpAuth.accessTokenNumUsesLimit
}
}
});
return { identityGcpAuth };
}
});
server.route({
method: "PATCH",
url: "/gcp-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update GCP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim()
}),
body: z.object({
type: z.enum(["iam", "gce"]).optional(),
allowedServiceAccounts: validateGcpAuthField.optional(),
allowedProjects: validateGcpAuthField.optional(),
allowedZones: validateGcpAuthField.optional(),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional(),
accessTokenTTL: z.number().int().min(0).optional(),
accessTokenNumUsesLimit: z.number().int().min(0).optional(),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.optional()
}),
response: {
200: z.object({
identityGcpAuth: IdentityGcpAuthsSchema
})
}
},
handler: async (req) => {
const identityGcpAuth = await server.services.identityGcpAuth.updateGcpAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityGcpAuth.orgId,
event: {
type: EventType.UPDATE_IDENTITY_GCP_AUTH,
metadata: {
identityId: identityGcpAuth.identityId,
type: identityGcpAuth.type,
allowedServiceAccounts: identityGcpAuth.allowedServiceAccounts,
allowedProjects: identityGcpAuth.allowedProjects,
allowedZones: identityGcpAuth.allowedZones,
accessTokenTTL: identityGcpAuth.accessTokenTTL,
accessTokenMaxTTL: identityGcpAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityGcpAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityGcpAuth.accessTokenNumUsesLimit
}
}
});
return { identityGcpAuth };
}
});
server.route({
method: "GET",
url: "/gcp-auth/identities/:identityId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Retrieve GCP Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
response: {
200: z.object({
identityGcpAuth: IdentityGcpAuthsSchema
})
}
},
handler: async (req) => {
const identityGcpAuth = await server.services.identityGcpAuth.getGcpAuth({
identityId: req.params.identityId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityGcpAuth.orgId,
event: {
type: EventType.GET_IDENTITY_GCP_AUTH,
metadata: {
identityId: identityGcpAuth.identityId
}
}
});
return { identityGcpAuth };
}
});
};
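Editor's note: a hedged sketch of attaching a GCP Auth configuration through the POST "/gcp-auth/identities/:identityId" route above. BASE_URL, the /api/v1/auth prefix, and the exact string format accepted by validateGcpAuthField (assumed here to be plain account/project names) are assumptions; fields left out of the body fall back to the schema defaults shown above (trusted IPs 0.0.0.0/0 and ::/0, TTLs of 2592000 seconds, unlimited uses).

// Hypothetical configuration call; all concrete values below are placeholders.
const BASE_URL = process.env.INFISICAL_URL ?? "https://app.example.com";

export async function attachGcpAuth(identityId: string, accessToken: string) {
  const res = await fetch(`${BASE_URL}/api/v1/auth/gcp-auth/identities/${identityId}`, {
    method: "POST",
    headers: { Authorization: `Bearer ${accessToken}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      type: "iam",
      allowedServiceAccounts: "ci@my-project.iam.gserviceaccount.com", // hypothetical value
      allowedProjects: "my-project", // hypothetical value
      allowedZones: ""
      // accessTokenTrustedIps, accessTokenTTL, accessTokenMaxTTL and
      // accessTokenNumUsesLimit fall back to the schema defaults above.
    })
  });
  if (!res.ok) throw new Error(`Attaching GCP auth failed with status ${res.status}`);
  const { identityGcpAuth } = (await res.json()) as { identityGcpAuth: unknown };
  return identityGcpAuth;
}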

View File

@@ -1,283 +0,0 @@
import { z } from "zod";
import { IdentityKubernetesAuthsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.omit({
encryptedCaCert: true,
caCertIV: true,
caCertTag: true,
encryptedTokenReviewerJwt: true,
tokenReviewerJwtIV: true,
tokenReviewerJwtTag: true
}).extend({
caCert: z.string(),
tokenReviewerJwt: z.string()
});
export const registerIdentityKubernetesRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/kubernetes-auth/login",
config: {
rateLimit: writeLimit
},
schema: {
description: "Login with Kubernetes Auth",
body: z.object({
identityId: z.string().trim(),
jwt: z.string().trim()
}),
response: {
200: z.object({
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
})
}
},
handler: async (req) => {
const { identityKubernetesAuth, accessToken, identityAccessToken, identityMembershipOrg } =
await server.services.identityKubernetesAuth.login({
identityId: req.body.identityId,
jwt: req.body.jwt
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg?.orgId,
event: {
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH,
metadata: {
identityId: identityKubernetesAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
identityKubernetesAuthId: identityKubernetesAuth.id
}
}
});
return {
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityKubernetesAuth.accessTokenTTL,
accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL
};
}
});
server.route({
method: "POST",
url: "/kubernetes-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Attach Kubernetes Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim()
}),
body: z.object({
kubernetesHost: z.string().trim().min(1),
caCert: z.string().trim().default(""),
tokenReviewerJwt: z.string().trim().min(1),
allowedNamespaces: z.string(), // TODO: validation
allowedNames: z.string(),
allowedAudience: z.string(),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]),
accessTokenTTL: z
.number()
.int()
.min(1)
.refine((value) => value !== 0, {
message: "accessTokenTTL must have a non zero number"
})
.default(2592000),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.default(2592000),
accessTokenNumUsesLimit: z.number().int().min(0).default(0)
}),
response: {
200: z.object({
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
})
}
},
handler: async (req) => {
const identityKubernetesAuth = await server.services.identityKubernetesAuth.attachKubernetesAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityKubernetesAuth.orgId,
event: {
type: EventType.ADD_IDENTITY_KUBERNETES_AUTH,
metadata: {
identityId: identityKubernetesAuth.identityId,
kubernetesHost: identityKubernetesAuth.kubernetesHost,
allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
allowedNames: identityKubernetesAuth.allowedNames,
accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityKubernetesAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit
}
}
});
return { identityKubernetesAuth: IdentityKubernetesAuthResponseSchema.parse(identityKubernetesAuth) };
}
});
server.route({
method: "PATCH",
url: "/kubernetes-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update Kubernetes Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
body: z.object({
kubernetesHost: z.string().trim().min(1).optional(),
caCert: z.string().trim().optional(),
tokenReviewerJwt: z.string().trim().min(1).optional(),
allowedNamespaces: z.string().optional(), // TODO: validation
allowedNames: z.string().optional(),
allowedAudience: z.string().optional(),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional(),
accessTokenTTL: z.number().int().min(0).optional(),
accessTokenNumUsesLimit: z.number().int().min(0).optional(),
accessTokenMaxTTL: z
.number()
.int()
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.optional()
}),
response: {
200: z.object({
identityKubernetesAuth: IdentityKubernetesAuthsSchema
})
}
},
handler: async (req) => {
const identityKubernetesAuth = await server.services.identityKubernetesAuth.updateKubernetesAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityKubernetesAuth.orgId,
event: {
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH,
metadata: {
identityId: identityKubernetesAuth.identityId,
kubernetesHost: identityKubernetesAuth.kubernetesHost,
allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
allowedNames: identityKubernetesAuth.allowedNames,
accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
accessTokenMaxTTL: identityKubernetesAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityKubernetesAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityKubernetesAuth.accessTokenNumUsesLimit
}
}
});
return { identityKubernetesAuth };
}
});
server.route({
method: "GET",
url: "/kubernetes-auth/identities/:identityId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Retrieve Kubernetes Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string()
}),
response: {
200: z.object({
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
})
}
},
handler: async (req) => {
const identityKubernetesAuth = await server.services.identityKubernetesAuth.getKubernetesAuth({
identityId: req.params.identityId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityKubernetesAuth.orgId,
event: {
type: EventType.GET_IDENTITY_KUBERNETES_AUTH,
metadata: {
identityId: identityKubernetesAuth.identityId
}
}
});
return { identityKubernetesAuth: IdentityKubernetesAuthResponseSchema.parse(identityKubernetesAuth) };
}
});
};
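Editor's note: a minimal in-cluster sketch of the "/kubernetes-auth/login" exchange above, where a pod sends its projected service account token as the jwt field. BASE_URL and the /api/v1/auth prefix are assumptions; the token path is the conventional Kubernetes service account location, also an assumption about the caller's environment.

import { readFileSync } from "node:fs";

const BASE_URL = process.env.INFISICAL_URL ?? "https://app.example.com"; // assumed base URL
const SA_TOKEN_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/token";

export async function kubernetesAuthLogin(identityId: string) {
  const jwt = readFileSync(SA_TOKEN_PATH, "utf8").trim();
  const res = await fetch(`${BASE_URL}/api/v1/auth/kubernetes-auth/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ identityId, jwt }) // matches the route's body schema
  });
  if (!res.ok) throw new Error(`Kubernetes auth login failed with status ${res.status}`);
  return (await res.json()) as {
    accessToken: string;
    expiresIn: number;
    accessTokenMaxTTL: number;
    tokenType: "Bearer";
  };
}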

View File

@@ -2,10 +2,6 @@ import { registerAdminRouter } from "./admin-router";
 import { registerAuthRoutes } from "./auth-router";
 import { registerProjectBotRouter } from "./bot-router";
 import { registerIdentityAccessTokenRouter } from "./identity-access-token-router";
-import { registerIdentityAwsAuthRouter } from "./identity-aws-iam-auth-router";
-import { registerIdentityAzureAuthRouter } from "./identity-azure-auth-router";
-import { registerIdentityGcpAuthRouter } from "./identity-gcp-auth-router";
-import { registerIdentityKubernetesRouter } from "./identity-kubernetes-auth-router";
 import { registerIdentityRouter } from "./identity-router";
 import { registerIdentityUaRouter } from "./identity-ua";
 import { registerIntegrationAuthRouter } from "./integration-auth-router";
@@ -19,7 +15,6 @@ import { registerProjectMembershipRouter } from "./project-membership-router";
 import { registerProjectRouter } from "./project-router";
 import { registerSecretFolderRouter } from "./secret-folder-router";
 import { registerSecretImportRouter } from "./secret-import-router";
-import { registerSecretSharingRouter } from "./secret-sharing-router";
 import { registerSecretTagRouter } from "./secret-tag-router";
 import { registerSsoRouter } from "./sso-router";
 import { registerUserActionRouter } from "./user-action-router";
@@ -32,11 +27,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
     async (authRouter) => {
       await authRouter.register(registerAuthRoutes);
       await authRouter.register(registerIdentityUaRouter);
-      await authRouter.register(registerIdentityKubernetesRouter);
-      await authRouter.register(registerIdentityGcpAuthRouter);
       await authRouter.register(registerIdentityAccessTokenRouter);
-      await authRouter.register(registerIdentityAwsAuthRouter);
-      await authRouter.register(registerIdentityAzureAuthRouter);
     },
     { prefix: "/auth" }
   );
@@ -66,5 +57,4 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
   await server.register(registerIntegrationAuthRouter, { prefix: "/integration-auth" });
   await server.register(registerWebhookRouter, { prefix: "/webhooks" });
   await server.register(registerIdentityRouter, { prefix: "/identities" });
-  await server.register(registerSecretSharingRouter, { prefix: "/secret-sharing" });
 };

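Editor's note: a minimal sketch of how the prefixed registrations above compose, written with plain Fastify rather than the project's FastifyZodProvider wrapper (an assumption made for brevity). A route declared as "/azure-auth/login" inside a plugin registered with { prefix: "/auth" } is served at "/auth/azure-auth/login", so removing a register(...) call from this index unmounts every route that plugin declares.

import Fastify from "fastify";

// Plain-Fastify sketch; the real project layers Zod type providers on top of this.
const server = Fastify();

server.register(
  async (authRouter) => {
    // Served at /auth/azure-auth/login because of the prefix below.
    authRouter.post("/azure-auth/login", async () => ({ ok: true }));
  },
  { prefix: "/auth" }
);

server.listen({ port: 3000 });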
View File

@@ -330,7 +330,7 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
           teams: z
             .object({
               name: z.string(),
-              id: z.string()
+              id: z.string().optional()
             })
             .array()
         })

View File

@@ -8,7 +8,6 @@ import { writeLimit } from "@app/server/config/rateLimiter";
 import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
-import { IntegrationMappingBehavior } from "@app/services/integration-auth/integration-list";
 import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";

 export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
@@ -42,7 +41,6 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
         targetService: z.string().trim().optional().describe(INTEGRATION.CREATE.targetService),
         targetServiceId: z.string().trim().optional().describe(INTEGRATION.CREATE.targetServiceId),
         owner: z.string().trim().optional().describe(INTEGRATION.CREATE.owner),
-        url: z.string().trim().optional().describe(INTEGRATION.CREATE.url),
         path: z.string().trim().optional().describe(INTEGRATION.CREATE.path),
         region: z.string().trim().optional().describe(INTEGRATION.CREATE.region),
         scope: z.string().trim().optional().describe(INTEGRATION.CREATE.scope),
@@ -51,10 +49,6 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
             secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
             secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
             initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
-            mappingBehavior: z
-              .nativeEnum(IntegrationMappingBehavior)
-              .optional()
-              .describe(INTEGRATION.CREATE.metadata.mappingBehavior),
             shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
             secretGCPLabel: z
               .object({
@@ -72,8 +66,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
               )
               .optional()
               .describe(INTEGRATION.CREATE.metadata.secretAWSTag),
-            kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
-            shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
+            kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId)
           })
           .default({})
       }),
@@ -149,8 +142,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
         integrationId: z.string().trim().describe(INTEGRATION.UPDATE.integrationId)
       }),
       body: z.object({
-        app: z.string().trim().optional().describe(INTEGRATION.UPDATE.app),
-        appId: z.string().trim().optional().describe(INTEGRATION.UPDATE.appId),
+        app: z.string().trim().describe(INTEGRATION.UPDATE.app),
+        appId: z.string().trim().describe(INTEGRATION.UPDATE.appId),
         isActive: z.boolean().describe(INTEGRATION.UPDATE.isActive),
         secretPath: z
           .string()
@@ -160,34 +153,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
           .describe(INTEGRATION.UPDATE.secretPath),
         targetEnvironment: z.string().trim().describe(INTEGRATION.UPDATE.targetEnvironment),
         owner: z.string().trim().describe(INTEGRATION.UPDATE.owner),
-        environment: z.string().trim().describe(INTEGRATION.UPDATE.environment),
-        metadata: z
-          .object({
-            secretPrefix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretPrefix),
-            secretSuffix: z.string().optional().describe(INTEGRATION.CREATE.metadata.secretSuffix),
-            initialSyncBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.initialSyncBehavoir),
-            mappingBehavior: z.string().optional().describe(INTEGRATION.CREATE.metadata.mappingBehavior),
-            shouldAutoRedeploy: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldAutoRedeploy),
-            secretGCPLabel: z
-              .object({
-                labelName: z.string(),
-                labelValue: z.string()
-              })
-              .optional()
-              .describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
-            secretAWSTag: z
-              .array(
-                z.object({
-                  key: z.string(),
-                  value: z.string()
-                })
-              )
-              .optional()
-              .describe(INTEGRATION.CREATE.metadata.secretAWSTag),
-            kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId),
-            shouldDisableDelete: z.boolean().optional().describe(INTEGRATION.CREATE.metadata.shouldDisableDelete)
-          })
-          .optional()
+        environment: z.string().trim().describe(INTEGRATION.UPDATE.environment)
       }),
       response: {
         200: z.object({
@@ -269,64 +235,5 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
       }
     });

-  server.route({
-    method: "POST",
-    url: "/:integrationId/sync",
-    config: {
-      rateLimit: writeLimit
-    },
-    schema: {
-      description: "Manually trigger sync of an integration by integration id",
-      security: [
-        {
-          bearerAuth: []
-        }
-      ],
-      params: z.object({
-        integrationId: z.string().trim().describe(INTEGRATION.SYNC.integrationId)
-      }),
-      response: {
-        200: z.object({
-          integration: IntegrationsSchema
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
-    handler: async (req) => {
-      const integration = await server.services.integration.syncIntegration({
-        actorId: req.permission.id,
-        actor: req.permission.type,
-        actorAuthMethod: req.permission.authMethod,
-        actorOrgId: req.permission.orgId,
-        id: req.params.integrationId
-      });
-      await server.services.auditLog.createAuditLog({
-        ...req.auditLogInfo,
-        projectId: integration.projectId,
-        event: {
-          type: EventType.MANUAL_SYNC_INTEGRATION,
-          // eslint-disable-next-line
-          metadata: shake({
-            integrationId: integration.id,
-            integration: integration.integration,
-            environment: integration.environment.slug,
-            secretPath: integration.secretPath,
-            url: integration.url,
-            app: integration.app,
-            appId: integration.appId,
-            targetEnvironment: integration.targetEnvironment,
-            targetEnvironmentId: integration.targetEnvironmentId,
-            targetService: integration.targetService,
-            targetServiceId: integration.targetServiceId,
-            path: integration.path,
-            region: integration.region
-            // eslint-disable-next-line
-          }) as any
-        }
-      });
-      return { integration };
-    }
-  });
+  // TODO(akhilmhdh-pg): manual sync
 };

View File

@@ -9,7 +9,7 @@ import {
   UsersSchema
 } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
-import { PROJECT_USERS } from "@app/lib/api-docs";
+import { PROJECTS } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -30,7 +30,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
       }
     ],
     params: z.object({
-      workspaceId: z.string().trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIPS.workspaceId)
+      workspaceId: z.string().trim().describe(PROJECTS.GET_USER_MEMBERSHIPS.workspaceId)
     }),
     response: {
       200: z.object({
@@ -74,66 +74,6 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
      }
   });

-  server.route({
-    method: "POST",
-    url: "/:workspaceId/memberships/details",
-    config: {
-      rateLimit: readLimit
-    },
-    schema: {
-      description: "Return project user memberships",
-      security: [
-        {
-          bearerAuth: []
-        }
-      ],
-      params: z.object({
-        workspaceId: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.workspaceId)
-      }),
-      body: z.object({
-        username: z.string().min(1).trim().describe(PROJECT_USERS.GET_USER_MEMBERSHIP.username)
-      }),
-      response: {
-        200: z.object({
-          membership: ProjectMembershipsSchema.extend({
-            user: UsersSchema.pick({
-              email: true,
-              firstName: true,
-              lastName: true,
-              id: true
-            }).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
-            roles: z.array(
-              z.object({
-                id: z.string(),
-                role: z.string(),
-                customRoleId: z.string().optional().nullable(),
-                customRoleName: z.string().optional().nullable(),
-                customRoleSlug: z.string().optional().nullable(),
-                isTemporary: z.boolean(),
-                temporaryMode: z.string().optional().nullable(),
-                temporaryRange: z.string().nullable().optional(),
-                temporaryAccessStartTime: z.date().nullable().optional(),
-                temporaryAccessEndTime: z.date().nullable().optional()
-              })
-            )
-          }).omit({ createdAt: true, updatedAt: true })
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
-    handler: async (req) => {
-      const membership = await server.services.projectMembership.getProjectMembershipByUsername({
-        actorId: req.permission.id,
-        actor: req.permission.type,
-        actorAuthMethod: req.permission.authMethod,
-        actorOrgId: req.permission.orgId,
-        projectId: req.params.workspaceId,
-        username: req.body.username
-      });
-      return { membership };
-    }
-  });
   server.route({
     method: "POST",
     url: "/:workspaceId/memberships",
@@ -202,8 +142,8 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
       }
     ],
     params: z.object({
-      workspaceId: z.string().trim().describe(PROJECT_USERS.UPDATE_USER_MEMBERSHIP.workspaceId),
-      membershipId: z.string().trim().describe(PROJECT_USERS.UPDATE_USER_MEMBERSHIP.membershipId)
+      workspaceId: z.string().trim().describe(PROJECTS.UPDATE_USER_MEMBERSHIP.workspaceId),
+      membershipId: z.string().trim().describe(PROJECTS.UPDATE_USER_MEMBERSHIP.membershipId)
     }),
     body: z.object({
       roles: z
@@ -224,7 +164,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
         )
         .min(1)
         .refine((data) => data.some(({ isTemporary }) => !isTemporary), "At least one long lived role is required")
-        .describe(PROJECT_USERS.UPDATE_USER_MEMBERSHIP.roles)
+        .describe(PROJECTS.UPDATE_USER_MEMBERSHIP.roles)
     }),
     response: {
       200: z.object({

View File

@@ -127,70 +127,6 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
      }
   });

-  server.route({
-    url: "/batch",
-    method: "PATCH",
-    config: {
-      rateLimit: secretsLimit
-    },
-    schema: {
-      description: "Update folders by batch",
-      security: [
-        {
-          bearerAuth: []
-        }
-      ],
-      body: z.object({
-        projectSlug: z.string().trim().describe(FOLDERS.UPDATE.projectSlug),
-        folders: z
-          .object({
-            id: z.string().describe(FOLDERS.UPDATE.folderId),
-            environment: z.string().trim().describe(FOLDERS.UPDATE.environment),
-            name: z.string().trim().describe(FOLDERS.UPDATE.name),
-            path: z.string().trim().default("/").transform(removeTrailingSlash).describe(FOLDERS.UPDATE.path)
-          })
-          .array()
-          .min(1)
-      }),
-      response: {
-        200: z.object({
-          folders: SecretFoldersSchema.array()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
-    handler: async (req) => {
-      const { newFolders, oldFolders, projectId } = await server.services.folder.updateManyFolders({
-        ...req.body,
-        actorId: req.permission.id,
-        actor: req.permission.type,
-        actorAuthMethod: req.permission.authMethod,
-        actorOrgId: req.permission.orgId
-      });
-      await Promise.all(
-        req.body.folders.map(async (folder, index) => {
-          await server.services.auditLog.createAuditLog({
-            ...req.auditLogInfo,
-            projectId,
-            event: {
-              type: EventType.UPDATE_FOLDER,
-              metadata: {
-                environment: oldFolders[index].envId,
-                folderId: oldFolders[index].id,
-                folderPath: folder.path,
-                newFolderName: newFolders[index].name,
-                oldFolderName: oldFolders[index].name
-              }
-            }
-          });
-        })
-      );
-      return { folders: newFolders };
-    }
-  });
   // TODO(daniel): Expose this route in api reference and write docs for it.
   server.route({
     method: "DELETE",

View File

@@ -29,8 +29,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
       import: z.object({
         environment: z.string().trim().describe(SECRET_IMPORTS.CREATE.import.environment),
         path: z.string().trim().transform(removeTrailingSlash).describe(SECRET_IMPORTS.CREATE.import.path)
-      }),
-      isReplication: z.boolean().default(false)
+      })
     }),
     response: {
       200: z.object({
@@ -211,49 +210,6 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
      }
   });

-  server.route({
-    method: "POST",
-    url: "/:secretImportId/replication-resync",
-    config: {
-      rateLimit: secretsLimit
-    },
-    schema: {
-      description: "Resync secret replication of secret imports",
-      security: [
-        {
-          bearerAuth: []
-        }
-      ],
-      params: z.object({
-        secretImportId: z.string().trim().describe(SECRET_IMPORTS.UPDATE.secretImportId)
-      }),
-      body: z.object({
-        workspaceId: z.string().trim().describe(SECRET_IMPORTS.UPDATE.workspaceId),
-        environment: z.string().trim().describe(SECRET_IMPORTS.UPDATE.environment),
-        path: z.string().trim().default("/").transform(removeTrailingSlash).describe(SECRET_IMPORTS.UPDATE.path)
-      }),
-      response: {
-        200: z.object({
-          message: z.string()
-        })
-      }
-    },
-    onRequest: verifyAuth([AuthMode.JWT]),
-    handler: async (req) => {
-      const { message } = await server.services.secretImport.resyncSecretImportReplication({
-        actorId: req.permission.id,
-        actor: req.permission.type,
-        actorAuthMethod: req.permission.authMethod,
-        actorOrgId: req.permission.orgId,
-        id: req.params.secretImportId,
-        ...req.body,
-        projectId: req.body.workspaceId
-      });
-      return { message };
-    }
-  });
   server.route({
     method: "GET",
     url: "/",
@@ -276,9 +232,11 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
       200: z.object({
         message: z.string(),
         secretImports: SecretImportsSchema.omit({ importEnv: true })
-          .extend({
-            importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() })
-          })
+          .merge(
+            z.object({
+              importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() })
+            })
+          )
           .array()
       })
     }

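Editor's note: the last hunk above swaps .extend({ ... }) for .merge(z.object({ ... })). For plain object shapes these are equivalent in Zod (merge is extend with another object schema's shape), so the inferred response type is unchanged. The sketch below, using hypothetical schema names, illustrates that equivalence.

import { z } from "zod";

// Hypothetical schemas for illustration only.
const Base = z.object({ id: z.string() });

const ViaExtend = Base.extend({
  importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() })
});

const ViaMerge = Base.merge(
  z.object({ importEnv: z.object({ name: z.string(), slug: z.string(), id: z.string() }) })
);

// Both infer to { id: string; importEnv: { name: string; slug: string; id: string } }.
type FromExtend = z.infer<typeof ViaExtend>;
type FromMerge = z.infer<typeof ViaMerge>;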
View File

@@ -1,145 +0,0 @@
import { z } from "zod";
import { SecretSharingSchema } from "@app/db/schemas";
import { publicEndpointLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretSharingRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.array(SecretSharingSchema)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const sharedSecrets = await req.server.services.secretSharing.getSharedSecrets({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return sharedSecrets;
}
});
server.route({
method: "GET",
url: "/public/:id",
config: {
rateLimit: publicEndpointLimit
},
schema: {
params: z.object({
id: z.string().uuid()
}),
querystring: z.object({
hashedHex: z.string()
}),
response: {
200: SecretSharingSchema.pick({
encryptedValue: true,
iv: true,
tag: true,
expiresAt: true,
expiresAfterViews: true
})
}
},
handler: async (req) => {
const sharedSecret = await req.server.services.secretSharing.getActiveSharedSecretByIdAndHashedHex(
req.params.id,
req.query.hashedHex
);
if (!sharedSecret) return undefined;
return {
encryptedValue: sharedSecret.encryptedValue,
iv: sharedSecret.iv,
tag: sharedSecret.tag,
expiresAt: sharedSecret.expiresAt,
expiresAfterViews: sharedSecret.expiresAfterViews
};
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
encryptedValue: z.string(),
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z
.string()
.refine((date) => date === undefined || new Date(date) > new Date(), "Expires at should be a future date"),
expiresAfterViews: z.number()
}),
response: {
200: z.object({
id: z.string().uuid()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { encryptedValue, iv, tag, hashedHex, expiresAt, expiresAfterViews } = req.body;
const sharedSecret = await req.server.services.secretSharing.createSharedSecret({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
encryptedValue,
iv,
tag,
hashedHex,
expiresAt: new Date(expiresAt),
expiresAfterViews
});
return { id: sharedSecret.id };
}
});
server.route({
method: "DELETE",
url: "/:sharedSecretId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
sharedSecretId: z.string().uuid()
}),
response: {
200: SecretSharingSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { sharedSecretId } = req.params;
const deletedSharedSecret = await req.server.services.secretSharing.deleteSharedSecretById({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
sharedSecretId
});
return { ...deletedSharedSecret };
}
});
};
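Editor's note: the POST "/" route above expects the secret to arrive already encrypted (encryptedValue, iv, tag) together with a hashedHex lookup value. The exact encryption scheme and key handling used by the real client are not shown in this diff; the sketch below uses AES-256-GCM from node:crypto purely to illustrate producing a request body of that shape.

import { createCipheriv, createHash, randomBytes } from "node:crypto";

// Illustrative only: not the project's actual encryption scheme or key handling.
export function prepareSharedSecretBody(plaintext: string) {
  const key = randomBytes(32); // would be shared out-of-band with the recipient
  const iv = randomBytes(12);
  const cipher = createCipheriv("aes-256-gcm", key, iv);
  const encrypted = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  const tag = cipher.getAuthTag();

  return {
    encryptedValue: encrypted.toString("base64"),
    iv: iv.toString("base64"),
    tag: tag.toString("base64"),
    hashedHex: createHash("sha256").update(key).digest("hex"), // hypothetical lookup hash
    expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), // must be a future date
    expiresAfterViews: 1
  };
}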

View File

@@ -1,15 +1,11 @@
 import { z } from "zod";

 import { UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas";
-import { getConfig } from "@app/lib/config/env";
-import { logger } from "@app/lib/logger";
-import { authRateLimit, readLimit } from "@app/server/config/rateLimiter";
+import { readLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";

 export const registerUserRouter = async (server: FastifyZodProvider) => {
-  const appCfg = getConfig();
-
   server.route({
     method: "GET",
     url: "/",
@@ -29,29 +25,4 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
       return { user };
     }
   });

-  server.route({
-    method: "GET",
-    url: "/:userId/unlock",
-    config: {
-      rateLimit: authRateLimit
-    },
-    schema: {
-      querystring: z.object({
-        token: z.string().trim()
-      }),
-      params: z.object({
-        userId: z.string()
-      })
-    },
-    handler: async (req, res) => {
-      try {
-        await server.services.user.unlockUser(req.params.userId, req.query.token);
-      } catch (err) {
-        logger.error(`User unlock failed for ${req.params.userId}`);
-        logger.error(err);
-      }
-      return res.redirect(`${appCfg.SITE_URL}/login`);
-    }
-  });
 };

View File

@@ -7,8 +7,7 @@ import {
   ProjectMembershipRole,
   ProjectUserMembershipRolesSchema
 } from "@app/db/schemas";
-import { PROJECT_IDENTITIES } from "@app/lib/api-docs";
-import { BadRequestError } from "@app/lib/errors";
+import { PROJECTS } from "@app/lib/api-docs";
 import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -23,48 +22,12 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
     },
     onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
     schema: {
-      description: "Create project identity membership",
-      security: [
-        {
-          bearerAuth: []
-        }
-      ],
       params: z.object({
         projectId: z.string().trim(),
         identityId: z.string().trim()
       }),
       body: z.object({
-        // @depreciated
-        role: z.string().trim().optional().default(ProjectMembershipRole.NoAccess),
-        roles: z
-          .array(
-            z.union([
-              z.object({
-                role: z.string().describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
-                isTemporary: z
-                  .literal(false)
-                  .default(false)
-                  .describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role)
-              }),
-              z.object({
-                role: z.string().describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
-                isTemporary: z.literal(true).describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
-                temporaryMode: z
-                  .nativeEnum(ProjectUserMembershipTemporaryMode)
-                  .describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
-                temporaryRange: z
-                  .string()
-                  .refine((val) => ms(val) > 0, "Temporary range must be a positive number")
-                  .describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role),
-                temporaryAccessStartTime: z
-                  .string()
-                  .datetime()
-                  .describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.role)
-              })
-            ])
-          )
-          .describe(PROJECT_IDENTITIES.CREATE_IDENTITY_MEMBERSHIP.roles.description)
-          .optional()
+        role: z.string().trim().min(1).default(ProjectMembershipRole.NoAccess)
       }),
       response: {
         200: z.object({
@@ -73,9 +36,6 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
       }
     },
     handler: async (req) => {
-      const { role, roles } = req.body;
-      if (!role && !roles) throw new BadRequestError({ message: "You must provide either role or roles field" });
       const identityMembership = await server.services.identityProject.createProjectIdentity({
         actor: req.permission.type,
         actorId: req.permission.id,
@@ -83,7 +43,7 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
         actorOrgId: req.permission.orgId,
         identityId: req.params.identityId,
         projectId: req.params.projectId,
-        roles: roles || [{ role }]
+        role: req.body.role
       });
       return { identityMembership };
     }
@@ -104,39 +64,28 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
       }
     ],
     params: z.object({
-      projectId: z.string().trim().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.projectId),
-      identityId: z.string().trim().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.identityId)
+      projectId: z.string().trim().describe(PROJECTS.UPDATE_IDENTITY_MEMBERSHIP.projectId),
+      identityId: z.string().trim().describe(PROJECTS.UPDATE_IDENTITY_MEMBERSHIP.identityId)
     }),
     body: z.object({
       roles: z
         .array(
           z.union([
             z.object({
-              role: z.string().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.role),
-              isTemporary: z
-                .literal(false)
-                .default(false)
-                .describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.isTemporary)
+              role: z.string(),
+              isTemporary: z.literal(false).default(false)
             }),
             z.object({
-              role: z.string().describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.role),
-              isTemporary: z.literal(true).describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.isTemporary),
-              temporaryMode: z
-                .nativeEnum(ProjectUserMembershipTemporaryMode)
-                .describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.temporaryMode),
-              temporaryRange: z
-                .string()
-                .refine((val) => ms(val) > 0, "Temporary range must be a positive number")
-                .describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.temporaryRange),
-              temporaryAccessStartTime: z
-                .string()
-                .datetime()
-                .describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.temporaryAccessStartTime)
+              role: z.string(),
+              isTemporary: z.literal(true),
+              temporaryMode: z.nativeEnum(ProjectUserMembershipTemporaryMode),
+              temporaryRange: z.string().refine((val) => ms(val) > 0, "Temporary range must be a positive number"),
+              temporaryAccessStartTime: z.string().datetime()
            })
           ])
         )
         .min(1)
-        .describe(PROJECT_IDENTITIES.UPDATE_IDENTITY_MEMBERSHIP.roles.description)
+        .describe(PROJECTS.UPDATE_IDENTITY_MEMBERSHIP.roles)
     }),
     response: {
       200: z.object({
@@ -173,8 +122,8 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
       }
     ],
     params: z.object({
-      projectId: z.string().trim().describe(PROJECT_IDENTITIES.DELETE_IDENTITY_MEMBERSHIP.projectId),
-      identityId: z.string().trim().describe(PROJECT_IDENTITIES.DELETE_IDENTITY_MEMBERSHIP.identityId)
+      projectId: z.string().trim().describe(PROJECTS.DELETE_IDENTITY_MEMBERSHIP.projectId),
+      identityId: z.string().trim().describe(PROJECTS.DELETE_IDENTITY_MEMBERSHIP.identityId)
     }),
     response: {
       200: z.object({
@@ -210,7 +159,7 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
       }
     ],
     params: z.object({
-      projectId: z.string().trim().describe(PROJECT_IDENTITIES.LIST_IDENTITY_MEMBERSHIPS.projectId)
+      projectId: z.string().trim().describe(PROJECTS.LIST_IDENTITY_MEMBERSHIPS.projectId)
     }),
     response: {
       200: z.object({
@@ -251,61 +200,4 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
       return { identityMemberships };
     }
   });

-  server.route({
-    method: "GET",
-    url: "/:projectId/identity-memberships/:identityId",
-    config: {
-      rateLimit: readLimit
-    },
-    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
-    schema: {
-      description: "Return project identity membership",
-      security: [
-        {
-          bearerAuth: []
-        }
-      ],
-      params: z.object({
-        projectId: z.string().trim().describe(PROJECT_IDENTITIES.GET_IDENTITY_MEMBERSHIP_BY_ID.projectId),
-        identityId: z.string().trim().describe(PROJECT_IDENTITIES.GET_IDENTITY_MEMBERSHIP_BY_ID.identityId)
-      }),
-      response: {
-        200: z.object({
-          identityMembership: z.object({
-            id: z.string(),
-            identityId: z.string(),
-            createdAt: z.date(),
-            updatedAt: z.date(),
-            roles: z.array(
-              z.object({
-                id: z.string(),
-                role: z.string(),
-                customRoleId: z.string().optional().nullable(),
-                customRoleName: z.string().optional().nullable(),
-                customRoleSlug: z.string().optional().nullable(),
-                isTemporary: z.boolean(),
-                temporaryMode: z.string().optional().nullable(),
-                temporaryRange: z.string().nullable().optional(),
-                temporaryAccessStartTime: z.date().nullable().optional(),
-                temporaryAccessEndTime: z.date().nullable().optional()
-              })
-            ),
-            identity: IdentitiesSchema.pick({ name: true, id: true, authMethod: true })
-          })
-        })
-      }
-    },
-    handler: async (req) => {
-      const identityMembership = await server.services.identityProject.getProjectIdentityByIdentityId({
-        actor: req.permission.type,
-        actorId: req.permission.id,
-        actorAuthMethod: req.permission.authMethod,
-        actorOrgId: req.permission.orgId,
-        projectId: req.params.projectId,
-        identityId: req.params.identityId
-      });
-      return { identityMembership };
-    }
-  });
 };

View File

@@ -2,7 +2,7 @@ import jwt from "jsonwebtoken";
 import { z } from "zod";

 import { getConfig } from "@app/lib/config/env";
-import { mfaRateLimit } from "@app/server/config/rateLimiter";
+import { writeLimit } from "@app/server/config/rateLimiter";
 import { AuthModeMfaJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type";

 export const registerMfaRouter = async (server: FastifyZodProvider) => {
@@ -34,7 +34,7 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
     method: "POST",
     url: "/mfa/send",
     config: {
-      rateLimit: mfaRateLimit
+      rateLimit: writeLimit
     },
     schema: {
       response: {
@@ -53,7 +53,7 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
     url: "/mfa/verify",
     method: "POST",
     config: {
-      rateLimit: mfaRateLimit
+      rateLimit: writeLimit
     },
     schema: {
       body: z.object({

View File

@@ -2,7 +2,7 @@ import { z } from "zod";
 import { ProjectMembershipsSchema } from "@app/db/schemas";
 import { EventType } from "@app/ee/services/audit-log/audit-log-types";
-import { PROJECT_USERS } from "@app/lib/api-docs";
+import { PROJECTS } from "@app/lib/api-docs";
 import { writeLimit } from "@app/server/config/rateLimiter";
 import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
 import { AuthMode } from "@app/services/auth/auth-type";
@@ -22,11 +22,11 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
       }
     ],
     params: z.object({
-      projectId: z.string().describe(PROJECT_USERS.INVITE_MEMBER.projectId)
+      projectId: z.string().describe(PROJECTS.INVITE_MEMBER.projectId)
     }),
     body: z.object({
-      emails: z.string().email().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.emails),
-      usernames: z.string().array().default([]).describe(PROJECT_USERS.INVITE_MEMBER.usernames)
+      emails: z.string().email().array().default([]).describe(PROJECTS.INVITE_MEMBER.emails),
+      usernames: z.string().array().default([]).describe(PROJECTS.INVITE_MEMBER.usernames)
     }),
     response: {
       200: z.object({
@@ -77,11 +77,11 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
       }
     ],
     params: z.object({
-      projectId: z.string().describe(PROJECT_USERS.REMOVE_MEMBER.projectId)
+      projectId: z.string().describe(PROJECTS.REMOVE_MEMBER.projectId)
     }),
     body: z.object({
-      emails: z.string().email().array().default([]).describe(PROJECT_USERS.REMOVE_MEMBER.emails),
-      usernames: z.string().array().default([]).describe(PROJECT_USERS.REMOVE_MEMBER.usernames)
+      emails: z.string().email().array().default([]).describe(PROJECTS.REMOVE_MEMBER.emails),
+      usernames: z.string().array().default([]).describe(PROJECTS.REMOVE_MEMBER.usernames)
     }),
     response: {
       200: z.object({

Some files were not shown because too many files have changed in this diff.