mirror of https://github.com/Infisical/infisical.git synced 2025-03-29 13:26:20 +00:00

Compare commits

..

1 Commits

Author SHA1 Message Date
cf3630c5cf fix: backend/package.json & backend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-FASTXMLPARSER-7573289
2024-07-31 09:09:33 +00:00
1113 changed files with 17451 additions and 61204 deletions
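The commit above resolves the fast-xml-parser advisory by upgrading the affected dependency tree in backend/package.json and backend/package-lock.json. A minimal sketch of how such a transitive bump can be forced by hand is shown below; it is illustrative only and not taken from this commit, and it assumes npm 8.3+ (for the overrides field) and that 4.4.1 is the patched release named in the advisory.

# hedged sketch, not from this commit: pin the patched fast-xml-parser release via an npm override
cd backend
npm pkg set overrides.fast-xml-parser="^4.4.1"   # assumes 4.4.1 is the fixed version per the advisory
npm install                                      # regenerates package-lock.json against the pinned range

Whether this particular commit used an override or direct dependency bumps is not visible from the compare summary alone.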
.env.example
.env.migration.example
.github
.gitignore
Makefile
README.md
backend
e2e-test
package-lock.json
package.json
scripts
src
@types
db
auditlog-knexfile.ts
index.ts
instance.ts
migrations
schemas
ee
routes
services
access-approval-policy
access-approval-request
audit-log-stream
audit-log
certificate-authority-crl
certificate-est
dynamic-secret-lease
dynamic-secret
external-kms
group
identity-project-additional-privilege
ldap-config
license
oidc
permission
project-user-additional-privilege
rate-limit
saml-config
scim
secret-approval-policy
secret-approval-request
secret-replication
secret-rotation
secret-scanning
secret-snapshot
keystore
lib
main.ts
queue
server
services
access-token-queue
api-key
auth-token
auth
certificate-authority
certificate-template
certificate
cmek
external-migration
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration
kms
org-admin
org-membership
org
pki-alert
pki-collection
project-bot
project-env
project-membership
project-role
project
resource-cleanup
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-tag
secret-v2-bridge
secret
service-token
slack
smtp
super-admin
telemetry
user
webhook
workflow-integration
vitest.e2e.config.ts
cli
company
docker-compose.dev.yml
docs
api-reference/endpoints
cli
documentation
images
guides/import-envkey
integrations
platform
dynamic-secrets
kms
mfa/entra
pki
workflow-integrations/slack-integration
sso
integrations
internals
mint.json
sdks
self-hosting
frontend
next.config.js
package-lock.json
package.json
public
data
images/integrations
src
components
context
OrgPermissionContext
ProjectPermissionContext
UserContext
WorkspaceContext
index.tsx
helpers
hooks
api
index.ts
useDebounce.tsx
usePagination.tsx
usePopUp.tsx
useToggle.tsx
utils
layouts/AppLayout
pages
_app.tsx
integrations
aws-parameter-store
aws-secret-manager
azure-devops
circleci
databricks
details
github
login
org/[id]
admin
overview
project/[id]
audit-logs
ca/[caId]
kms
pki-collections/[collectionId]
signupinvite.tsx
views
IntegrationsPage
Login
Login.utils.tsx
components/InitialStep
Org
OrgAdminPage
Project
AuditLogsPage
CaPage
CertificatesPage
KmsPage
MembersPage/components
PkiCollectionPage
RolePage
Types
SecretApprovalPage/components
SecretMainPage
SecretOverviewPage
Settings
ShareSecretPage/components
ShareSecretPublicPage/components
ViewSecretPublicPage
admin/DashboardPage
helm-charts/secrets-operator
k8-operator/controllers
nginx
standalone-entrypoint.sh

@@ -70,5 +70,3 @@ NEXT_PUBLIC_CAPTCHA_SITE_KEY=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=

@@ -1,2 +1 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=

@@ -6,7 +6,6 @@
- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation

@@ -6,15 +6,9 @@ permissions:
contents: read
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3

@@ -22,14 +22,14 @@ jobs:
# uncomment this when testing locally using nektos/act
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 📦Build the latest image
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
@@ -72,6 +72,6 @@ jobs:
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api

@@ -20,7 +20,7 @@ jobs:
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
@@ -33,7 +33,7 @@ jobs:
run: npm install
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
run: npm run test:e2e
working-directory: backend
@@ -44,4 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker-compose -f "docker-compose.dev.yml" down

@@ -50,6 +50,6 @@ jobs:
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

.gitignore (vendored)

@@ -63,7 +63,6 @@ yarn-error.log*
# Editor specific
.vscode/*
.idea/*
frontend-build

@@ -15,16 +15,3 @@ up-prod:
down:
docker compose -f docker-compose.dev.yml down
reviewable-ui:
cd frontend && \
npm run lint:fix && \
npm run type:check
reviewable-api:
cd backend && \
npm run lint:fix && \
npm run type:check
reviewable: reviewable-ui reviewable-api

File diff suppressed because one or more lines are too long

@@ -123,7 +123,7 @@ describe("Project Environment Router", async () => {
id: deletedProjectEnvironment.id,
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 5,
position: 4,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})

@@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-approvals`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
secretPath: dto.secretPath,
approvers: dto.approvers,
approvals: dto.approvals
}
});
expect(res.statusCode).toBe(200);
return res.json().approval;
};
describe("Secret approval policy router", async () => {
test("Create policy", async () => {
const policy = await createPolicy({
secretPath: "/",
approvals: 1,
approvers: [{id:seedData1.id, type: ApproverType.User}],
name: "test-policy"
});
expect(policy.name).toBe("test-policy");
});
});

@@ -1,61 +1,73 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
const createSecretImport = async (importPath: string, importEnv: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
const deleteSecretImport = async (id: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// check for default environments
const payload = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath,
importEnv
});
const payload = await createSecretImport(importPath, importEnv);
expect(payload).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath,
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: importEnv,
slug: expect.any(String),
id: expect.any(String)
})
})
);
await deleteSecretImport({
id: payload.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(payload.id);
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
@@ -77,60 +89,25 @@ describe("Secret Import Router", async () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
id: expect.any(String)
})
}),
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "staging",
slug: expect.any(String),
id: expect.any(String)
})
})
])
);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: prodImportDetails.path,
importEnv: prodImportDetails.envSlug
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: stagingImportDetails.path,
importEnv: stagingImportDetails.envSlug
});
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
method: "PATCH",
@@ -184,55 +161,22 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const deletedImport = await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
// check for default environments
expect(deletedImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
slug: expect.any(String),
id: expect.any(String)
})
})
@@ -257,552 +201,6 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport2.id);
});
});
// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import waterfall pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import multiple destination to one source pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import one source to multiple destination pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const stageImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
const prodImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
return async () => {
await deleteSecretImport({
id: prodImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: stageImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const stagingSecret = await getSecretByNameV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
expect(stagingSecret.secretValue).toBe("stage-value");
const prodSecret = await getSecretByNameV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(prodSecret.secretKey).toBe("PROD_KEY");
expect(prodSecret.secretValue).toBe("prod-value");
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);

@@ -1,406 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
"Secret replication waterfall pattern testing - %secretPath",
({ secretPath: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret replication 1-N pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);

@@ -510,7 +510,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
@@ -532,7 +532,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
@@ -557,7 +557,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(writeSecrets.statusCode).toBe(403);
expect(writeSecrets.statusCode).toBe(401);
expect(writeSecrets.json().error).toBe("PermissionDenied");
// but read access should still work fine

@@ -1,330 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret expansion", () => {
const projectId = seedData1.projectV3.id;
beforeAll(async () => {
const prodRootFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/",
name: "deep"
});
await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
name: "nested"
});
return async () => {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodRootFolder.id,
workspaceId: projectId,
environmentSlug: "prod"
});
};
});
test("Local secret reference", async () => {
const secrets = [
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "HELLO",
value: "world"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST",
// eslint-disable-next-line
value: "hello ${HELLO}"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST"
});
expect(expandedSecret.secretValue).toBe("hello world");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "TEST",
secretValue: "hello world"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Cross environment secret reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY"
});
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "KEY",
secretValue: "hello testing secret reference"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested"
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: "/deep/nested",
environment: "prod",
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
});
test(
"Replicated secret import secret expansion on local reference and nested reference",
async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested",
isReplication: true
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
environment: seedData1.environment.slug,
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
},
{ timeout: 10000 }
);
});

@@ -8,7 +8,6 @@ type TRawSecret = {
secretComment?: string;
version: number;
};
const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,

@@ -1075,7 +1075,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(404);
expect(createSecRes.statusCode).toBe(400);
});
test("Update secret raw", async () => {
@@ -1093,7 +1093,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(404);
expect(updateSecRes.statusCode).toBe(400);
});
test("Delete secret raw", async () => {
@@ -1110,6 +1110,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: deletedSecretReqBody
});
expect(deletedSecRes.statusCode).toBe(404);
expect(deletedSecRes.statusCode).toBe(400);
});
});

@@ -1,73 +0,0 @@
type TFolder = {
id: string;
name: string;
};
export const createFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
name: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
name: dto.name,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const deleteFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
id: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const listFolders = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folders as TFolder[];
};

@@ -1,93 +0,0 @@
type TSecretImport = {
id: string;
importEnv: {
name: string;
slug: string;
id: string;
};
importPath: string;
};
export const createSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
isReplication?: boolean;
secretPath: string;
importPath: string;
importEnv: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
isReplication: dto.isReplication,
path: dto.secretPath,
import: {
environment: dto.importEnv,
path: dto.importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const deleteSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
id: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const listSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
return payload.secretImports as TSecretImport[];
};

@@ -1,128 +0,0 @@
import { SecretType } from "@app/db/schemas";
type TRawSecret = {
secretKey: string;
secretValue: string;
secretComment?: string;
version: number;
};
export const createSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
value: string;
comment?: string;
authToken: string;
type?: SecretType;
}) => {
const createSecretReqBody = {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
type: dto.type || SecretType.Shared,
secretPath: dto.secretPath,
secretKey: dto.key,
secretValue: dto.value,
secretComment: dto.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret as TRawSecret;
};
export const deleteSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret as TRawSecret;
};
export const getSecretByNameV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const response = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(response.statusCode).toBe(200);
const payload = JSON.parse(response.payload);
expect(payload).toHaveProperty("secret");
return payload.secret as TRawSecret;
};
export const getSecretsV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(getSecretsResponse.statusCode).toBe(200);
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
expect(getSecretsPayload).toHaveProperty("secrets");
expect(getSecretsPayload).toHaveProperty("imports");
return getSecretsPayload as {
secrets: TRawSecret[];
imports: {
secretPath: string;
environment: string;
folderId: string;
secrets: TRawSecret[];
}[];
};
};

@@ -11,11 +11,10 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -29,31 +28,19 @@ export default {
dbRootCert: cfg.DB_ROOT_CERT
});
const redis = new Redis(cfg.REDIS_URL);
await redis.flushdb("SYNC");
try {
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL);
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const queue = mockQueue();
const keyStore = mockKeyStore();
const server = await main({ db, smtp, logger, queue, keyStore });
// @ts-expect-error type
globalThis.testServer = server;
@@ -71,12 +58,10 @@ export default {
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}
// custom setup
return {
async teardown() {
@@ -95,9 +80,6 @@ export default {
},
true
);
await redis.flushdb("ASYNC");
redis.disconnect();
await db.destroy();
}
};

backend/package-lock.json (generated)

File diff suppressed because it is too large

@@ -34,9 +34,9 @@
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup --sourcemap",
"build": "tsup",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"start": "node dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
@@ -45,19 +45,12 @@
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
@@ -85,8 +78,6 @@
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@@ -110,16 +101,15 @@
"tsup": "^8.0.1",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
"@aws-sdk/client-kms": "^3.621.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
"@elastic/elasticsearch": "^8.15.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
@@ -131,15 +121,12 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
@ -165,10 +152,8 @@
"jwks-rsa": "^3.1.0",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"ldif": "0.5.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
@ -184,13 +169,8 @@
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"pkijs": "^3.2.4",
"posthog-node": "^3.6.2",
"probot": "^13.3.8",
"safe-regex": "^2.1.1",
"scim-patch": "^0.8.3",
"scim2-parse-filter": "^0.2.10",
"sjcl": "^1.0.8",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",

@ -7,33 +7,14 @@ const prompt = promptSync({
sigint: true
});
type ComponentType = 1 | 2 | 3;
console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);
function getComponentType(): ComponentType {
while (true) {
const input = prompt("Select a component (0-3): ");
const componentType = parseInt(input, 10);
if (componentType === 0) {
console.log("Exiting the program. Goodbye!");
process.exit(0);
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
return componentType;
} else {
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
}
}
}
const componentType = getComponentType();
const componentType = parseInt(prompt("Select a component: "), 10);
if (componentType === 1) {
const componentName = prompt("Enter service name: ");

@ -90,7 +90,7 @@ const main = async () => {
.whereRaw("table_schema = current_schema()")
.select<{ tableName: string }[]>("table_name as tableName")
.orderBy("table_name")
).filter((el) => !el.tableName.includes("_migrations") && !el.tableName.includes("partitioned_audit_logs_"));
).filter((el) => !el.tableName.includes("_migrations"));
for (let i = 0; i < tables.length; i += 1) {
const { tableName } = tables[i];

@ -7,7 +7,6 @@ import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-se
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
@ -19,7 +18,6 @@ import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-ser
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@ -37,9 +35,6 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@ -55,9 +50,6 @@ import { TIntegrationServiceFactory } from "@app/services/integration/integratio
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@ -72,14 +64,12 @@ import { TSecretReplicationServiceFactory } from "@app/services/secret-replicati
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "fastify" {
interface FastifyRequest {
@ -98,7 +88,6 @@ declare module "fastify" {
id: string;
orgId: string;
};
rateLimits: RateLimitConfiguration;
// passport data
passportUser: {
isUserCompleted: string;
@ -124,7 +113,6 @@ declare module "fastify" {
group: TGroupServiceFactory;
groupProject: TGroupProjectServiceFactory;
apiKey: TApiKeyServiceFactory;
pkiAlert: TPkiAlertServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;
projectEnv: TProjectEnvServiceFactory;
@ -162,11 +150,8 @@ declare module "fastify" {
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
pkiCollection: TPkiCollectionServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
@ -180,11 +165,6 @@ declare module "fastify" {
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
orgAdmin: TOrgAdminServiceFactory;
slack: TSlackServiceFactory;
workflowIntegration: TWorkflowIntegrationServiceFactory;
cmek: TCmekServiceFactory;
migration: TExternalMigrationServiceFactory;
};
// this is exclusively for middlewares in which we need to inject data;
// everywhere else, access it through the service layer

@ -53,12 +53,6 @@ import {
TCertificateSecretsUpdate,
TCertificatesInsert,
TCertificatesUpdate,
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate,
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
@ -101,9 +95,6 @@ import {
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate,
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate,
@ -170,18 +161,6 @@ import {
TOrgRoles,
TOrgRolesInsert,
TOrgRolesUpdate,
TPartitionedAuditLogs,
TPartitionedAuditLogsInsert,
TPartitionedAuditLogsUpdate,
TPkiAlerts,
TPkiAlertsInsert,
TPkiAlertsUpdate,
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate,
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
@ -199,9 +178,6 @@ import {
TProjectRolesUpdate,
TProjects,
TProjectsInsert,
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectsUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
@ -308,9 +284,6 @@ import {
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate,
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@ -334,10 +307,7 @@ import {
TUsersUpdate,
TWebhooks,
TWebhooksInsert,
TWebhooksUpdate,
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
TWebhooksUpdate
} from "@app/db/schemas";
import {
TSecretV2TagJunction,
@ -385,16 +355,6 @@ declare module "knex/types/tables" {
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate
>;
[TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate
>;
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
@ -405,17 +365,6 @@ declare module "knex/types/tables" {
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
[TableName.PkiCollection]: KnexOriginal.CompositeTableType<
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate
>;
[TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate
>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
@ -552,11 +501,6 @@ declare module "knex/types/tables" {
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate
>;
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
@ -718,11 +662,6 @@ declare module "knex/types/tables" {
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate
>;
[TableName.PartitionedAuditLog]: KnexOriginal.CompositeTableType<
TPartitionedAuditLogs,
TPartitionedAuditLogsInsert,
TPartitionedAuditLogsUpdate
>;
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
@ -801,20 +740,5 @@ declare module "knex/types/tables" {
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate
>;
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate
>;
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate
>;
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
>;
}
}

@ -1,4 +0,0 @@
declare module "ldif" {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
function parse(input: string, ...args: any[]): any;
}

@ -1,75 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";
import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
});
if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) {
console.info("Dedicated audit log database not found. No further migrations necessary");
process.exit(0);
}
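// This knexfile is consumed by the `auditlog-migration:*` scripts in package.json, e.g.
// `knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest`.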
console.info("Executing migration on audit log database...");
export default {
development: {
client: "postgres",
connection: {
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
host: process.env.AUDIT_LOGS_DB_HOST,
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
pool: {
min: 2,
max: 10
},
seeds: {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations"
}
},
production: {
client: "postgres",
connection: {
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
host: process.env.AUDIT_LOGS_DB_HOST,
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: "infisical_migrations"
}
}
} as Knex.Config;

@ -1,2 +1,2 @@
export type { TDbClient } from "./instance";
export { initAuditLogDbConnection, initDbConnection } from "./instance";
export { initDbConnection } from "./instance";

@ -70,45 +70,3 @@ export const initDbConnection = ({
return db;
};
export const initAuditLogDbConnection = ({
dbConnectionUri,
dbRootCert
}: {
dbConnectionUri: string;
dbRootCert?: string;
}) => {
// akhilmhdh: the default Knex type is knex.Knex<any, any[]>, but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>.
// This was causing issues with files like `snapshot-dal` (`findRecursivelySnapshots`), so the any and unknown[] are set explicitly here.
// eslint-disable-next-line
const db: Knex<any, unknown[]> = knex({
client: "pg",
connection: {
connectionString: dbConnectionUri,
host: process.env.AUDIT_LOGS_DB_HOST,
// @ts-expect-error only the port produces a type error here, for reasons that are unclear
// eslint-disable-next-line
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: dbRootCert
? {
rejectUnauthorized: true,
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
}
});
// we add these overrides so that auditLogDb and the primary DB are interchangeable
db.primaryNode = () => {
return db;
};
db.replicaNode = () => {
return db;
};
return db;
};
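// Usage sketch (illustration only, not part of the original file): wiring up the dedicated
// audit log database, assuming the AUDIT_LOGS_DB_* environment variables referenced above are set.
const auditLogDb = initAuditLogDbConnection({
  dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI as string,
  dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
});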

@ -115,14 +115,7 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
@ -154,27 +147,13 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerId: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership)),
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangeBy: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@ -198,20 +177,8 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalRequest,
`${TableName.SecretApprovalRequest}.id`,
`${TableName.SecretApprovalRequestReviewer}.requestId`
)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
tb.uuid("member").notNullable().alter();

@ -1,294 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
if (!hasApproverUserId) {
// add the new fields
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
// if (hasApproverId) tb.setNullable("approverId");
tb.uuid("approverUserId");
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
});
// convert project membership id => user id
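// (knex.raw("??", [identifier]) binds a column/table identifier rather than a value, so the
// correlated subquery below resolves the approverId of the row currently being updated.)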
await knex(TableName.AccessApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverId`]))
});
// drop the old field
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
if (hasApproverId) tb.dropColumn("approverId");
tb.uuid("approverUserId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST ------------
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
if (hasAccessApprovalRequestTable) {
// new fields
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (!hasRequestedByUserId) {
tb.uuid("requestedByUserId");
tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
// copy the assigned project membership => user id to new fields
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
requestedByUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedBy`]))
});
// drop old fields
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (hasRequestedBy) {
// DROP AT A LATER TIME
// tb.dropColumn("requestedBy");
// ADD ALLOW NULLABLE FOR NOW
tb.uuid("requestedBy").nullable().alter();
}
tb.uuid("requestedByUserId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
if (!hasReviewerUserId) {
// new fields
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
// if (hasMemberId) tb.setNullable("member");
tb.uuid("reviewerUserId");
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
});
// copy project membership => user id to new fields
await knex(TableName.AccessApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
reviewerUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.member`]))
});
// drop old fields
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
if (hasMemberId) {
// DROP AT A LATER TIME
// tb.dropColumn("member");
// ADD ALLOW NULLABLE FOR NOW
tb.uuid("member").nullable().alter();
}
tb.uuid("reviewerUserId").notNullable().alter();
});
}
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
const projectUserAdditionalPrivilegeHasProjectMembershipId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"projectMembershipId"
);
const projectUserAdditionalPrivilegeHasUserId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"userId"
);
if (!projectUserAdditionalPrivilegeHasUserId) {
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.string("projectId");
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
await knex(TableName.ProjectUserAdditionalPrivilege)
.update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
userId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectId: knex(TableName.ProjectMembership)
.select("projectId")
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`]))
})
.whereNotNull("projectMembershipId");
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.uuid("userId").notNullable().alter();
tb.string("projectId").notNullable().alter();
});
}
if (projectUserAdditionalPrivilegeHasProjectMembershipId) {
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
// DROP AT A LATER TIME
// tb.dropColumn("projectMembershipId");
// ADD ALLOW NULLABLE FOR NOW
tb.uuid("projectMembershipId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
// We remove project user additional privileges first, because this may delete records where the project membership is not found.
// The project membership won't be found on records created by group members. In those cases we just delete the record and continue.
// When the additional privilege record is deleted, it cascade-deletes the access request created by the group member.
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
const hasUserId = await knex.schema.hasColumn(TableName.ProjectUserAdditionalPrivilege, "userId");
const hasProjectMembershipId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"projectMembershipId"
);
// If it doesn't have the userId field, then the up migration has not run
if (!hasUserId) {
return;
}
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
if (!hasProjectMembershipId) {
tb.uuid("projectMembershipId");
tb.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasProjectMembershipId) {
// First, update records where a matching project membership exists
await knex(TableName.ProjectUserAdditionalPrivilege).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectMembershipId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.userId`]))
});
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectMembershipId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.userId`]))
});
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.dropColumn("userId");
tb.dropColumn("projectId");
tb.uuid("projectMembershipId").notNullable().alter();
});
}
// Then, delete records where no matching project membership was found
await knex(TableName.ProjectUserAdditionalPrivilege).whereNull("projectMembershipId").delete();
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
if (hasApproverUserId) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
if (!hasApproverId) {
tb.uuid("approverId");
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasApproverId) {
await knex(TableName.AccessApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
tb.dropColumn("approverUserId");
tb.uuid("approverId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST ------------
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
if (hasAccessApprovalRequestTable) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (!hasRequestedBy) {
tb.uuid("requestedBy");
tb.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
// Try to find a project membership based on AccessApprovalRequest.requestedByUserId and AccessApprovalRequest.policyId (reference to AccessApprovalRequestPolicy) -> envId (reference to Environment) -> projectId (reference to Project).
// If a project membership is found, set AccessApprovalRequest.requestedBy to the project membership id.
// If a project membership is not found, remove the AccessApprovalRequest record.
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
requestedBy: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedByUserId`]))
});
// Then, delete records where no matching project membership was found
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (hasRequestedByUserId) {
tb.dropColumn("requestedByUserId");
}
if (hasRequestedBy) tb.uuid("requestedBy").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
if (hasReviewerUserId) {
if (!hasMemberId) {
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
tb.uuid("member");
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
});
}
await knex(TableName.AccessApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`]))
});
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
tb.dropColumn("reviewerUserId");
tb.uuid("member").notNullable().alter();
});
}
}
}

@ -1,8 +1,178 @@
/* eslint-disable @typescript-eslint/ban-ts-comment */
import { Knex } from "knex";
import { SecretType, TableName } from "../schemas";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex/select";
import { SecretKeyEncoding, SecretType, TableName } from "../schemas";
import { createJunctionTable, createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
import { getSecretManagerDataKey } from "./utils/kms";
const backfillWebhooks = async (knex: Knex) => {
const hasEncryptedSecretKeyWithKms = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKeyWithKms");
const hasEncryptedWebhookUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasEncryptedSecretKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKey");
const hasIV = await knex.schema.hasColumn(TableName.Webhook, "iv");
const hasTag = await knex.schema.hasColumn(TableName.Webhook, "tag");
const hasKeyEncoding = await knex.schema.hasColumn(TableName.Webhook, "keyEncoding");
const hasAlgorithm = await knex.schema.hasColumn(TableName.Webhook, "algorithm");
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedSecretKeyWithKms) t.binary("encryptedSecretKeyWithKms");
if (!hasEncryptedWebhookUrl) t.binary("encryptedUrl");
if (hasUrl) t.string("url").nullable().alter();
});
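// Cache KMS encryptors per project so the secret manager data key is derived once per project
// instead of once per webhook row.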
const kmsEncryptorGroupByProjectId: Record<string, Awaited<ReturnType<typeof getSecretManagerDataKey>>["encryptor"]> =
{};
if (hasUrlCipherText && hasUrlIV && hasUrlTag && hasEncryptedSecretKey && hasIV && hasTag) {
// eslint-disable-next-line
const webhooksToFill = await knex(TableName.Webhook)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
.whereNull("encryptedUrl")
// eslint-disable-next-line
// @ts-ignore knex migration fails
.select(selectAllTableCols(TableName.Webhook))
.select("projectId");
const updatedWebhooks = [];
for (const webhook of webhooksToFill) {
if (!kmsEncryptorGroupByProjectId[webhook.projectId]) {
// eslint-disable-next-line
const { encryptor } = await getSecretManagerDataKey(knex, webhook.projectId);
kmsEncryptorGroupByProjectId[webhook.projectId] = encryptor;
}
const kmsEncryptor = kmsEncryptorGroupByProjectId[webhook.projectId];
// @ts-ignore post migration fails
let webhookUrl = webhook.url;
let webhookSecretKey;
// @ts-ignore post migration fails
if (webhook.urlTag && webhook.urlCipherText && webhook.urlIV) {
webhookUrl = infisicalSymmetricDecrypt({
// @ts-ignore post migration fails
keyEncoding: webhook.keyEncoding as SecretKeyEncoding,
// @ts-ignore post migration fails
ciphertext: webhook.urlCipherText,
// @ts-ignore post migration fails
iv: webhook.urlIV,
// @ts-ignore post migration fails
tag: webhook.urlTag
});
}
// @ts-ignore post migration fails
if (webhook.encryptedSecretKey && webhook.iv && webhook.tag) {
webhookSecretKey = infisicalSymmetricDecrypt({
// @ts-ignore post migration fails
keyEncoding: webhook.keyEncoding as SecretKeyEncoding,
// @ts-ignore post migration fails
ciphertext: webhook.encryptedSecretKey,
// @ts-ignore post migration fails
iv: webhook.iv,
// @ts-ignore post migration fails
tag: webhook.tag
});
}
const { projectId, ...el } = webhook;
updatedWebhooks.push({
...el,
encryptedSecretKeyWithKms: webhookSecretKey
? kmsEncryptor({ plainText: Buffer.from(webhookSecretKey) }).cipherTextBlob
: null,
encryptedUrl: kmsEncryptor({ plainText: Buffer.from(webhookUrl) }).cipherTextBlob
});
}
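// insert(...).onConflict("id").merge() acts as a bulk update here: every webhook already exists,
// so each row conflicts on "id" and the newly encrypted columns are merged into the existing record.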
if (updatedWebhooks.length) {
// eslint-disable-next-line
await knex(TableName.Webhook).insert(updatedWebhooks).onConflict("id").merge();
}
}
await knex.schema.alterTable(TableName.Webhook, (t) => {
t.binary("encryptedUrl").notNullable().alter();
if (hasUrlIV) t.dropColumn("urlIV");
if (hasUrlCipherText) t.dropColumn("urlCipherText");
if (hasUrlTag) t.dropColumn("urlTag");
if (hasIV) t.dropColumn("iv");
if (hasTag) t.dropColumn("tag");
if (hasEncryptedSecretKey) t.dropColumn("encryptedSecretKey");
if (hasKeyEncoding) t.dropColumn("keyEncoding");
if (hasAlgorithm) t.dropColumn("algorithm");
if (hasUrl) t.dropColumn("url");
});
};
const backfillDynamicSecretConfigs = async (knex: Knex) => {
const hasEncryptedConfig = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedConfig");
const hasInputCipherText = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
const hasInputIV = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
const hasInputTag = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
const hasKeyEncoding = await knex.schema.hasColumn(TableName.DynamicSecret, "keyEncoding");
const hasAlgorithm = await knex.schema.hasColumn(TableName.DynamicSecret, "algorithm");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedConfig) t.binary("encryptedConfig");
});
const kmsEncryptorGroupByProjectId: Record<string, Awaited<ReturnType<typeof getSecretManagerDataKey>>["encryptor"]> =
{};
if (hasInputCipherText && hasInputIV && hasInputTag) {
// eslint-disable-next-line
const dynamicSecretConfigs = await knex(TableName.DynamicSecret)
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.whereNull("encryptedConfig")
// @ts-ignore post migration fails
.select(selectAllTableCols(TableName.DynamicSecret))
.select("projectId");
const updatedConfigs = [];
for (const dynamicSecretConfig of dynamicSecretConfigs) {
if (!kmsEncryptorGroupByProjectId[dynamicSecretConfig.projectId]) {
// eslint-disable-next-line
const { encryptor } = await getSecretManagerDataKey(knex, dynamicSecretConfig.projectId);
kmsEncryptorGroupByProjectId[dynamicSecretConfig.projectId] = encryptor;
}
const kmsEncryptor = kmsEncryptorGroupByProjectId[dynamicSecretConfig.projectId];
const inputConfig = infisicalSymmetricDecrypt({
// @ts-ignore post migration fails
keyEncoding: dynamicSecretConfig.keyEncoding as SecretKeyEncoding,
// @ts-ignore post migration fails
ciphertext: dynamicSecretConfig.inputCiphertext as string,
// @ts-ignore post migration fails
iv: dynamicSecretConfig.inputIV as string,
// @ts-ignore post migration fails
tag: dynamicSecretConfig.inputTag as string
});
const { projectId, ...el } = dynamicSecretConfig;
updatedConfigs.push({
...el,
encryptedConfig: kmsEncryptor({ plainText: Buffer.from(inputConfig) }).cipherTextBlob
});
}
if (updatedConfigs.length) {
// eslint-disable-next-line
await knex(TableName.DynamicSecret).insert(updatedConfigs).onConflict("id").merge();
}
}
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.binary("encryptedConfig").notNullable().alter();
if (hasInputTag) t.dropColumn("inputTag");
if (hasInputIV) t.dropColumn("inputIV");
if (hasInputCipherText) t.dropColumn("inputCiphertext");
if (hasKeyEncoding) t.dropColumn("keyEncoding");
if (hasAlgorithm) t.dropColumn("algorithm");
});
};
export async function up(knex: Knex): Promise<void> {
const doesSecretV2TableExist = await knex.schema.hasTable(TableName.SecretV2);
@ -144,6 +314,14 @@ export async function up(knex: Knex): Promise<void> {
t.foreign("rotationId").references("id").inTable(TableName.SecretRotation).onDelete("CASCADE");
});
}
if (await knex.schema.hasTable(TableName.Webhook)) {
await backfillWebhooks(knex);
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
await backfillDynamicSecretConfigs(knex);
}
}
export async function down(knex: Knex): Promise<void> {
@ -178,4 +356,49 @@ export async function down(knex: Knex): Promise<void> {
if (hasEncryptedAwsIamAssumRole) t.dropColumn("encryptedAwsAssumeIamRoleArn");
});
}
if (await knex.schema.hasTable(TableName.Webhook)) {
const hasEncryptedWebhookSecretKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKeyWithKms");
const hasEncryptedWebhookUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasEncryptedSecretKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedSecretKey");
const hasIV = await knex.schema.hasColumn(TableName.Webhook, "iv");
const hasTag = await knex.schema.hasColumn(TableName.Webhook, "tag");
const hasKeyEncoding = await knex.schema.hasColumn(TableName.Webhook, "keyEncoding");
const hasAlgorithm = await knex.schema.hasColumn(TableName.Webhook, "algorithm");
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasEncryptedWebhookSecretKey) t.dropColumn("encryptedSecretKeyWithKms");
if (hasEncryptedWebhookUrl) t.dropColumn("encryptedUrl");
if (!hasUrl) t.string("url");
if (!hasEncryptedSecretKey) t.string("encryptedSecretKey");
if (!hasIV) t.string("iv");
if (!hasTag) t.string("tag");
if (!hasAlgorithm) t.string("algorithm");
if (!hasKeyEncoding) t.string("keyEncoding");
if (!hasUrlCipherText) t.string("urlCipherText");
if (!hasUrlIV) t.string("urlIV");
if (!hasUrlTag) t.string("urlTag");
});
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const hasEncryptedConfig = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedConfig");
const hasInputIV = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
const hasInputCipherText = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
const hasInputTag = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
const hasAlgorithm = await knex.schema.hasColumn(TableName.DynamicSecret, "algorithm");
const hasKeyEncoding = await knex.schema.hasColumn(TableName.DynamicSecret, "keyEncoding");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (hasEncryptedConfig) t.dropColumn("encryptedConfig");
if (!hasInputIV) t.string("inputIV");
if (!hasInputCipherText) t.text("inputCiphertext");
if (!hasInputTag) t.string("inputTag");
if (!hasAlgorithm) t.string("algorithm");
if (!hasKeyEncoding) t.string("keyEncoding");
});
}
}

@ -1,117 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasActiveCaCertIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId");
if (!hasActiveCaCertIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.uuid("activeCaCertId").nullable();
t.foreign("activeCaCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthority}" ca
SET "activeCaCertId" = cac.id
FROM "${TableName.CertificateAuthorityCert}" cac
WHERE ca.id = cac."caId"
`);
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
const hasVersionColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version");
if (!hasVersionColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").nullable();
t.dropUnique(["caId"]);
});
await knex(TableName.CertificateAuthorityCert).update({ version: 1 }).whereNull("version");
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").notNullable().alter();
});
}
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId");
if (!hasCaSecretIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").nullable();
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthorityCert}" cert
SET "caSecretId" = (
SELECT sec.id
FROM "${TableName.CertificateAuthoritySecret}" sec
WHERE sec."caId" = cert."caId"
)
`);
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").notNullable().alter();
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthoritySecret)) {
await knex.schema.alterTable(TableName.CertificateAuthoritySecret, (t) => {
t.dropUnique(["caId"]);
});
}
if (await knex.schema.hasTable(TableName.Certificate)) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.Certificate}" cert
SET "caCertId" = (
SELECT caCert.id
FROM "${TableName.CertificateAuthorityCert}" caCert
WHERE caCert."caId" = cert."caId"
)
`);
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId")) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("activeCaCertId");
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("version");
});
}
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("caSecretId");
});
}
}
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "caCertId")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("caCertId");
});
}
}
}

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
if (!doesPasswordExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("password").nullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
if (doesPasswordExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("password");
});
}
}
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (hasCreationLimitCol) {
t.dropColumn("creationLimit");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (!hasCreationLimitCol) {
t.integer("creationLimit").defaultTo(30).notNullable();
}
});
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.dropColumn("name");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (!hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.string("name");
});
}
}

@ -1,62 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.PkiCollection))) {
await knex.schema.createTable(TableName.PkiCollection, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("name").notNullable();
t.string("description").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.PkiCollection);
if (!(await knex.schema.hasTable(TableName.PkiCollectionItem))) {
await knex.schema.createTable(TableName.PkiCollectionItem, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("pkiCollectionId").notNullable();
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
t.uuid("caId").nullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.uuid("certId").nullable();
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.PkiCollectionItem);
if (!(await knex.schema.hasTable(TableName.PkiAlert))) {
await knex.schema.createTable(TableName.PkiAlert, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("pkiCollectionId").notNullable();
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
t.string("name").notNullable();
t.integer("alertBeforeDays").notNullable();
t.string("recipientEmails").notNullable();
t.unique(["name", "projectId"]);
});
}
await createOnUpdateTrigger(knex, TableName.PkiAlert);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.PkiAlert);
await dropOnUpdateTrigger(knex, TableName.PkiAlert);
await knex.schema.dropTableIfExists(TableName.PkiCollectionItem);
await dropOnUpdateTrigger(knex, TableName.PkiCollectionItem);
await knex.schema.dropTableIfExists(TableName.PkiCollection);
await dropOnUpdateTrigger(knex, TableName.PkiCollection);
}

@ -1,55 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
if (!hasCertificateTemplateTable) {
await knex.schema.createTable(TableName.CertificateTemplate, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.uuid("caId").notNullable();
tb.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
tb.uuid("pkiCollectionId");
tb.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("SET NULL");
tb.string("name").notNullable();
tb.string("commonName").notNullable();
tb.string("subjectAlternativeName").notNullable();
tb.string("ttl").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.CertificateTemplate);
}
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
TableName.Certificate,
"certificateTemplateId"
);
if (!doesCertificateTableHaveTemplateId) {
await knex.schema.alterTable(TableName.Certificate, (tb) => {
tb.uuid("certificateTemplateId");
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
TableName.Certificate,
"certificateTemplateId"
);
if (doesCertificateTableHaveTemplateId) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("certificateTemplateId");
});
}
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
if (hasCertificateTemplateTable) {
await knex.schema.dropTable(TableName.CertificateTemplate);
await dropOnUpdateTrigger(knex, TableName.CertificateTemplate);
}
}

@ -1,26 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasEstConfigTable = await knex.schema.hasTable(TableName.CertificateTemplateEstConfig);
if (!hasEstConfigTable) {
await knex.schema.createTable(TableName.CertificateTemplateEstConfig, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.uuid("certificateTemplateId").notNullable().unique();
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("CASCADE");
tb.binary("encryptedCaChain").notNullable();
tb.string("hashedPassphrase").notNullable();
tb.boolean("isEnabled").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.CertificateTemplateEstConfig);
await dropOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
}

@ -1,36 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCrl, "caSecretId");
if (!hasCaSecretIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("caSecretId").nullable();
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthorityCrl}" crl
SET "caSecretId" = (
SELECT sec.id
FROM "${TableName.CertificateAuthoritySecret}" sec
WHERE sec."caId" = crl."caId"
)
`);
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("caSecretId").notNullable().alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.dropColumn("caSecretId");
});
}
}

@ -1,96 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("integration").notNullable();
tb.string("slug").notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.string("description");
tb.unique(["orgId", "slug"]);
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
}
if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).notNullable();
tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
tb.string("teamId").notNullable();
tb.string("teamName").notNullable();
tb.string("slackUserId").notNullable();
tb.string("slackAppId").notNullable();
tb.binary("encryptedBotAccessToken").notNullable();
tb.string("slackBotId").notNullable();
tb.string("slackBotUserId").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
}
if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("projectId").notNullable().unique();
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
tb.uuid("slackIntegrationId").notNullable();
tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("accessRequestChannels").notNullable().defaultTo("");
tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("secretRequestChannels").notNullable().defaultTo("");
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
}
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (!doesSuperAdminHaveSlackClientId) {
tb.binary("encryptedSlackClientId");
}
if (!doesSuperAdminHaveSlackClientSecret) {
tb.binary("encryptedSlackClientSecret");
}
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);
await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (doesSuperAdminHaveSlackClientId) {
tb.dropColumn("encryptedSlackClientId");
}
if (doesSuperAdminHaveSlackClientSecret) {
tb.dropColumn("encryptedSlackClientSecret");
}
});
}
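Worth noting about the tables above: slack_integrations shares its primary key with workflow_integrations (its id is both the primary key and a cascading foreign key), so rows are created in pairs. A hedged sketch of an insert against that layout, assuming Knex with the pg driver; none of the literal values below come from the codebase:

import { Knex } from "knex";

// Illustrative only: create the parent workflow_integrations row first, then the
// slack_integrations row with the same id so the one-to-one foreign key on "id"
// is satisfied. All Slack identifiers here are placeholders.
export async function insertSlackIntegrationExample(knex: Knex, orgId: string): Promise<void> {
  await knex.transaction(async (tx) => {
    const [workflowIntegration] = (await tx("workflow_integrations")
      .insert({ integration: "slack", slug: "team-slack", orgId, description: "example" })
      .returning("id")) as { id: string }[];

    await tx("slack_integrations").insert({
      id: workflowIntegration.id, // shared primary key with the parent row
      teamId: "T0000000",
      teamName: "Example Team",
      slackUserId: "U0000000",
      slackAppId: "A0000000",
      encryptedBotAccessToken: Buffer.from("placeholder"), // real tokens are encrypted before insert
      slackBotId: "B0000000",
      slackBotUserId: "U0000001"
    });
  });
}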

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
TableName.CertificateAuthority,
"requireTemplateForIssuance"
);
if (!hasRequireTemplateForIssuanceColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("requireTemplateForIssuance");
});
}
}

@ -1,85 +0,0 @@
import { Knex } from "knex";
import { CertKeyUsage } from "@app/services/certificate/certificate-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// Certificate template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
if (!hasKeyUsagesCol) {
tb.specificType("keyUsages", "text[]");
}
if (!hasExtendedKeyUsagesCol) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
if (!hasKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!hasExtendedKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
extendedKeyUsages: []
});
}
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (tb) => {
if (!doesCertTableHaveKeyUsages) {
tb.specificType("keyUsages", "text[]");
}
if (!doesCertTableHaveExtendedKeyUsages) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
if (!doesCertTableHaveKeyUsages) {
await knex(TableName.Certificate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!doesCertTableHaveExtendedKeyUsages) {
await knex(TableName.Certificate).update({
extendedKeyUsages: []
});
}
}
export async function down(knex: Knex): Promise<void> {
// Certificate Template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
if (hasKeyUsagesCol) {
t.dropColumn("keyUsages");
}
if (hasExtendedKeyUsagesCol) {
t.dropColumn("extendedKeyUsages");
}
});
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (t) => {
if (doesCertTableHaveKeyUsages) {
t.dropColumn("keyUsages");
}
if (doesCertTableHaveExtendedKeyUsages) {
t.dropColumn("extendedKeyUsages");
}
});
}

@ -1,76 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAccessApproverGroupId = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approverGroupId"
);
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasSecretApproverGroupId = await knex.schema.hasColumn(
TableName.SecretApprovalPolicyApprover,
"approverGroupId"
);
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
// add column approverGroupId to AccessApprovalPolicyApprover
if (!hasAccessApproverGroupId) {
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
}
// make approverUserId nullable
if (hasAccessApproverUserId) {
table.uuid("approverUserId").nullable().alter();
}
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
// add column approverGroupId to SecretApprovalPolicyApprover
if (!hasSecretApproverGroupId) {
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
}
// make approverUserId nullable
if (hasSecretApproverUserId) {
table.uuid("approverUserId").nullable().alter();
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAccessApproverGroupId = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approverGroupId"
);
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasSecretApproverGroupId = await knex.schema.hasColumn(
TableName.SecretApprovalPolicyApprover,
"approverGroupId"
);
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
if (hasAccessApproverGroupId) {
table.dropColumn("approverGroupId");
}
// make approverUserId not nullable
if (hasAccessApproverUserId) {
table.uuid("approverUserId").notNullable().alter();
}
});
// remove
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
if (hasSecretApproverGroupId) {
table.dropColumn("approverGroupId");
}
// make approverUserId not nullable
if (hasSecretApproverUserId) {
table.uuid("approverUserId").notNullable().alter();
}
});
}
}

@ -1,24 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityMetadata))) {
await knex.schema.createTable(TableName.IdentityMetadata, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("key").notNullable();
tb.string("value").notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.uuid("identityId");
tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
tb.timestamps(true, true, true);
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityMetadata);
}
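Because both userId and identityId are nullable while orgId is required, a metadata row in the table above attaches to either a human user or a machine identity within an organization. A small sketch of the two shapes a row can take (placeholder values, not from the codebase):

import { Knex } from "knex";

// Illustrative only: one key scoped to a user, one scoped to a machine identity.
export async function seedIdentityMetadataExample(knex: Knex, orgId: string, userId: string, identityId: string) {
  await knex("identity_metadata").insert([
    { key: "department", value: "platform", orgId, userId },
    { key: "environment", value: "ci", orgId, identityId }
  ]);
}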

@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("iv").nullable().alter();
t.string("tag").nullable().alter();
t.string("encryptedValue").nullable().alter();
t.binary("encryptedSecret").nullable();
t.string("hashedHex").nullable().alter();
t.string("identifier", 64).nullable();
t.unique("identifier");
t.index("identifier");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("encryptedSecret");
t.dropColumn("identifier");
});
}
}

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed"))) {
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
tb.datetime("lastUsed");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed")) {
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
tb.dropColumn("lastUsed");
});
}
}
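The hasColumn/hasTable guards used throughout these migrations keep them idempotent, so a partially applied migration can be re-run safely. The same pattern, extracted into a small helper as a sketch (this helper is not something the codebase defines):

import { Knex } from "knex";

// Hypothetical helper: add a datetime column only when it does not exist yet,
// mirroring the guard used in the migration above.
export async function addDatetimeColumnIfMissing(knex: Knex, tableName: string, columnName: string): Promise<void> {
  if (!(await knex.schema.hasColumn(tableName, columnName))) {
    await knex.schema.alterTable(tableName, (t) => {
      t.datetime(columnName);
    });
  }
}

// e.g. await addDatetimeColumnIfMissing(knex, "oidc_configs", "lastUsed");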

@ -1,46 +0,0 @@
import { Knex } from "knex";
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
// drop constraint if exists (won't exist if rolled back, see below)
await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);
// projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
if (hasOrgId) {
table.unique(["orgId", "projectId", "slug"]);
}
if (hasSlug) {
table.renameColumn("slug", "name");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");
// remove projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
if (hasName) {
table.renameColumn("name", "slug");
}
if (hasOrgId) {
table.dropUnique(["orgId", "projectId", "slug"]);
}
table.dropColumn("projectId");
});
}
}

@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (!hasSlug) {
// add slug back temporarily and set value equal to name
await knex.schema
.alterTable(TableName.KmsKey, (table) => {
table.string("slug", 32);
})
.then(() => knex(TableName.KmsKey).update("slug", knex.ref("name")));
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (hasSlug) {
await knex.schema.alterTable(TableName.KmsKey, (table) => {
table.dropColumn("slug");
});
}
}
}

@ -1,48 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AuditLog)) {
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesOrgIdExist) {
t.dropForeign("orgId");
}
if (doesProjectIdExist) {
t.dropForeign("projectId");
}
// add normalized field
if (!doesProjectNameExist) {
t.string("projectName");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesOrgIdExist) {
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
}
if (doesProjectIdExist) {
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
}
// remove normalized field
if (doesProjectNameExist) {
t.dropColumn("projectName");
}
});
}
}

@ -1,164 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const formatPartitionDate = (date: Date) => {
const year = date.getFullYear();
const month = String(date.getMonth() + 1).padStart(2, "0");
const day = String(date.getDate()).padStart(2, "0");
return `${year}-${month}-${day}`;
};
const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);
const partitionName = `${TableName.PartitionedAuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(
/-/g,
""
)}`;
await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.PartitionedAuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
);
};
const isUsingDedicatedAuditLogDb = Boolean(process.env.AUDIT_LOGS_DB_CONNECTION_URI);
export async function up(knex: Knex): Promise<void> {
if (!isUsingDedicatedAuditLogDb && (await knex.schema.hasTable(TableName.AuditLog))) {
console.info("Dropping primary key of Audit Log table...");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
// drop the existing primary key
t.dropPrimary();
});
}
// create a new partitioned table for audit logs
if (!(await knex.schema.hasTable(TableName.PartitionedAuditLog))) {
const createTableSql = knex.schema
.createTable(TableName.PartitionedAuditLog, (t) => {
t.uuid("id").defaultTo(knex.fn.uuid());
t.string("actor").notNullable();
t.jsonb("actorMetadata").notNullable();
t.string("ipAddress");
t.string("eventType").notNullable();
t.jsonb("eventMetadata");
t.string("userAgent");
t.string("userAgentType");
t.datetime("expiresAt");
t.timestamps(true, true, true);
t.uuid("orgId");
t.string("projectId");
t.string("projectName");
t.primary(["id", "createdAt"]);
})
.toString();
console.info("Creating partition table...");
await knex.schema.raw(`
${createTableSql} PARTITION BY RANGE ("createdAt");
`);
console.log("Adding indices...");
await knex.schema.alterTable(TableName.PartitionedAuditLog, (t) => {
t.index(["projectId", "createdAt"]);
t.index(["orgId", "createdAt"]);
t.index("expiresAt");
t.index("orgId");
t.index("projectId");
});
console.log("Adding GIN indices...");
await knex.raw(
`CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.PartitionedAuditLog} USING gin("actorMetadata" jsonb_path_ops)`
);
console.log("GIN index for actorMetadata done");
await knex.raw(
`CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.PartitionedAuditLog} USING gin("eventMetadata" jsonb_path_ops)`
);
console.log("GIN index for eventMetadata done");
// create default partition
console.log("Creating default partition...");
await knex.schema.raw(
`CREATE TABLE ${TableName.PartitionedAuditLog}_default PARTITION OF ${TableName.PartitionedAuditLog} DEFAULT`
);
const nextDate = new Date();
nextDate.setDate(nextDate.getDate() + 1);
const nextDateStr = formatPartitionDate(nextDate);
// attach existing audit log table as a partition ONLY if using the same DB
if (!isUsingDedicatedAuditLogDb) {
console.log("Attaching existing audit log table as a partition...");
await knex.schema.raw(`
ALTER TABLE ${TableName.AuditLog} ADD CONSTRAINT audit_log_old
CHECK ( "createdAt" < DATE '${nextDateStr}' );
ALTER TABLE ${TableName.PartitionedAuditLog} ATTACH PARTITION ${TableName.AuditLog}
FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
`);
}
// create partition from now until end of month
console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));
// create partitions 4 years ahead
const partitionMonths = 4 * 12;
const partitionPromises: Promise<void>[] = [];
for (let x = 1; x <= partitionMonths; x += 1) {
partitionPromises.push(
createAuditLogPartition(
knex,
new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
)
);
}
await Promise.all(partitionPromises);
console.log("Partition migration complete");
}
}
export async function down(knex: Knex): Promise<void> {
const partitionSearchResult = await knex.raw(`
SELECT inhrelid::regclass::text
FROM pg_inherits
WHERE inhparent::regclass::text = '${TableName.PartitionedAuditLog}'
AND inhrelid::regclass::text = '${TableName.AuditLog}'
`);
const isAuditLogAPartition = partitionSearchResult.rows.length > 0;
if (isAuditLogAPartition) {
// detach audit log from partition
console.log("Detaching original audit log table from new partition table...");
await knex.schema.raw(`
ALTER TABLE ${TableName.PartitionedAuditLog} DETACH PARTITION ${TableName.AuditLog};
ALTER TABLE ${TableName.AuditLog} DROP CONSTRAINT audit_log_old;
`);
// revert audit log modifications
console.log("Reverting changes made to the audit log table...");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
// we drop this first because adding to the partition results in a new primary key
t.dropPrimary();
// add back the original keys of the audit logs table
t.primary(["id"], {
constraintName: "audit_logs_pkey"
});
});
}
}
await knex.schema.dropTableIfExists(TableName.PartitionedAuditLog);
console.log("Partition rollback complete");
}
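Since the migration above only pre-creates monthly partitions roughly four years ahead, new ranges eventually have to be added with the same naming scheme. A sketch of what that could look like; the helper itself is not part of the codebase, but the table name and "<table>_<start>_<end>" naming convention are taken from createAuditLogPartition above:

import { Knex } from "knex";

const formatPartitionDate = (date: Date) => {
  const year = date.getFullYear();
  const month = String(date.getMonth() + 1).padStart(2, "0");
  const day = String(date.getDate()).padStart(2, "0");
  return `${year}-${month}-${day}`;
};

// Illustrative only: add the range partition covering a given calendar month.
export async function createMonthlyAuditLogPartition(knex: Knex, year: number, month: number): Promise<void> {
  const start = new Date(year, month - 1, 1);
  const end = new Date(year, month, 1); // Date rolls the month over automatically
  const startStr = formatPartitionDate(start);
  const endStr = formatPartitionDate(end);
  const partitionName = `partitioned_audit_logs_${startStr.replace(/-/g, "")}_${endStr.replace(/-/g, "")}`;

  await knex.schema.raw(
    `CREATE TABLE IF NOT EXISTS ${partitionName} PARTITION OF partitioned_audit_logs FOR VALUES FROM ('${startStr}') TO ('${endStr}')`
  );
}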

@ -1,6 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export const dropConstraintIfExists = (tableName: TableName, constraintName: string, knex: Knex) =>
knex.raw(`ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`);
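A usage sketch for this helper, mirroring the KmsKey migration earlier in this diff: drop the old unique constraint if it is still around, then rename the column it covered. The constraint name follows Postgres' default <table>_<columns>_unique convention.

import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";

export async function up(knex: Knex): Promise<void> {
  // remove the stale unique constraint (no-op if it was already dropped or rolled back)
  await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);
  await knex.schema.alterTable(TableName.KmsKey, (t) => {
    t.renameColumn("slug", "name");
  });
}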

@ -54,7 +54,7 @@ export const getSecretManagerDataKey = async (knex: Knex, projectId: string) =>
} else {
const [kmsDoc] = await knex(TableName.KmsKey)
.insert({
name: slugify(alphaNumericNanoId(8).toLowerCase()),
slug: slugify(alphaNumericNanoId(8).toLowerCase()),
orgId: project.orgId,
isReserved: false
})

@ -9,11 +9,10 @@ import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesApproversSchema = z.object({
id: z.string().uuid(),
approverId: z.string().uuid(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
approverUserId: z.string().uuid().nullable().optional(),
approverGroupId: z.string().uuid().nullable().optional()
updatedAt: z.date()
});
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;

@ -9,12 +9,11 @@ import { TImmutableDBKeys } from "./models";
export const AccessApprovalRequestsReviewersSchema = z.object({
id: z.string().uuid(),
member: z.string().uuid().nullable().optional(),
member: z.string().uuid(),
status: z.string(),
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
updatedAt: z.date()
});
export type TAccessApprovalRequestsReviewers = z.infer<typeof AccessApprovalRequestsReviewersSchema>;

@ -11,13 +11,12 @@ export const AccessApprovalRequestsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
privilegeId: z.string().uuid().nullable().optional(),
requestedBy: z.string().uuid().nullable().optional(),
requestedBy: z.string().uuid(),
isTemporary: z.boolean(),
temporaryRange: z.string().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
requestedByUserId: z.string().uuid()
updatedAt: z.date()
});
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

@ -27,9 +27,7 @@ export const CertificateAuthoritiesSchema = z.object({
maxPathLength: z.number().nullable().optional(),
keyAlgorithm: z.string(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional(),
activeCaCertId: z.string().uuid().nullable().optional(),
requireTemplateForIssuance: z.boolean().default(false)
notAfter: z.date().nullable().optional()
});
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;

@ -15,9 +15,7 @@ export const CertificateAuthorityCertsSchema = z.object({
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCertificate: zodBuffer,
encryptedCertificateChain: zodBuffer,
version: z.number(),
caSecretId: z.string().uuid()
encryptedCertificateChain: zodBuffer
});
export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;

@ -14,8 +14,7 @@ export const CertificateAuthorityCrlSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCrl: zodBuffer,
caSecretId: z.string().uuid()
encryptedCrl: zodBuffer
});
export type TCertificateAuthorityCrl = z.infer<typeof CertificateAuthorityCrlSchema>;

@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateTemplateEstConfigsSchema = z.object({
id: z.string().uuid(),
certificateTemplateId: z.string().uuid(),
encryptedCaChain: zodBuffer,
hashedPassphrase: z.string(),
isEnabled: z.boolean(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TCertificateTemplateEstConfigs = z.infer<typeof CertificateTemplateEstConfigsSchema>;
export type TCertificateTemplateEstConfigsInsert = Omit<
z.input<typeof CertificateTemplateEstConfigsSchema>,
TImmutableDBKeys
>;
export type TCertificateTemplateEstConfigsUpdate = Partial<
Omit<z.input<typeof CertificateTemplateEstConfigsSchema>, TImmutableDBKeys>
>;

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificateTemplatesSchema = z.object({
id: z.string().uuid(),
caId: z.string().uuid(),
pkiCollectionId: z.string().uuid().nullable().optional(),
name: z.string(),
commonName: z.string(),
subjectAlternativeName: z.string(),
ttl: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional()
});
export type TCertificateTemplates = z.infer<typeof CertificateTemplatesSchema>;
export type TCertificateTemplatesInsert = Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>;
export type TCertificateTemplatesUpdate = Partial<Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>>;

@ -20,11 +20,7 @@ export const CertificatesSchema = z.object({
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional(),
caCertId: z.string().uuid(),
certificateTemplateId: z.string().uuid().nullable().optional(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional()
altNames: z.string().default("").nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretsSchema = z.object({
@ -14,16 +16,12 @@ export const DynamicSecretsSchema = z.object({
type: z.string(),
defaultTTL: z.string(),
maxTTL: z.string().nullable().optional(),
inputIV: z.string(),
inputCiphertext: z.string(),
inputTag: z.string(),
algorithm: z.string().default("aes-256-gcm"),
keyEncoding: z.string().default("utf8"),
folderId: z.string().uuid(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedConfig: zodBuffer
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityMetadataSchema = z.object({
id: z.string().uuid(),
key: z.string(),
value: z.string(),
orgId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
identityId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityMetadata = z.infer<typeof IdentityMetadataSchema>;
export type TIdentityMetadataInsert = Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>;
export type TIdentityMetadataUpdate = Partial<Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>>;

@ -14,8 +14,6 @@ export * from "./certificate-authority-crl";
export * from "./certificate-authority-secret";
export * from "./certificate-bodies";
export * from "./certificate-secrets";
export * from "./certificate-template-est-configs";
export * from "./certificate-templates";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
@ -31,7 +29,6 @@ export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
export * from "./identity-oidc-auths";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
@ -55,16 +52,11 @@ export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
export * from "./partitioned-audit-logs";
export * from "./pki-alerts";
export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
@ -104,7 +96,6 @@ export * from "./secret-versions-v2";
export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./super-admin";
export * from "./trusted-ips";
export * from "./user-actions";
@ -113,4 +104,3 @@ export * from "./user-encryption-keys";
export * from "./user-group-membership";
export * from "./users";
export * from "./webhooks";
export * from "./workflow-integrations";

@ -13,11 +13,9 @@ export const KmsKeysSchema = z.object({
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
orgId: z.string().uuid(),
name: z.string(),
slug: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string().nullable().optional(),
slug: z.string().nullable().optional()
updatedAt: z.date()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

@ -3,17 +3,12 @@ import { z } from "zod";
export enum TableName {
Users = "users",
CertificateAuthority = "certificate_authorities",
CertificateTemplateEstConfig = "certificate_template_est_configs",
CertificateAuthorityCert = "certificate_authority_certs",
CertificateAuthoritySecret = "certificate_authority_secret",
CertificateAuthorityCrl = "certificate_authority_crl",
Certificate = "certificates",
CertificateBody = "certificate_bodies",
CertificateSecret = "certificate_secrets",
CertificateTemplate = "certificate_templates",
PkiAlert = "pki_alerts",
PkiCollection = "pki_collections",
PkiCollectionItem = "pki_collection_items",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
@ -70,8 +65,6 @@ export enum TableName {
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
// used by both identities and users
IdentityMetadata = "identity_metadata",
ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@ -90,7 +83,6 @@ export enum TableName {
OidcConfig = "oidc_configs",
LdapGroupMap = "ldap_group_maps",
AuditLog = "audit_logs",
PartitionedAuditLog = "partitioned_audit_logs",
AuditLogStream = "audit_log_streams",
GitAppInstallSession = "git_app_install_sessions",
GitAppOrg = "git_app_org",
@ -117,10 +109,7 @@ export enum TableName {
InternalKms = "internal_kms",
InternalKmsKeyVersion = "internal_kms_key_version",
// @deprecated
KmsKeyVersion = "kms_key_versions",
WorkflowIntegrations = "workflow_integrations",
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs"
KmsKeyVersion = "kms_key_versions"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

@ -26,8 +26,7 @@ export const OidcConfigsSchema = z.object({
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
orgId: z.string().uuid()
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;

@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PartitionedAuditLogsSchema = z.object({
id: z.string().uuid(),
actor: z.string(),
actorMetadata: z.unknown(),
ipAddress: z.string().nullable().optional(),
eventType: z.string(),
eventMetadata: z.unknown().nullable().optional(),
userAgent: z.string().nullable().optional(),
userAgentType: z.string().nullable().optional(),
expiresAt: z.date().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid().nullable().optional(),
projectId: z.string().nullable().optional(),
projectName: z.string().nullable().optional()
});
export type TPartitionedAuditLogs = z.infer<typeof PartitionedAuditLogsSchema>;
export type TPartitionedAuditLogsInsert = Omit<z.input<typeof PartitionedAuditLogsSchema>, TImmutableDBKeys>;
export type TPartitionedAuditLogsUpdate = Partial<Omit<z.input<typeof PartitionedAuditLogsSchema>, TImmutableDBKeys>>;
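Each generated schema file also emits Insert/Update helper types with the immutable keys (id, createdAt, updatedAt) stripped via TImmutableDBKeys. A sketch of how these are typically consumed, using placeholder values:

import { PartitionedAuditLogsSchema, TPartitionedAuditLogsInsert } from "@app/db/schemas";

// Insert payloads omit id/createdAt/updatedAt because of TImmutableDBKeys.
const newLog: TPartitionedAuditLogsInsert = {
  actor: "user",
  actorMetadata: { userId: "11111111-1111-4111-8111-111111111111" },
  eventType: "get-secrets",
  projectId: "example-project-id"
};

// Full rows read back from the database can still be validated with the zod schema.
const parsed = PartitionedAuditLogsSchema.safeParse({
  ...newLog,
  id: "22222222-2222-4222-8222-222222222222",
  createdAt: new Date(),
  updatedAt: new Date()
});
if (!parsed.success) throw parsed.error;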

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PkiAlertsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
pkiCollectionId: z.string().uuid(),
name: z.string(),
alertBeforeDays: z.number(),
recipientEmails: z.string()
});
export type TPkiAlerts = z.infer<typeof PkiAlertsSchema>;
export type TPkiAlertsInsert = Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>;
export type TPkiAlertsUpdate = Partial<Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>>;

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PkiCollectionItemsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
pkiCollectionId: z.string().uuid(),
caId: z.string().uuid().nullable().optional(),
certId: z.string().uuid().nullable().optional()
});
export type TPkiCollectionItems = z.infer<typeof PkiCollectionItemsSchema>;
export type TPkiCollectionItemsInsert = Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>;
export type TPkiCollectionItemsUpdate = Partial<Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>>;

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PkiCollectionsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
name: z.string(),
description: z.string()
});
export type TPkiCollections = z.infer<typeof PkiCollectionsSchema>;
export type TPkiCollectionsInsert = Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>;
export type TPkiCollectionsUpdate = Partial<Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>>;

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSlackConfigsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
slackIntegrationId: z.string().uuid(),
isAccessRequestNotificationEnabled: z.boolean().default(false),
accessRequestChannels: z.string().default(""),
isSecretRequestNotificationEnabled: z.boolean().default(false),
secretRequestChannels: z.string().default(""),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectSlackConfigs = z.infer<typeof ProjectSlackConfigsSchema>;
export type TProjectSlackConfigsInsert = Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>;
export type TProjectSlackConfigsUpdate = Partial<Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>>;

@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
export const ProjectUserAdditionalPrivilegeSchema = z.object({
id: z.string().uuid(),
slug: z.string(),
projectMembershipId: z.string().uuid().nullable().optional(),
projectMembershipId: z.string().uuid(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
@ -18,9 +18,7 @@ export const ProjectUserAdditionalPrivilegeSchema = z.object({
temporaryAccessEndTime: z.date().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
userId: z.string().uuid(),
projectId: z.string()
updatedAt: z.date()
});
export type TProjectUserAdditionalPrivilege = z.infer<typeof ProjectUserAdditionalPrivilegeSchema>;

@ -15,6 +15,7 @@ export const RateLimitSchema = z.object({
authRateLimit: z.number().default(60),
inviteUserRateLimit: z.number().default(30),
mfaRateLimit: z.number().default(20),
creationLimit: z.number().default(30),
publicEndpointLimit: z.number().default(30),
createdAt: z.date(),
updatedAt: z.date()

@ -12,8 +12,7 @@ export const SecretApprovalPoliciesApproversSchema = z.object({
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
approverUserId: z.string().uuid().nullable().optional(),
approverGroupId: z.string().uuid().nullable().optional()
approverUserId: z.string().uuid()
});
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;

@ -5,16 +5,14 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),
encryptedValue: z.string(),
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid().nullable().optional(),
@ -23,10 +21,7 @@ export const SecretSharingSchema = z.object({
expiresAfterViews: z.number().nullable().optional(),
accessType: z.string().default("anyone"),
name: z.string().nullable().optional(),
lastViewedAt: z.date().nullable().optional(),
password: z.string().nullable().optional(),
encryptedSecret: zodBuffer.nullable().optional(),
identifier: z.string().nullable().optional()
lastViewedAt: z.date().nullable().optional()
});
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;

@ -9,6 +9,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretTagsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
color: z.string().nullable().optional(),
createdAt: z.date(),

@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SlackIntegrationsSchema = z.object({
id: z.string().uuid(),
teamId: z.string(),
teamName: z.string(),
slackUserId: z.string(),
slackAppId: z.string(),
encryptedBotAccessToken: zodBuffer,
slackBotId: z.string(),
slackBotUserId: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSlackIntegrations = z.infer<typeof SlackIntegrationsSchema>;
export type TSlackIntegrationsInsert = Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>;
export type TSlackIntegrationsUpdate = Partial<Omit<z.input<typeof SlackIntegrationsSchema>, TImmutableDBKeys>>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SuperAdminSchema = z.object({
@ -21,9 +19,7 @@ export const SuperAdminSchema = z.object({
trustLdapEmails: z.boolean().default(false).nullable().optional(),
trustOidcEmails: z.boolean().default(false).nullable().optional(),
defaultAuthOrgId: z.string().uuid().nullable().optional(),
enabledLoginMethods: z.string().array().nullable().optional(),
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional()
enabledLoginMethods: z.string().array().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

@ -5,27 +5,22 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const WebhooksSchema = z.object({
id: z.string().uuid(),
secretPath: z.string().default("/"),
url: z.string(),
lastStatus: z.string().nullable().optional(),
lastRunErrorMessage: z.string().nullable().optional(),
isDisabled: z.boolean().default(false),
encryptedSecretKey: z.string().nullable().optional(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
algorithm: z.string().nullable().optional(),
keyEncoding: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional()
type: z.string().default("general").nullable().optional(),
encryptedSecretKeyWithKms: zodBuffer.nullable().optional(),
encryptedUrl: zodBuffer
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;

@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const WorkflowIntegrationsSchema = z.object({
id: z.string().uuid(),
integration: z.string(),
slug: z.string(),
orgId: z.string().uuid(),
description: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TWorkflowIntegrations = z.infer<typeof WorkflowIntegrationsSchema>;
export type TWorkflowIntegrationsInsert = Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>;
export type TWorkflowIntegrationsUpdate = Partial<Omit<z.input<typeof WorkflowIntegrationsSchema>, TImmutableDBKeys>>;

@ -1,173 +0,0 @@
import bcrypt from "bcrypt";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
export const registerCertificateEstRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
// add support for CSR bodies
server.addContentTypeParser("application/pkcs10", { parseAs: "string" }, (_, body, done) => {
try {
let csrBody = body as string;
// some EST clients send CSRs in PEM format and some in base64 format
// for CSRs sent in PEM, we leave them as is
// for CSRs sent in base64, we preprocess them to remove newlines and spaces
if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
}
done(null, csrBody);
} catch (err) {
const error = err as Error;
done(error, undefined);
}
});
// Authenticate EST client using Passphrase
server.addHook("onRequest", async (req, res) => {
const { authorization } = req.headers;
const urlFragments = req.url.split("/");
// cacerts endpoint should not have any authentication
if (urlFragments[urlFragments.length - 1] === "cacerts") {
return;
}
if (!authorization) {
const wwwAuthenticateHeader = "WWW-Authenticate";
const errAuthRequired = "Authentication required";
await res.hijack();
// set a definitive connection timeout to clean up open connections and prevent memory leaks
res.raw.setTimeout(10 * 1000, () => {
res.raw.end();
});
res.raw.setHeader(wwwAuthenticateHeader, `Basic realm="infisical"`);
res.raw.setHeader("Content-Length", 0);
res.raw.statusCode = 401;
// Write the error message to the response without ending the connection
res.raw.write(errAuthRequired);
// flush headers
res.raw.flushHeaders();
return;
}
const certificateTemplateId = urlFragments.slice(-2)[0];
const estConfig = await server.services.certificateTemplate.getEstConfiguration({
isInternal: true,
certificateTemplateId
});
if (!estConfig.isEnabled) {
throw new BadRequestError({
message: "EST is disabled"
});
}
const rawCredential = authorization?.split(" ").pop();
if (!rawCredential) {
throw new UnauthorizedError({ message: "Missing HTTP credentials" });
}
// expected format is user:password
const basicCredential = atob(rawCredential);
const password = basicCredential.split(":").pop();
if (!password) {
throw new BadRequestError({
message: "No password provided"
});
}
const isPasswordValid = await bcrypt.compare(password, estConfig.hashedPassphrase);
if (!isPasswordValid) {
throw new UnauthorizedError({
message: "Invalid credentials"
});
}
});
server.route({
method: "POST",
url: "/:certificateTemplateId/simpleenroll",
config: {
rateLimit: writeLimit
},
schema: {
body: z.string().min(1),
params: z.object({
certificateTemplateId: z.string().min(1)
}),
response: {
200: z.string()
}
},
handler: async (req, res) => {
void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only");
void res.header("Content-Transfer-Encoding", "base64");
return server.services.certificateEst.simpleEnroll({
csr: req.body,
certificateTemplateId: req.params.certificateTemplateId,
sslClientCert: req.headers[appCfg.SSL_CLIENT_CERTIFICATE_HEADER_KEY] as string
});
}
});
server.route({
method: "POST",
url: "/:certificateTemplateId/simplereenroll",
config: {
rateLimit: writeLimit
},
schema: {
body: z.string().min(1),
params: z.object({
certificateTemplateId: z.string().min(1)
}),
response: {
200: z.string()
}
},
handler: async (req, res) => {
void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only");
void res.header("Content-Transfer-Encoding", "base64");
return server.services.certificateEst.simpleReenroll({
csr: req.body,
certificateTemplateId: req.params.certificateTemplateId,
sslClientCert: req.headers[appCfg.SSL_CLIENT_CERTIFICATE_HEADER_KEY] as string
});
}
});
server.route({
method: "GET",
url: "/:certificateTemplateId/cacerts",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
certificateTemplateId: z.string().min(1)
}),
response: {
200: z.string()
}
},
handler: async (req, res) => {
void res.header("Content-Type", "application/pkcs7-mime; smime-type=certs-only");
void res.header("Content-Transfer-Encoding", "base64");
return server.services.certificateEst.getCaCerts({
certificateTemplateId: req.params.certificateTemplateId
});
}
});
};
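For reference, a hedged client-side sketch of calling the simpleenroll route above. The base URL is an assumed mount path, the template id and passphrase are placeholders, and the passphrase travels as the password part of HTTP Basic auth (the username is ignored by the onRequest hook above). The CSR is posted as application/pkcs10, matching the registered content-type parser.

const baseUrl = "https://app.infisical.com/api/v1/pki/est"; // assumed mount path; adjust to your deployment
const certificateTemplateId = "<certificate-template-id>";
const estPassphrase = "<est-passphrase>";

export async function simpleEnroll(csrPem: string): Promise<string> {
  const res = await fetch(`${baseUrl}/${certificateTemplateId}/simpleenroll`, {
    method: "POST",
    headers: {
      "Content-Type": "application/pkcs10",
      Authorization: `Basic ${Buffer.from(`est:${estPassphrase}`).toString("base64")}`
    },
    body: csrPem
  });
  if (!res.ok) throw new Error(`EST enrollment failed with status ${res.status}`);
  return res.text(); // base64-encoded PKCS#7 (certs-only) response
}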

@ -1,9 +1,7 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@ -12,32 +10,28 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/",
method: "POST",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z.string().trim().default("/"),
environment: z.string(),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
body: z
.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z.string().trim().default("/"),
environment: z.string(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
message: "The number of approvals should not exceed the number of approvers."
}),
response: {
200: z.object({
approval: sapPubSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const approval = await server.services.accessApprovalPolicy.createAccessApprovalPolicy({
actor: req.permission.type,
@ -56,26 +50,13 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectSlug: z.string().trim()
}),
response: {
200: z.object({
approvals: sapPubSchema
.extend({
approvers: z
.object({ type: z.nativeEnum(ApproverType), id: z.string().nullable().optional() })
.array()
.nullable()
.optional()
})
.array()
.nullable()
.optional()
approvals: sapPubSchema.extend({ approvers: z.string().array(), secretPath: z.string().optional() }).array()
})
}
},
@ -88,7 +69,6 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
projectSlug: req.query.projectSlug
});
return { approvals };
}
});
@ -125,37 +105,33 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/:policyId",
method: "PATCH",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
policyId: z.string()
}),
body: z.object({
name: z.string().optional(),
secretPath: z
.string()
.trim()
.optional()
.transform((val) => (val === "" ? "/" : val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
}),
body: z
.object({
name: z.string().optional(),
secretPath: z
.string()
.trim()
.optional()
.transform((val) => (val === "" ? "/" : val)),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
message: "The number of approvals should not exceed the number of approvers."
}),
response: {
200: z.object({
approval: sapPubSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
await server.services.accessApprovalPolicy.updateAccessApprovalPolicy({
policyId: req.params.policyId,
@ -171,9 +147,6 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
server.route({
url: "/:policyId",
method: "DELETE",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
policyId: z.string()
@ -184,7 +157,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const approval = await server.services.accessApprovalPolicy.deleteAccessApprovalPolicy({
actor: req.permission.type,
@ -196,44 +169,4 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
return { approval };
}
});
server.route({
url: "/:policyId",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
policyId: z.string()
}),
response: {
200: z.object({
approval: sapPubSchema.extend({
approvers: z
.object({
type: z.nativeEnum(ApproverType),
id: z.string().nullable().optional(),
name: z.string().nullable().optional()
})
.array()
.nullable()
.optional()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const approval = await server.services.accessApprovalPolicy.getAccessApprovalPolicyById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
return { approval };
}
});
};
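One side of the hunks above switches approvers from a flat string array to a discriminated union of groups and users. A sketch of a POST body that satisfies that union shape; the ids are placeholders, and approvals/enforcementLevel fall back to their schema defaults:

import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

// Placeholder values only: a group approver referenced by id and a user approver referenced by id.
export const createAccessApprovalPolicyBody = {
  projectSlug: "example-project",
  name: "production-access-policy",
  secretPath: "/",
  environment: "prod",
  approvers: [
    { type: ApproverType.Group, id: "11111111-1111-4111-8111-111111111111" },
    { type: ApproverType.User, id: "22222222-2222-4222-8222-222222222222" }
  ]
};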

@ -1,19 +1,10 @@
import { z } from "zod";
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema } from "@app/db/schemas";
import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const approvalRequestUser = z.object({ userId: z.string() }).merge(
UsersSchema.pick({
email: true,
firstName: true,
lastName: true,
username: true
})
);
export const registerAccessApprovalRequestRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
@ -113,11 +104,10 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
}),
reviewers: z
.object({
userId: z.string(),
member: z.string(),
status: z.string()
})
.array(),
requestedByUser: approvalRequestUser
.array()
}).array()
})
}

@ -1,55 +1,86 @@
/* eslint-disable @typescript-eslint/no-floating-promises */
import { z } from "zod";
import { CA_CRLS } from "@app/lib/api-docs";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:crlId",
url: "/:caId/crl",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get CRL in DER format (deprecated)",
description: "Get CRL of the CA",
params: z.object({
crlId: z.string().trim().describe(CA_CRLS.GET.crlId)
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CRL.caId)
}),
response: {
200: z.instanceof(Buffer)
200: z.object({
crl: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRL.crl)
})
}
},
handler: async (req, res) => {
const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId);
handler: async (req) => {
const { crl, ca } = await server.services.certificateAuthorityCrl.getCaCrl({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
res.header("Content-Type", "application/pkix-crl");
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA_CRL,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return Buffer.from(crl);
return {
crl
};
}
});
server.route({
method: "GET",
url: "/:crlId/der",
config: {
rateLimit: readLimit
},
schema: {
description: "Get CRL in DER format",
params: z.object({
crlId: z.string().trim().describe(CA_CRLS.GET.crlId)
}),
response: {
200: z.instanceof(Buffer)
}
},
handler: async (req, res) => {
const { crl } = await server.services.certificateAuthorityCrl.getCrlById(req.params.crlId);
res.header("Content-Type", "application/pkix-crl");
return Buffer.from(crl);
}
});
// server.route({
// method: "GET",
// url: "/:caId/crl/rotate",
// config: {
// rateLimit: writeLimit
// },
// onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
// schema: {
// description: "Rotate CRL of the CA",
// params: z.object({
// caId: z.string().trim()
// }),
// response: {
// 200: z.object({
// message: z.string()
// })
// }
// },
// handler: async (req) => {
// await server.services.certificateAuthority.rotateCaCrl({
// caId: req.params.caId,
// actor: req.permission.type,
// actorId: req.permission.id,
// actorAuthMethod: req.permission.authMethod,
// actorOrgId: req.permission.orgId
// });
// return {
// message: "Successfully rotated CA CRL"
// };
// }
// });
};

@ -131,7 +131,7 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
.default("/")
.transform(removeTrailingSlash)
.describe(DYNAMIC_SECRET_LEASES.RENEW.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.environmentSlug)
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.RENEW.ttl)
}),
response: {
200: z.object({

@ -77,39 +77,6 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
server.route({
method: "PATCH",
url: "/:name",
@ -270,7 +237,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dynamicSecretCfgs = await server.services.dynamicSecret.listDynamicSecretsByEnv({
const dynamicSecretCfgs = await server.services.dynamicSecret.list({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,

@ -26,7 +26,7 @@ const sanitizedExternalSchemaForGetAll = KmsKeysSchema.pick({
isDisabled: true,
createdAt: true,
updatedAt: true,
name: true
slug: true
})
.extend({
externalKms: ExternalKmsSchema.pick({
@ -57,7 +57,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
name: z.string().min(1).trim().toLowerCase(),
slug: z.string().min(1).trim().toLowerCase(),
description: z.string().trim().optional(),
provider: ExternalKmsInputSchema
}),
@ -74,7 +74,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.name,
slug: req.body.slug,
provider: req.body.provider,
description: req.body.description
});
@ -87,7 +87,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
metadata: {
kmsId: externalKms.id,
provider: req.body.provider.type,
name: req.body.name,
slug: req.body.slug,
description: req.body.description
}
}
@ -108,7 +108,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
id: z.string().trim().min(1)
}),
body: z.object({
name: z.string().min(1).trim().toLowerCase().optional(),
slug: z.string().min(1).trim().toLowerCase().optional(),
description: z.string().trim().optional(),
provider: ExternalKmsInputUpdateSchema
}),
@ -125,7 +125,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.name,
slug: req.body.slug,
provider: req.body.provider,
description: req.body.description,
id: req.params.id
@ -139,7 +139,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
metadata: {
kmsId: externalKms.id,
provider: req.body.provider.type,
name: req.body.name,
slug: req.body.slug,
description: req.body.description
}
}
@ -182,7 +182,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
type: EventType.DELETE_KMS,
metadata: {
kmsId: externalKms.id,
name: externalKms.name
slug: externalKms.slug
}
}
});
@ -224,7 +224,7 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
type: EventType.GET_KMS,
metadata: {
kmsId: externalKms.id,
name: externalKms.name
slug: externalKms.slug
}
}
});
@ -260,13 +260,13 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/name/:name",
url: "/slug/:slug",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
name: z.string().trim().min(1)
slug: z.string().trim().min(1)
}),
response: {
200: z.object({
@ -276,12 +276,12 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const externalKms = await server.services.externalKms.findByName({
const externalKms = await server.services.externalKms.findBySlug({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.params.name
slug: req.params.slug
});
return { externalKms };
}

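The external KMS hunks above rename the identifier field between name and slug on the create, update, and lookup routes. A small sketch of how the create body differs on the two sides of that rename; the values are illustrative, and the provider shape comes from ExternalKmsInputSchema, which is not shown in this diff:

// Illustrative payloads only; provider is a placeholder for ExternalKmsInputSchema.
const provider = {} as Record<string, unknown>;

const createBodyWithName = { name: "aws-prod-kms", description: "Primary AWS KMS", provider };
const createBodyWithSlug = { slug: "aws-prod-kms", description: "Primary AWS KMS", provider };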
@ -10,7 +10,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
method: "POST",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
@ -43,59 +43,12 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
});
server.route({
url: "/:id",
method: "GET",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
params: z.object({
id: z.string().trim().describe(GROUPS.GET_BY_ID.id)
}),
response: {
200: GroupsSchema
}
},
handler: async (req) => {
const group = await server.services.group.getGroupById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
return group;
}
});
server.route({
url: "/",
method: "GET",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
response: {
200: GroupsSchema.array()
}
},
handler: async (req) => {
const groups = await server.services.org.getOrgGroups({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return groups;
}
});
server.route({
url: "/:id",
url: "/:currentSlug",
method: "PATCH",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
id: z.string().trim().describe(GROUPS.UPDATE.id)
currentSlug: z.string().trim().describe(GROUPS.UPDATE.currentSlug)
}),
body: z
.object({
@ -117,7 +70,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
},
handler: async (req) => {
const group = await server.services.group.updateGroup({
id: req.params.id,
currentSlug: req.params.currentSlug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
@ -130,12 +83,12 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
});
server.route({
url: "/:id",
url: "/:slug",
method: "DELETE",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
id: z.string().trim().describe(GROUPS.DELETE.id)
slug: z.string().trim().describe(GROUPS.DELETE.slug)
}),
response: {
200: GroupsSchema
@ -143,7 +96,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
},
handler: async (req) => {
const group = await server.services.group.deleteGroup({
id: req.params.id,
groupSlug: req.params.slug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
@ -156,11 +109,11 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:id/users",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
url: "/:slug/users",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
id: z.string().trim().describe(GROUPS.LIST_USERS.id)
slug: z.string().trim().describe(GROUPS.LIST_USERS.slug)
}),
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
@ -188,25 +141,24 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
},
handler: async (req) => {
const { users, totalCount } = await server.services.group.listGroupUsers({
id: req.params.id,
groupSlug: req.params.slug,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { users, totalCount };
}
});
server.route({
method: "POST",
url: "/:id/users/:username",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
url: "/:slug/users/:username",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
id: z.string().trim().describe(GROUPS.ADD_USER.id),
slug: z.string().trim().describe(GROUPS.ADD_USER.slug),
username: z.string().trim().describe(GROUPS.ADD_USER.username)
}),
response: {
@ -221,7 +173,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
},
handler: async (req) => {
const user = await server.services.group.addUserToGroup({
id: req.params.id,
groupSlug: req.params.slug,
username: req.params.username,
actor: req.permission.type,
actorId: req.permission.id,
@ -235,11 +187,11 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
server.route({
method: "DELETE",
url: "/:id/users/:username",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
url: "/:slug/users/:username",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
id: z.string().trim().describe(GROUPS.DELETE_USER.id),
slug: z.string().trim().describe(GROUPS.DELETE_USER.slug),
username: z.string().trim().describe(GROUPS.DELETE_USER.username)
}),
response: {
@ -254,7 +206,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
},
handler: async (req) => {
const user = await server.services.group.removeUserFromGroup({
id: req.params.id,
groupSlug: req.params.slug,
username: req.params.username,
actor: req.permission.type,
actorId: req.permission.id,

@ -5,7 +5,7 @@ import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -61,7 +61,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
handler: async (req) => {
const { permissions, privilegePermission } = req.body;
if (!permissions && !privilegePermission) {
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
}
const permission = privilegePermission
@ -140,7 +140,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
handler: async (req) => {
const { permissions, privilegePermission } = req.body;
if (!permissions && !privilegePermission) {
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
}
const permission = privilegePermission
@ -224,7 +224,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
handler: async (req) => {
const { permissions, privilegePermission, ...updatedInfo } = req.body.privilegeDetails;
if (!permissions && !privilegePermission) {
throw new UnauthorizedError({ message: "Permission or privilegePermission must be provided" });
throw new BadRequestError({ message: "Permission or privilegePermission must be provided" });
}
const permission = privilegePermission

@ -61,7 +61,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(
async (pkiRouter) => {
await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
await pkiRouter.register(registerCaCrlRouter, { prefix: "/ca" });
},
{ prefix: "/pki" }
);

@ -1,6 +1,6 @@
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
// TODO(akhilmhdh): Fix this when license service gets it type
// TODO(akhilmhdh): Fix this when licence service gets it type
import { z } from "zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";

@ -3,11 +3,10 @@ import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
import { ProjectPermissionSchema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { ProjectPermissionSchema, SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
@ -102,7 +101,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
message: "Slug must be a valid slug"
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {

@ -1,6 +1,6 @@
import { z } from "zod";
import { PartitionedAuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas";
import { AuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { AUDIT_LOGS, PROJECTS } from "@app/lib/api-docs";
import { getLastMidnightDateISO, removeTrailingSlash } from "@app/lib/fn";
@ -87,12 +87,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
});
/*
* Daniel: This endpoint is no longer in use.
* We are keeping it for now because it has been exposed in our public API docs for a while, so by removing it we are likely to break users' workflows.
*
* Please refer to the new endpoint, GET /api/v1/organization/audit-logs, for the same (and more) functionality.
*/
server.route({
method: "GET",
url: "/:workspaceId/audit-logs",
@ -107,7 +101,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
],
params: z.object({
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.projectId)
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.workspaceId)
}),
querystring: z.object({
eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType),
@ -120,7 +114,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
auditLogs: PartitionedAuditLogsSchema.omit({
auditLogs: AuditLogsSchema.omit({
eventMetadata: true,
eventType: true,
actor: true,
@ -128,12 +122,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
.merge(
z.object({
project: z
.object({
name: z.string(),
slug: z.string()
})
.optional(),
event: z.object({
type: z.string(),
metadata: z.any()
@ -150,20 +138,16 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogs = await server.services.auditLog.listAuditLogs({
const auditLogs = await server.services.auditLog.listProjectAuditLogs({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
filter: {
...req.query,
projectId: req.params.workspaceId,
endDate: req.query.endDate,
startDate: req.query.startDate || getLastMidnightDateISO(),
auditLogActorId: req.query.actor,
eventType: req.query.eventType ? [req.query.eventType] : undefined
}
projectId: req.params.workspaceId,
...req.query,
endDate: req.query.endDate,
startDate: req.query.startDate || getLastMidnightDateISO(),
auditLogActor: req.query.actor,
actor: req.permission.type
});
return { auditLogs };
}
@ -203,7 +187,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
200: z.object({
secretManagerKmsKey: z.object({
id: z.string(),
name: z.string(),
slug: z.string(),
isExternal: z.boolean()
})
})
@ -243,7 +227,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
200: z.object({
secretManagerKmsKey: z.object({
id: z.string(),
name: z.string(),
slug: z.string(),
isExternal: z.boolean()
})
})
@ -268,7 +252,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
metadata: {
secretManagerKmsKey: {
id: secretManagerKmsKey.id,
name: secretManagerKmsKey.name
slug: secretManagerKmsKey.slug
}
}
}
@ -336,7 +320,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
200: z.object({
secretManagerKmsKey: z.object({
id: z.string(),
name: z.string(),
slug: z.string(),
isExternal: z.boolean()
})
})

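The comment above marks GET /:workspaceId/audit-logs as kept only for backward compatibility, pointing callers to GET /api/v1/organization/audit-logs instead. A hedged caller sketch for the project-scoped route, using only the query parameters visible in this hunk; the mount path is an assumption:

// Hypothetical export call for GET /:workspaceId/audit-logs (mount path assumed).
export async function exportProjectAuditLogs(
  accessToken: string,
  workspaceId: string,
  query: { eventType?: string; startDate?: string; endDate?: string; actor?: string } = {}
) {
  const url = new URL(`https://app.infisical.com/api/v1/workspace/${workspaceId}/audit-logs`);
  for (const [key, value] of Object.entries(query)) {
    if (value) url.searchParams.set(key, value);
  }
  const res = await fetch(url, { headers: { Authorization: `Bearer ${accessToken}` } });
  if (!res.ok) throw new Error(`Audit log export failed: ${res.status}`);
  return (await res.json()) as { auditLogs: unknown[] };
}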
@ -1,7 +1,7 @@
import { z } from "zod";
import { RateLimitSchema } from "@app/db/schemas";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -29,7 +29,7 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
handler: async () => {
const rateLimit = await server.services.rateLimit.getRateLimits();
if (!rateLimit) {
throw new NotFoundError({
throw new BadRequestError({
name: "Get Rate Limit Error",
message: "Rate limit configuration does not exist."
});
@ -58,6 +58,7 @@ export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
}),
response: {

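Only the tail of the rate limit update body is visible in the hunk above; one side of the diff adds a creationLimit field alongside the others. An illustrative partial body built from just the fields shown here:

// Partial body only; the full schema contains additional fields not visible in this hunk.
const rateLimitUpdate = {
  authRateLimit: 60,
  inviteUserRateLimit: 30,
  mfaRateLimit: 20,
  creationLimit: 30, // present only on one side of this diff
  publicEndpointLimit: 60
};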
@ -61,7 +61,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
id: samlConfigId
};
} else {
throw new BadRequestError({ message: "Missing sso identifier or org slug" });
throw new BadRequestError({ message: "Missing sso identitier or org slug" });
}
const ssoConfig = await server.services.saml.getSaml(ssoLookupDetails);
@ -100,52 +100,25 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
async (req, profile, cb) => {
try {
if (!profile) throw new BadRequestError({ message: "Missing profile" });
const email = profile?.email ?? (profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved
const email =
profile?.email ??
// entra sends data in this format
(profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email"] as string) ??
(profile?.emailAddress as string); // emailRippling is added because in Rippling the field `email` reserved\
const firstName = (profile.firstName ??
// entra sends data in this format
profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/firstName"]) as string;
const lastName =
profile.lastName ?? profile["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/lastName"];
if (!email || !firstName) {
logger.info(
{
err: new Error("Invalid saml request. Missing email or first name"),
profile
},
`email: ${email} firstName: ${profile.firstName as string}`
);
if (!email || !profile.firstName) {
throw new BadRequestError({ message: "Invalid request. Missing email or first name" });
}
const userMetadata = Object.keys(profile.attributes || {})
.map((key) => {
// for the ones like in format: http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email
const formatedKey = key.startsWith("http") ? key.split("/").at(-1) || "" : key;
return { key: formatedKey, value: String((profile.attributes as Record<string, string>)[key]) };
})
.filter((el) => el.key && !["email", "firstName", "lastName"].includes(el.key));
const { isUserCompleted, providerAuthToken } = await server.services.saml.samlLogin({
externalId: profile.nameID,
email,
firstName,
lastName: lastName as string,
firstName: profile.firstName as string,
lastName: profile.lastName as string,
relayState: (req.body as { RelayState?: string }).RelayState,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string,
metadata: userMetadata
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string
});
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
logger.error(error);
cb(error as Error);
cb(null, {});
}
},
() => {}

Some files were not shown because too many files have changed in this diff.