mirror of https://github.com/Infisical/infisical.git synced 2025-03-21 08:39:48 +00:00

Compare commits


11 Commits

Author SHA1 Message Date
5d07aef973 Update build-binaries.yml 2024-07-23 13:20:39 +02:00
83e83ced99 Update build-binaries.yml 2024-07-23 13:13:00 +02:00
c51f071765 Update build-binaries.yml 2024-07-23 13:11:14 +02:00
467e02d94b Update build-binaries.yml 2024-07-23 12:53:38 +02:00
477ad41ff1 Update build-binaries.yml 2024-07-23 12:34:58 +02:00
e527a8b496 Update build-binaries.yml 2024-07-23 12:31:07 +02:00
3f624e833c Test 2024-07-23 12:19:53 +02:00
e666c4cbad Update build-binaries.yml 2024-07-23 12:06:44 +02:00
d6a280d2b4 Update build-binaries.yml 2024-07-23 11:49:03 +02:00
51b76368ca Update build-binaries.yml 2024-07-23 11:23:09 +02:00
041706dd5d Update build-binaries.yml 2024-07-23 11:21:43 +02:00
1269 changed files with 21260 additions and 77155 deletions
.env.example
.env.migration.example
.github
.gitignore
Makefile
README.md
backend
e2e-test
package-lock.json
package.json
scripts
src
@types
db
auditlog-knexfile.ts
index.ts
instance.ts
manual-migrations
migrations
schemas
seed-data.ts
seeds
ee
routes
services
access-approval-policy
access-approval-request
audit-log-stream
audit-log
certificate-authority-crl
certificate-est
dynamic-secret-lease
dynamic-secret
external-kms
group
identity-project-additional-privilege
ldap-config
license
oidc
permission
project-user-additional-privilege
rate-limit
saml-config
scim
secret-approval-policy
secret-approval-request
secret-replication
secret-rotation
secret-scanning
secret-snapshot
keystore
lib
main.ts
queue
server
services
access-token-queue
api-key
auth-token
auth
certificate-authority
certificate-template
certificate
cmek
external-migration
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration
kms
org-admin
org-membership
org
pki-alert
pki-collection
project-bot
project-env
project-membership
project-role
project
resource-cleanup
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-tag
secret-v2-bridge
secret
service-token
slack
smtp
super-admin
telemetry
user
webhook
workflow-integration
vitest.e2e.config.ts
cli
company
docker-compose.dev.yml
docs
api-reference/endpoints
changelog
cli
documentation
images
guides/import-envkey
integrations
platform
admin-panels
dynamic-secrets
kms
mfa/entra
pki
workflow-integrations/slack-integration
sso
integrations
internals
mint.json
sdks
self-hosting
frontend
next.config.js
package-lock.json
package.json
public
data
images/integrations
src
components
context
OrgPermissionContext
ProjectPermissionContext
UserContext
WorkspaceContext
index.tsx
helpers
hooks
api
accessApproval
admin
apiKeys
auditLogs
auth
ca
certificateTemplates
certificates
cmeks
dashboard
dynamicSecret
generic
groups
identities
index.tsx
integrationAuth
integrations
kms
migration
oidcConfig
orgAdmin
organization
pkiAlerts
pkiCollections
rateLimit
roles
secretApproval
secretApprovalRequest
secretFolders
secretImports
secretRotation
secretSharing
secretSnapshots
secrets
subscriptions
tags
users
workflowIntegrations
workspace
index.ts
useDebounce.tsx
usePagination.tsx
usePopUp.tsx
useToggle.tsx
utils
layouts/AppLayout
pages
_app.tsx
integrations
aws-parameter-store
aws-secret-manager
azure-devops
circleci
databricks
details
github
hashicorp-vault
login
org/[id]
admin
overview
project/[id]
audit-logs
ca/[caId]
kms
pki-collections/[collectionId]
roles/[roleSlug]
share-secret
shared/secret/[id]
signupinvite.tsx
views
IntegrationsPage
Login
Login.utils.tsx
components
InitialStep
MFAStep
PasswordStep
Org
OrgAdminPage
Project
AuditLogsPage
CaPage
CertificatesPage
KmsPage
MembersPage
PkiCollectionPage
RolePage
Types
SecretApprovalPage/components
SecretMainPage
SecretOverviewPage
SecretRotationPage
SecretRotationPage.tsx
components/CreateRotationForm/steps
Settings
ShareSecretPage/components
ShareSecretPublicPage
Signup/components/UserInfoSSOStep
ViewSecretPublicPage
admin/DashboardPage
helm-charts
k8-operator/controllers
nginx
standalone-entrypoint.sh

@@ -70,5 +70,3 @@ NEXT_PUBLIC_CAPTCHA_SITE_KEY=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=

@@ -1,2 +1 @@
DB_CONNECTION_URI=
AUDIT_LOGS_DB_CONNECTION_URI=

@@ -6,7 +6,6 @@
- [ ] Bug fix
- [ ] New feature
- [ ] Improvement
- [ ] Breaking change
- [ ] Documentation

@@ -7,22 +7,23 @@ on:
description: "Version number"
required: true
type: string
defaults:
run:
working-directory: ./backend
jobs:
build-and-deploy:
runs-on: ubuntu-20.04
strategy:
matrix:
arch: [x64, arm64]
os: [linux, win]
os: [linux]
include:
- os: linux
target: node20-linux
- os: win
target: node20-win
runs-on: ${{ (matrix.arch == 'arm64' && matrix.os == 'linux') && 'ubuntu24-arm64' || 'ubuntu-latest' }}
steps:
- name: Checkout code
@@ -33,26 +34,47 @@ jobs:
with:
node-version: 20
- name: Set up QEMU
if: matrix.arch == 'arm64' && matrix.os == 'linux'
uses: docker/setup-qemu-action@v2
- name: Install dependencies and build (x64)
if: matrix.arch == 'x64'
run: |
npm install
npm install --prefix ../frontend
npm run binary:build
- name: Install dependencies and build (arm64)
if: matrix.arch == 'arm64' && matrix.os == 'linux'
run: |
docker run --rm -v ${{ github.workspace }}:/workspace --platform linux/arm64 node:20 bash -c "
cd /workspace/backend && npm install &&
cd /workspace/frontend && npm install && npm run build &&
cd /workspace/backend && npm run binary:build
"
- name: Install pkg
run: npm install -g @yao-pkg/pkg
- name: Install dependencies (backend)
run: npm install
- name: Install dependencies (frontend)
run: npm install --prefix ../frontend
- name: Prerequisites for pkg
run: npm run binary:build
- name: Package into node binary
- name: Package into node binary (x64)
if: matrix.arch == 'x64'
run: |
if [ "${{ matrix.os }}" != "linux" ]; then
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }} .
else
pkg --no-bytecode --public-packages "*" --public --compress GZip --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
pkg --no-bytecode --public-packages "*" --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
fi
- name: Package into node binary (arm64)
if: matrix.arch == 'arm64' && matrix.os == 'linux'
run: |
docker run --rm -v ${{ github.workspace }}:/workspace --platform linux/arm64 node:20 bash -c "
cd /workspace/backend &&
npm install -g @yao-pkg/pkg &&
pkg --no-bytecode --public-packages '*' --public --target ${{ matrix.target }}-${{ matrix.arch }} --output ./binary/infisical-core .
"
# Set up .deb package structure (Debian/Ubuntu only)
- name: Set up .deb package structure
if: matrix.os == 'linux'
@@ -82,108 +104,28 @@ jobs:
dpkg-deb --build infisical-core
mv infisical-core.deb ./binary/infisical-core-${{matrix.arch}}.deb
### RPM
# Set up .rpm package structure
- name: Set up .rpm package structure
if: matrix.os == 'linux'
run: |
mkdir -p infisical-core-rpm/usr/local/bin
cp ./binary/infisical-core infisical-core-rpm/usr/local/bin/
chmod +x infisical-core-rpm/usr/local/bin/infisical-core
# Install RPM build tools
- name: Install RPM build tools
if: matrix.os == 'linux'
run: sudo apt-get update && sudo apt-get install -y rpm
# Create .spec file for RPM
- name: Create .spec file for RPM
if: matrix.os == 'linux'
run: |
cat <<EOF > infisical-core.spec
%global _enable_debug_package 0
%global debug_package %{nil}
%global __os_install_post /usr/lib/rpm/brp-compress %{nil}
Name: infisical-core
Version: ${{ github.event.inputs.version }}
Release: 1%{?dist}
Summary: Infisical Core standalone executable
License: Proprietary
URL: https://app.infisical.com
%description
Infisical Core standalone executable (app.infisical.com)
%install
mkdir -p %{buildroot}/usr/local/bin
cp %{_sourcedir}/infisical-core %{buildroot}/usr/local/bin/
%files
/usr/local/bin/infisical-core
%pre
%post
%preun
%postun
EOF
# Build .rpm file
- name: Build .rpm package
if: matrix.os == 'linux'
run: |
# Create necessary directories
mkdir -p rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
# Copy the binary directly to SOURCES
cp ./binary/infisical-core rpmbuild/SOURCES/
# Run rpmbuild with verbose output
rpmbuild -vv -bb \
--define "_topdir $(pwd)/rpmbuild" \
--define "_sourcedir $(pwd)/rpmbuild/SOURCES" \
--define "_rpmdir $(pwd)/rpmbuild/RPMS" \
--target ${{ matrix.arch == 'x64' && 'x86_64' || 'aarch64' }} \
infisical-core.spec
# Try to find the RPM file
find rpmbuild -name "*.rpm"
# Move the RPM file if found
if [ -n "$(find rpmbuild -name '*.rpm')" ]; then
mv $(find rpmbuild -name '*.rpm') ./binary/infisical-core-${{matrix.arch}}.rpm
else
echo "RPM file not found!"
exit 1
fi
- uses: actions/setup-python@v4
with:
python-version: "3.x" # Specify the Python version you need
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install --upgrade cloudsmith-cli
- run: pip install --upgrade cloudsmith-cli
# Publish .deb file to Cloudsmith (Debian/Ubuntu only)
- name: Publish to Cloudsmith (Debian/Ubuntu)
if: matrix.os == 'linux'
if: matrix.os == 'linux' && matrix.arch == 'TEMP_DISABLED'
working-directory: ./backend
run: cloudsmith push deb --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.deb
# Publish .rpm file to Cloudsmith (Red Hat-based systems only)
- name: Publish .rpm to Cloudsmith
if: matrix.os == 'linux'
working-directory: ./backend
run: cloudsmith push rpm --republish --no-wait-for-sync --api-key=${{ secrets.CLOUDSMITH_API_KEY }} infisical/infisical-core/any-distro/any-version ./binary/infisical-core-${{ matrix.arch }}.rpm
# Publish .exe file to Cloudsmith (Windows only)
- name: Publish to Cloudsmith (Windows)
if: matrix.os == 'win'
if: matrix.os == 'win' && matrix.arch == 'TEMP_DISABLED'
working-directory: ./backend
run: cloudsmith push raw infisical/infisical-core ./binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.exe --republish --no-wait-for-sync --version ${{ github.event.inputs.version }} --api-key ${{ secrets.CLOUDSMITH_API_KEY }}
- name: List files in resources folders
run: |
echo "Listing files in backend:"
ls -R ./binary
- uses: actions/upload-artifact@v4
if: matrix.os == 'linux' && matrix.arch == 'arm64'
with:
name: test-binary
path: backend/binary/infisical-core-${{ matrix.os }}-${{ matrix.arch }}.deb

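The build-binaries.yml changes above narrow the matrix to Linux and add an arm64 build path that runs inside an emulated container, packaged with @yao-pkg/pkg. A rough local reproduction of that arm64 path might look like the sketch below; it assumes Docker with QEMU/binfmt emulation available (the workflow relies on docker/setup-qemu-action for this), a repository checkout in the current directory, and it folds the workflow's two docker run steps into one.

# Register arm64 emulation (assumption: the commonly used binfmt helper image; CI uses docker/setup-qemu-action instead).
docker run --privileged --rm tonistiigi/binfmt --install arm64

# Build and package infisical-core for linux/arm64 in an emulated node:20 container,
# mirroring the "Install dependencies and build (arm64)" and "Package into node binary (arm64)" steps.
docker run --rm -v "$PWD":/workspace --platform linux/arm64 node:20 bash -c "
  cd /workspace/backend && npm install &&
  cd /workspace/frontend && npm install && npm run build &&
  cd /workspace/backend && npm run binary:build &&
  npm install -g @yao-pkg/pkg &&
  pkg --no-bytecode --public-packages '*' --public --target node20-linux-arm64 --output ./binary/infisical-core .
"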
@@ -6,15 +6,9 @@ permissions:
contents: read
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build backend image
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
@@ -127,7 +121,6 @@ jobs:
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install

@@ -22,14 +22,14 @@ jobs:
# uncomment this when testing locally using nektos/act
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 📦Build the latest image
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
@@ -72,6 +72,6 @@ jobs:
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api

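The only functional difference in the workflow hunks above and below is the Compose invocation: the Docker CLI's built-in "docker compose" plugin versus the standalone "docker-compose" binary that KengoTODA/actions-setup-docker-compose installs for local nektos/act runs. For this usage the two forms do the same thing:

docker compose -f docker-compose.dev.yml up -d db redis     # Compose v2 CLI plugin
docker-compose -f docker-compose.dev.yml up -d db redis     # standalone docker-compose binary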
@@ -20,7 +20,7 @@ jobs:
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker compose` for local simulations
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
@@ -33,7 +33,7 @@ jobs:
run: npm install
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
run: npm run test:e2e
working-directory: backend
@@ -44,4 +44,4 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker-compose -f "docker-compose.dev.yml" down

@@ -50,6 +50,6 @@ jobs:
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

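This hunk re-enables the INFISICAL_VAULT_FILE_PASSPHRASE secret for the Go CLI test run. A hedged sketch of running the same suite locally follows; the cli working directory and the placeholder values are assumptions, not something this diff specifies.

# Values below are placeholders; the workflow supplies them from repository secrets.
export CLI_TESTS_USER_EMAIL="<email>"
export CLI_TESTS_USER_PASSWORD="<password>"
export INFISICAL_VAULT_FILE_PASSPHRASE="<passphrase>"
cd cli && go test -v -count=1 ./test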
.gitignore vendored

@@ -63,7 +63,6 @@ yarn-error.log*
# Editor specific
.vscode/*
.idea/*
frontend-build

@@ -15,16 +15,3 @@ up-prod:
down:
docker compose -f docker-compose.dev.yml down
reviewable-ui:
cd frontend && \
npm run lint:fix && \
npm run type:check
reviewable-api:
cd backend && \
npm run lint:fix && \
npm run type:check
reviewable: reviewable-ui reviewable-api

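The Makefile hunk removes the reviewable helper targets; since their recipes are shown above, the manual equivalent is simply the following sketch:

cd frontend && npm run lint:fix && npm run type:check
cd ../backend && npm run lint:fix && npm run type:check
# or, on a checkout that still has the targets:
make reviewable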
File diff suppressed because one or more lines are too long

@@ -123,7 +123,7 @@ describe("Project Environment Router", async () => {
id: deletedProjectEnvironment.id,
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 5,
position: 4,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})

@@ -1,36 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-approvals`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
secretPath: dto.secretPath,
approvers: dto.approvers,
approvals: dto.approvals
}
});
expect(res.statusCode).toBe(200);
return res.json().approval;
};
describe("Secret approval policy router", async () => {
test("Create policy", async () => {
const policy = await createPolicy({
secretPath: "/",
approvals: 1,
approvers: [{id:seedData1.id, type: ApproverType.User}],
name: "test-policy"
});
expect(policy.name).toBe("test-policy");
});
});

@@ -1,61 +1,73 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
const createSecretImport = async (importPath: string, importEnv: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
const deleteSecretImport = async (id: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "prod", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// check for default environments
const payload = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath,
importEnv
});
const payload = await createSecretImport(importPath, importEnv);
expect(payload).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath,
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: importEnv,
slug: expect.any(String),
id: expect.any(String)
})
})
);
await deleteSecretImport({
id: payload.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(payload.id);
});
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
@@ -77,60 +89,25 @@ describe("Secret Import Router", async () => {
expect.arrayContaining([
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
id: expect.any(String)
})
}),
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "staging",
slug: expect.any(String),
id: expect.any(String)
})
})
])
);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Update secret import position", async () => {
const prodImportDetails = { path: "/", envSlug: "prod" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: prodImportDetails.path,
importEnv: prodImportDetails.envSlug
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: stagingImportDetails.path,
importEnv: stagingImportDetails.envSlug
});
const createdImport1 = await createSecretImport(prodImportDetails.path, prodImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
method: "PATCH",
@@ -184,55 +161,22 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "prod"
});
const createdImport2 = await createSecretImport({
authToken: jwtAuthToken,
secretPath: "/",
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.project.id,
importPath: "/",
importEnv: "staging"
});
const deletedImport = await deleteSecretImport({
id: createdImport1.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
const createdImport1 = await createSecretImport("/", "prod");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
// check for default environments
expect(deletedImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: "/",
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: "prod",
slug: expect.any(String),
id: expect.any(String)
})
})
@@ -257,552 +201,6 @@ describe("Secret Import Router", async () => {
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
await deleteSecretImport({
id: createdImport2.id,
workspaceId: seedData1.project.id,
environmentSlug: seedData1.environment.slug,
secretPath: "/",
authToken: jwtAuthToken
});
await deleteSecretImport(createdImport2.id);
});
});
// dev <- stage <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import waterfall pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import multiple destination to one source pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging"
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod"
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);
// dev -> stage, prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret import one source to multiple destination pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const stageImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
const prodImportFromDev = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: seedData1.environment.slug
});
return async () => {
await deleteSecretImport({
id: prodImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: stageImportFromDev.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
const stagingSecret = await getSecretByNameV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(stagingSecret.secretKey).toBe("STAGING_KEY");
expect(stagingSecret.secretValue).toBe("stage-value");
const prodSecret = await getSecretByNameV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(prodSecret.secretKey).toBe("PROD_KEY");
expect(prodSecret.secretValue).toBe("prod-value");
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
}
);

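The secret-import tests above drive the API through testServer.inject; the same create call against a running instance would look roughly like this sketch (the localhost:4000 address is taken from the API-check workflow earlier in this diff; the token, project ID, and environment slugs are placeholders):

# Create a secret import in the dev environment that pulls from prod at path "/".
curl -X POST http://localhost:4000/api/v1/secret-imports \
  -H "Authorization: Bearer $INFISICAL_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
        "workspaceId": "<project-id>",
        "environment": "dev",
        "path": "/",
        "import": { "environment": "prod", "path": "/" }
      }'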
@@ -1,406 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
// dev <- stage <- prod
describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
"Secret replication waterfall pattern testing - %secretPath",
({ secretPath: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const stageImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: stageImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging",
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check one level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
});
test("Check two level imported secret exist", async () => {
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
expect(secret.secretKey).toBe("PROD_KEY");
expect(secret.secretValue).toBe("prod-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);
// dev <- stage, dev <- prod
describe.each([{ path: "/" }, { path: "/deep" }])(
"Secret replication 1-N pattern testing - %path",
({ path: testSuitePath }) => {
beforeAll(async () => {
let prodFolder: { id: string };
let stagingFolder: { id: string };
let devFolder: { id: string };
if (testSuitePath !== "/") {
prodFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
stagingFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
devFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: "/",
name: "deep"
});
}
const devImportFromStage = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "staging",
isReplication: true
});
const devImportFromProd = await createSecretImport({
authToken: jwtAuthToken,
secretPath: testSuitePath,
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
importPath: testSuitePath,
importEnv: "prod",
isReplication: true
});
return async () => {
await deleteSecretImport({
id: devImportFromProd.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
await deleteSecretImport({
id: devImportFromStage.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
if (prodFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "prod"
});
}
if (stagingFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: stagingFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: "staging"
});
}
if (devFolder) {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: devFolder.id,
workspaceId: seedData1.projectV3.id,
environmentSlug: seedData1.environment.slug
});
}
};
});
test("Check imported secret exist", async () => {
await createSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY",
value: "stage-value"
});
await createSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY",
value: "prod-value"
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const secret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
expect(secret.secretKey).toBe("STAGING_KEY");
expect(secret.secretValue).toBe("stage-value");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "STAGING_KEY",
secretValue: "stage-value"
})
])
}),
expect.objectContaining({
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "PROD_KEY",
secretValue: "prod-value"
})
])
})
])
);
await deleteSecretV2({
environmentSlug: "staging",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "STAGING_KEY"
});
await deleteSecretV2({
environmentSlug: "prod",
workspaceId: seedData1.projectV3.id,
secretPath: testSuitePath,
authToken: jwtAuthToken,
key: "PROD_KEY"
});
});
},
{ timeout: 30000 }
);

@@ -510,7 +510,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
@@ -532,7 +532,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(403);
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
@@ -557,7 +557,7 @@ describe("Service token fail cases", async () => {
authorization: `Bearer ${serviceToken}`
}
});
expect(writeSecrets.statusCode).toBe(403);
expect(writeSecrets.statusCode).toBe(401);
expect(writeSecrets.json().error).toBe("PermissionDenied");
// but read access should still work fine

@@ -1,330 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretImport, deleteSecretImport } from "e2e-test/testUtils/secret-imports";
import { createSecretV2, deleteSecretV2, getSecretByNameV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret expansion", () => {
const projectId = seedData1.projectV3.id;
beforeAll(async () => {
const prodRootFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/",
name: "deep"
});
await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
name: "nested"
});
return async () => {
await deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id: prodRootFolder.id,
workspaceId: projectId,
environmentSlug: "prod"
});
};
});
test("Local secret reference", async () => {
const secrets = [
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "HELLO",
value: "world"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST",
// eslint-disable-next-line
value: "hello ${HELLO}"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "TEST"
});
expect(expandedSecret.secretValue).toBe("hello world");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "TEST",
secretValue: "hello world"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Cross environment secret reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello ${prod.deep.DEEP_KEY_1} ${prod.deep.nested.NESTED_KEY_2}"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY"
});
expect(expandedSecret.secretValue).toBe("hello testing secret reference");
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: "KEY",
secretValue: "hello testing secret reference"
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
});
test("Non replicated secret import secret expansion on local reference and nested reference", async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested"
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: "/deep/nested",
environment: "prod",
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
});
test(
"Replicated secret import secret expansion on local reference and nested reference",
async () => {
const secrets = [
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep",
authToken: jwtAuthToken,
key: "DEEP_KEY_1",
value: "testing"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_1",
value: "reference"
},
{
environmentSlug: "prod",
workspaceId: projectId,
secretPath: "/deep/nested",
authToken: jwtAuthToken,
key: "NESTED_KEY_2",
// eslint-disable-next-line
value: "secret ${NESTED_KEY_1} ${prod.deep.DEEP_KEY_1}"
},
{
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
key: "KEY",
// eslint-disable-next-line
value: "hello world"
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken,
importEnv: "prod",
importPath: "/deep/nested",
isReplication: true
});
// wait for 5 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
});
const listSecrets = await getSecretsV2({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
secretPath: "/",
authToken: jwtAuthToken
});
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
environment: seedData1.environment.slug,
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",
secretValue: "reference"
}),
expect.objectContaining({
secretKey: "NESTED_KEY_2",
secretValue: "secret reference testing"
})
])
})
])
);
await Promise.all(secrets.map((el) => deleteSecretV2(el)));
await deleteSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
authToken: jwtAuthToken,
id: secretImportFromProdToDev.id,
secretPath: "/"
});
},
{ timeout: 10000 }
);
});

@@ -1,577 +0,0 @@
import { SecretType } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";
import { AuthMode } from "@app/services/auth/auth-type";
type TRawSecret = {
secretKey: string;
secretValue: string;
secretComment?: string;
version: number;
};
const createSecret = async (dto: { path: string; key: string; value: string; comment: string; type?: SecretType }) => {
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: dto.type || SecretType.Shared,
secretPath: dto.path,
secretKey: dto.key,
secretValue: dto.value,
secretComment: dto.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret as TRawSecret;
};
const deleteSecret = async (dto: { path: string; key: string }) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: dto.path
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret as TRawSecret;
};
describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }])(
"Secret V2 Architecture - $auth mode",
async ({ auth }) => {
let folderId = "";
let authToken = "";
const secretTestCases = [
{
path: "/",
secret: {
key: "SEC1",
value: "something-secret",
comment: "some comment"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "NESTED-SEC1",
value: "something-secret",
comment: "some comment"
}
},
{
path: "/",
secret: {
key: "secret-key-2",
value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
comment:
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "secret-key-3",
value: `-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa6eeFk+cMVqFn
hoVQDYgn2Ptp5Azysr2UPq6P73pCL9BzUtOXKZROqDyGehzzfg3wE2KdYU1Jk5Uq
fP0ZOWDIlM2SaVCSI3FW32o5+ZiggjpqcVdLFc/PS0S/ZdSmpPd8h11iO2brtIAI
ugTW8fcKlGSNUwx9aFmE7A6JnTRliTxB1l6QaC+YAwTK39VgeVH2gDSWC407aS15
QobAkaBKKmFkzB5D7i2ZJwt+uXJV/rbLmyDmtnw0lubciGn7NX9wbYef180fisqT
aPNAz0nPKk0fFH2Wd5MZixNGbrrpDA+FCYvI5doThZyT2hpj08qWP07oXXCAqw46
IEupNSILAgMBAAECggEBAIJb5KzeaiZS3B3O8G4OBQ5rJB3WfyLYUHnoSWLsBbie
nc392/ovThLmtZAAQE6SO85Tsb93+t64Z2TKqv1H8G658UeMgfWIB78v4CcLJ2mi
TN/3opqXrzjkQOTDHzBgT7al/mpETHZ6fOdbCemK0fVALGFUioUZg4M8VXtuI4Jw
q28jAyoRKrCrzda4BeQ553NZ4G5RvwhX3O2I8B8upTbt5hLcisBKy8MPLYY5LUFj
YKAP+raf6QLliP6KYHuVxUlgzxjLTxVG41etcyqqZF+foyiKBO3PU3n8oh++tgQP
ExOxiR0JSkBG5b+oOBD0zxcvo3/SjBHn0dJOZCSU2SkCgYEAyCe676XnNyBZMRD7
6trsaoiCWBpA6M8H44+x3w4cQFtqV38RyLy60D+iMKjIaLqeBbnay61VMzo24Bz3
EuF2n4+9k/MetLJ0NCw8HmN5k0WSMD2BFsJWG8glVbzaqzehP4tIclwDTYc1jQVt
IoV2/iL7HGT+x2daUwbU5kN5hK0CgYEAxiLB+fmjxJW7VY4SHDLqPdpIW0q/kv4K
d/yZBrCX799vjmFb9vLh7PkQUfJhMJ/ttJOd7EtT3xh4mfkBeLfHwVU0d/ahbmSH
UJu/E9ZGxAW3PP0kxHZtPrLKQwBnfq8AxBauIhR3rPSorQTIOKtwz1jMlHFSUpuL
3KeK2YfDYJcCgYEAkQnJOlNcAuRb/WQzSHIvktssqK8NjiZHryy3Vc0hx7j2jES2
HGI2dSVHYD9OSiXA0KFm3OTTsnViwm/60iGzFdjRJV6tR39xGUVcoyCuPnvRfUd0
PYvBXgxgkYpyYlPDcwp5CvWGJy3tLi1acgOIwIuUr3S38sL//t4adGk8q1kCgYB8
Jbs1Tl53BvrimKpwUNbE+sjrquJu0A7vL68SqgQJoQ7dP9PH4Ff/i+/V6PFM7mib
BQOm02wyFbs7fvKVGVJoqWK+6CIucX732x7W5yRgHtS5ukQXdbzt1Ek3wkEW98Cb
HTruz7RNAt/NyXlLSODeit1lBbx3Vk9EaxZtRsv88QKBgGn7JwXgez9NOyobsNIo
QVO80rpUeenSjuFi+R0VmbLKe/wgAQbYJ0xTAsQ0btqViMzB27D6mJyC+KUIwWNX
MN8a+m46v4kqvZkKL2c4gmDibyURNe/vCtCHFuanJS/1mo2tr4XDyEeiuK52eTd9
omQDpP86RX/hIIQ+JyLSaWYa
-----END PRIVATE KEY-----`,
comment:
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "secret-key-3",
value:
"TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGRvIGVpdXNtb2QgdGVtcG9yIGluY2lkaWR1bnQgdXQgbGFib3JlIGV0IGRvbG9yZSBtYWduYSBhbGlxdWEuIFV0IGVuaW0gYWQgbWluaW0gdmVuaWFtLCBxdWlzIG5vc3RydWQgZXhlcmNpdGF0aW9uCg==",
comment: ""
}
}
];
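// setup: resolve the auth token for the selected auth mode, then create the nested folder used by the deep-path test cases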
beforeAll(async () => {
if (auth === AuthMode.JWT) {
authToken = jwtAuthToken;
} else if (auth === AuthMode.IDENTITY_ACCESS_TOKEN) {
const identityLogin = await testServer.inject({
method: "POST",
url: "/api/v1/auth/universal-auth/login",
body: {
clientSecret: seedData1.machineIdentity.clientCredentials.secret,
clientId: seedData1.machineIdentity.clientCredentials.id
}
});
expect(identityLogin.statusCode).toBe(200);
authToken = identityLogin.json().accessToken;
}
// create a deep folder
const folderCreate = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
name: "folder",
path: "/nested1/nested2"
}
});
expect(folderCreate.statusCode).toBe(200);
folderId = folderCreate.json().folder.id;
});
afterAll(async () => {
const deleteFolder = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${folderId}`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
path: "/nested1/nested2"
}
});
expect(deleteFolder.statusCode).toBe(200);
});
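// helper: lists the raw secrets for a given environment and secret path via the test server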
const getSecrets = async (environment: string, secretPath = "/") => {
const res = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
query: {
secretPath,
environment,
workspaceId: seedData1.projectV3.id
}
});
const secrets: TRawSecret[] = JSON.parse(res.payload).secrets || [];
return secrets;
};
test.each(secretTestCases)("Create secret in path $path", async ({ secret, path }) => {
const createdSecret = await createSecret({ path, ...secret });
expect(createdSecret.secretKey).toEqual(secret.key);
expect(createdSecret.secretValue).toEqual(secret.value);
expect(createdSecret.secretComment || "").toEqual(secret.comment);
expect(createdSecret.version).toEqual(1);
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
secretValue: secret.value,
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key });
});
test.each(secretTestCases)("Get secret by name in path $path", async ({ secret, path }) => {
await createSecret({ path, ...secret });
const getSecByNameRes = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw/${secret.key}`,
headers: {
authorization: `Bearer ${authToken}`
},
query: {
secretPath: path,
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug
}
});
expect(getSecByNameRes.statusCode).toBe(200);
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
expect(getSecretByNamePayload).toHaveProperty("secret");
const decryptedSecret = getSecretByNamePayload.secret as TRawSecret;
expect(decryptedSecret.secretKey).toEqual(secret.key);
expect(decryptedSecret.secretValue).toEqual(secret.value);
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
await deleteSecret({ path, key: secret.key });
});
if (auth === AuthMode.JWT) {
test.each(secretTestCases)(
"Creating personal secret without shared throw error in path $path",
async ({ secret }) => {
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: SecretType.Personal,
secretKey: secret.key,
secretValue: secret.value,
secretComment: secret.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/SEC2`,
headers: {
authorization: `Bearer ${authToken}`
},
body: createSecretReqBody
});
const payload = JSON.parse(createSecRes.payload);
expect(createSecRes.statusCode).toBe(400);
expect(payload.error).toEqual("BadRequest");
}
);
test.each(secretTestCases)("Creating personal secret in path $path", async ({ secret, path }) => {
await createSecret({ path, ...secret });
const createSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: SecretType.Personal,
secretPath: path,
secretKey: secret.key,
secretValue: "personal-value",
secretComment: secret.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${secret.key}`,
headers: {
authorization: `Bearer ${authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
// list secrets should contain personal one and shared one
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
secretValue: secret.value,
type: SecretType.Shared
}),
expect.objectContaining({
secretKey: secret.key,
secretValue: "personal-value",
type: SecretType.Personal
})
])
);
await deleteSecret({ path, key: secret.key });
});
test.each(secretTestCases)(
"Deleting personal one should not delete shared secret in path $path",
async ({ secret, path }) => {
await createSecret({ path, ...secret }); // shared one
await createSecret({ path, ...secret, type: SecretType.Personal });
// the shared secret should still be present in the listing
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
type: SecretType.Shared
}),
expect.not.objectContaining({
secretKey: secret.key,
type: SecretType.Personal
})
])
);
await deleteSecret({ path, key: secret.key });
}
);
}
test.each(secretTestCases)("Update secret in path $path", async ({ path, secret }) => {
await createSecret({ path, ...secret });
const updateSecretReqBody = {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: path,
secretKey: secret.key,
secretValue: "new-value",
secretComment: secret.comment
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/raw/${secret.key}`,
headers: {
authorization: `Bearer ${authToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
const decryptedSecret = updatedSecretPayload.secret;
expect(decryptedSecret.secretKey).toEqual(secret.key);
expect(decryptedSecret.secretValue).toEqual("new-value");
expect(decryptedSecret.secretComment || "").toEqual(secret.comment);
// list secret should have updated value
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
secretValue: "new-value",
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key });
});
test.each(secretTestCases)("Delete secret in path $path", async ({ secret, path }) => {
await createSecret({ path, ...secret });
const deletedSecret = await deleteSecret({ path, key: secret.key });
expect(deletedSecret.secretKey).toEqual(secret.key);
// shared secret deletion should delete personal ones also
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining([
expect.objectContaining({
secretKey: secret.key,
type: SecretType.Shared
}),
expect.objectContaining({
secretKey: secret.key,
type: SecretType.Personal
})
])
);
});
test.each(secretTestCases)("Bulk create secrets in path $path", async ({ secret, path }) => {
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: secret.value,
secretComment: secret.comment
}))
}
});
expect(createSharedSecRes.statusCode).toBe(200);
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
expect(createSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: secret.value,
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test.each(secretTestCases)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
await createSecret({ ...secret, key: `BULK-${secret.key}-1`, path });
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: secret.value,
secretComment: secret.comment
}))
}
});
expect(createSharedSecRes.statusCode).toBe(400);
await deleteSecret({ path, key: `BULK-${secret.key}-1` });
});
test.each(secretTestCases)("Bulk update secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
);
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: secret.comment
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should be updated
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
);
const deletedSharedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`
}))
}
});
expect(deletedSharedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// bulk ones should no longer exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
});
}
);

@ -1075,7 +1075,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(404);
expect(createSecRes.statusCode).toBe(400);
});
test("Update secret raw", async () => {
@ -1093,7 +1093,7 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(404);
expect(updateSecRes.statusCode).toBe(400);
});
test("Delete secret raw", async () => {
@ -1110,6 +1110,6 @@ describe("Secret V3 Raw Router Without E2EE enabled", async () => {
},
body: deletedSecretReqBody
});
expect(deletedSecRes.statusCode).toBe(404);
expect(deletedSecRes.statusCode).toBe(400);
});
});

@ -1,73 +0,0 @@
type TFolder = {
id: string;
name: string;
};
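// helper: creates a folder at the given path and returns it after asserting a 200 response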
export const createFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
name: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
name: dto.name,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const deleteFolder = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
id: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folder as TFolder;
};
export const listFolders = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
return res.json().folders as TFolder[];
};

@ -1,93 +0,0 @@
type TSecretImport = {
id: string;
importEnv: {
name: string;
slug: string;
id: string;
};
importPath: string;
};
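// helper: creates a secret import at the given path (optionally as a replication) and returns the created import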
export const createSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
isReplication?: boolean;
secretPath: string;
importPath: string;
importEnv: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
isReplication: dto.isReplication,
path: dto.secretPath,
import: {
environment: dto.importEnv,
path: dto.importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const deleteSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
id: string;
}) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${dto.id}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport as TSecretImport;
};
export const listSecretImport = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
path: dto.secretPath
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
return payload.secretImports as TSecretImport[];
};

@ -1,128 +0,0 @@
import { SecretType } from "@app/db/schemas";
type TRawSecret = {
secretKey: string;
secretValue: string;
secretComment?: string;
version: number;
};
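// helper: creates a raw shared (or personal) secret via the v3 API and returns the created secret payload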
export const createSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
value: string;
comment?: string;
authToken: string;
type?: SecretType;
}) => {
const createSecretReqBody = {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
type: dto.type || SecretType.Shared,
secretPath: dto.secretPath,
secretKey: dto.key,
secretValue: dto.value,
secretComment: dto.comment
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret as TRawSecret;
};
export const deleteSecretV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
body: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret as TRawSecret;
};
export const getSecretByNameV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
key: string;
authToken: string;
}) => {
const response = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw/${dto.key}`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(response.statusCode).toBe(200);
const payload = JSON.parse(response.payload);
expect(payload).toHaveProperty("secret");
return payload.secret as TRawSecret;
};
export const getSecretsV2 = async (dto: {
workspaceId: string;
environmentSlug: string;
secretPath: string;
authToken: string;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/raw`,
headers: {
authorization: `Bearer ${dto.authToken}`
},
query: {
workspaceId: dto.workspaceId,
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
}
});
expect(getSecretsResponse.statusCode).toBe(200);
const getSecretsPayload = JSON.parse(getSecretsResponse.payload);
expect(getSecretsPayload).toHaveProperty("secrets");
expect(getSecretsPayload).toHaveProperty("imports");
return getSecretsPayload as {
secrets: TRawSecret[];
imports: {
secretPath: string;
environment: string;
folderId: string;
secrets: TRawSecret[];
}[];
};
};

@ -11,11 +11,10 @@ import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@ -29,31 +28,19 @@ export default {
dbRootCert: cfg.DB_ROOT_CERT
});
const redis = new Redis(cfg.REDIS_URL);
await redis.flushdb("SYNC");
try {
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL);
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const queue = mockQueue();
const keyStore = mockKeyStore();
const server = await main({ db, smtp, logger, queue, keyStore });
// @ts-expect-error type
globalThis.testServer = server;
@ -71,12 +58,10 @@ export default {
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}
// custom setup
return {
async teardown() {
@ -95,9 +80,6 @@ export default {
},
true
);
await redis.flushdb("ASYNC");
redis.disconnect();
await db.destroy();
}
};

backend/package-lock.json (generated): file diff suppressed because it is too large

@ -20,7 +20,8 @@
"../frontend/.next/**",
"!../frontend/node_modules/next/dist/server/**/*.js",
"../frontend/node_modules/@fortawesome/fontawesome-svg-core/**/*",
"../frontend/public/**"
"../frontend/public/**",
"node_modules/argon2/**/*"
],
"outputPath": "binary"
},
@ -34,30 +35,23 @@
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "tsup --sourcemap",
"build": "tsup",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"start": "node dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e": "vitest run -c vitest.e2e.config.ts",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
@ -85,8 +79,6 @@
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
"@types/sjcl": "^1.0.34",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
@ -110,37 +102,32 @@
"tsup": "^8.0.1",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
"@elastic/elasticsearch": "^8.15.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.0",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
@ -166,10 +153,8 @@
"jwks-rsa": "^3.1.0",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"ldif": "0.5.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
@ -185,13 +170,8 @@
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"pkijs": "^3.2.4",
"posthog-node": "^3.6.2",
"probot": "^13.3.8",
"safe-regex": "^2.1.1",
"scim-patch": "^0.8.3",
"scim2-parse-filter": "^0.2.10",
"sjcl": "^1.0.8",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",

@ -7,33 +7,14 @@ const prompt = promptSync({
sigint: true
});
type ComponentType = 1 | 2 | 3;
console.log(`
Component List
--------------
0. Exit
1. Service component
2. DAL component
3. Router component
`);
function getComponentType(): ComponentType {
while (true) {
const input = prompt("Select a component (0-3): ");
const componentType = parseInt(input, 10);
if (componentType === 0) {
console.log("Exiting the program. Goodbye!");
process.exit(0);
} else if (componentType === 1 || componentType === 2 || componentType === 3) {
return componentType;
} else {
console.log("Invalid input. Please enter 0, 1, 2, or 3.");
}
}
}
const componentType = getComponentType();
const componentType = parseInt(prompt("Select a component: "), 10);
if (componentType === 1) {
const componentName = prompt("Enter service name: ");

@ -90,12 +90,7 @@ const main = async () => {
.whereRaw("table_schema = current_schema()")
.select<{ tableName: string }[]>("table_name as tableName")
.orderBy("table_name")
).filter(
(el) =>
!el.tableName.includes("_migrations") &&
!el.tableName.includes("audit_logs_") &&
el.tableName !== "intermediate_audit_logs"
);
).filter((el) => !el.tableName.includes("_migrations"));
for (let i = 0; i < tables.length; i += 1) {
const { tableName } = tables[i];

@ -7,7 +7,6 @@ import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-se
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
@ -19,7 +18,6 @@ import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-ser
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@ -37,9 +35,6 @@ import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@ -55,9 +50,6 @@ import { TIntegrationServiceFactory } from "@app/services/integration/integratio
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@ -72,14 +64,12 @@ import { TSecretReplicationServiceFactory } from "@app/services/secret-replicati
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "fastify" {
interface FastifyRequest {
@ -98,7 +88,6 @@ declare module "fastify" {
id: string;
orgId: string;
};
rateLimits: RateLimitConfiguration;
// passport data
passportUser: {
isUserCompleted: string;
@ -124,7 +113,6 @@ declare module "fastify" {
group: TGroupServiceFactory;
groupProject: TGroupProjectServiceFactory;
apiKey: TApiKeyServiceFactory;
pkiAlert: TPkiAlertServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;
projectEnv: TProjectEnvServiceFactory;
@ -162,11 +150,8 @@ declare module "fastify" {
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
pkiCollection: TPkiCollectionServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
@ -180,11 +165,6 @@ declare module "fastify" {
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
orgAdmin: TOrgAdminServiceFactory;
slack: TSlackServiceFactory;
workflowIntegration: TWorkflowIntegrationServiceFactory;
cmek: TCmekServiceFactory;
migration: TExternalMigrationServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

@ -53,12 +53,6 @@ import {
TCertificateSecretsUpdate,
TCertificatesInsert,
TCertificatesUpdate,
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate,
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
@ -101,9 +95,6 @@ import {
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate,
TIdentityOidcAuths,
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate,
@ -170,15 +161,6 @@ import {
TOrgRoles,
TOrgRolesInsert,
TOrgRolesUpdate,
TPkiAlerts,
TPkiAlertsInsert,
TPkiAlertsUpdate,
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate,
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
@ -196,9 +178,6 @@ import {
TProjectRolesUpdate,
TProjects,
TProjectsInsert,
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectsUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
@ -225,9 +204,6 @@ import {
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate,
TSecretApprovalRequestSecretTagsV2,
TSecretApprovalRequestSecretTagsV2Insert,
TSecretApprovalRequestSecretTagsV2Update,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
@ -235,9 +211,6 @@ import {
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate,
TSecretApprovalRequestsSecretsV2,
TSecretApprovalRequestsSecretsV2Insert,
TSecretApprovalRequestsSecretsV2Update,
TSecretApprovalRequestsUpdate,
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
@ -254,15 +227,9 @@ import {
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate,
TSecretReferencesV2,
TSecretReferencesV2Insert,
TSecretReferencesV2Update,
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate,
TSecretRotationOutputV2,
TSecretRotationOutputV2Insert,
TSecretRotationOutputV2Update,
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate,
@ -281,9 +248,6 @@ import {
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate,
TSecretSnapshotSecretsV2,
TSecretSnapshotSecretsV2Insert,
TSecretSnapshotSecretsV2Update,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate,
TSecretsUpdate,
@ -299,15 +263,9 @@ import {
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate,
TSecretVersionV2TagJunction,
TSecretVersionV2TagJunctionInsert,
TSecretVersionV2TagJunctionUpdate,
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate,
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@ -331,22 +289,8 @@ import {
TUsersUpdate,
TWebhooks,
TWebhooksInsert,
TWebhooksUpdate,
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
TWebhooksUpdate
} from "@app/db/schemas";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate
} from "@app/db/schemas/secret-v2-tag-junction";
import {
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update
} from "@app/db/schemas/secret-versions-v2";
import { TSecretsV2, TSecretsV2Insert, TSecretsV2Update } from "@app/db/schemas/secrets-v2";
declare module "knex" {
namespace Knex {
@ -382,16 +326,6 @@ declare module "knex/types/tables" {
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateTemplate]: KnexOriginal.CompositeTableType<
TCertificateTemplates,
TCertificateTemplatesInsert,
TCertificateTemplatesUpdate
>;
[TableName.CertificateTemplateEstConfig]: KnexOriginal.CompositeTableType<
TCertificateTemplateEstConfigs,
TCertificateTemplateEstConfigsInsert,
TCertificateTemplateEstConfigsUpdate
>;
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
@ -402,17 +336,6 @@ declare module "knex/types/tables" {
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.PkiAlert]: KnexOriginal.CompositeTableType<TPkiAlerts, TPkiAlertsInsert, TPkiAlertsUpdate>;
[TableName.PkiCollection]: KnexOriginal.CompositeTableType<
TPkiCollections,
TPkiCollectionsInsert,
TPkiCollectionsUpdate
>;
[TableName.PkiCollectionItem]: KnexOriginal.CompositeTableType<
TPkiCollectionItems,
TPkiCollectionItemsInsert,
TPkiCollectionItemsUpdate
>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
@ -549,11 +472,6 @@ declare module "knex/types/tables" {
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityMetadata]: KnexOriginal.CompositeTableType<
TIdentityMetadata,
TIdentityMetadataInsert,
TIdentityMetadataUpdate
>;
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
@ -727,23 +645,7 @@ declare module "knex/types/tables" {
TSecretScanningGitRisksUpdate
>;
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
[TableName.SecretV2]: KnexOriginal.CompositeTableType<TSecretsV2, TSecretsV2Insert, TSecretsV2Update>;
[TableName.SecretVersionV2]: KnexOriginal.CompositeTableType<
TSecretVersionsV2,
TSecretVersionsV2Insert,
TSecretVersionsV2Update
>;
[TableName.SecretReferenceV2]: KnexOriginal.CompositeTableType<
TSecretReferencesV2,
TSecretReferencesV2Insert,
TSecretReferencesV2Update
>;
// Junction tables
[TableName.SecretV2JnTag]: KnexOriginal.CompositeTableType<
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
TSecretV2TagJunctionUpdate
>;
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
TSecretTagJunction,
TSecretTagJunctionInsert,
@ -754,31 +656,6 @@ declare module "knex/types/tables" {
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
[TableName.SecretVersionV2Tag]: KnexOriginal.CompositeTableType<
TSecretVersionV2TagJunction,
TSecretVersionV2TagJunctionInsert,
TSecretVersionV2TagJunctionUpdate
>;
[TableName.SnapshotSecretV2]: KnexOriginal.CompositeTableType<
TSecretSnapshotSecretsV2,
TSecretSnapshotSecretsV2Insert,
TSecretSnapshotSecretsV2Update
>;
[TableName.SecretApprovalRequestSecretV2]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestsSecretsV2,
TSecretApprovalRequestsSecretsV2Insert,
TSecretApprovalRequestsSecretsV2Update
>;
[TableName.SecretApprovalRequestSecretTagV2]: KnexOriginal.CompositeTableType<
TSecretApprovalRequestSecretTagsV2,
TSecretApprovalRequestSecretTagsV2Insert,
TSecretApprovalRequestSecretTagsV2Update
>;
[TableName.SecretRotationOutputV2]: KnexOriginal.CompositeTableType<
TSecretRotationOutputV2,
TSecretRotationOutputV2Insert,
TSecretRotationOutputV2Update
>;
// KMS service
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
TKmsRootConfig,
@ -793,20 +670,5 @@ declare module "knex/types/tables" {
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate
>;
[TableName.SlackIntegrations]: KnexOriginal.CompositeTableType<
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate
>;
[TableName.ProjectSlackConfigs]: KnexOriginal.CompositeTableType<
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate
>;
[TableName.WorkflowIntegrations]: KnexOriginal.CompositeTableType<
TWorkflowIntegrations,
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
>;
}
}

@ -1,4 +0,0 @@
declare module "ldif" {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
function parse(input: string, ...args: any[]): any;
}

@ -1,75 +0,0 @@
// eslint-disable-next-line
import "ts-node/register";
import dotenv from "dotenv";
import type { Knex } from "knex";
import path from "path";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
});
if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) {
console.info("Dedicated audit log database not found. No further migrations necessary");
process.exit(0);
}
console.info("Executing migration on audit log database...");
export default {
development: {
client: "postgres",
connection: {
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
host: process.env.AUDIT_LOGS_DB_HOST,
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
pool: {
min: 2,
max: 10
},
seeds: {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations"
}
},
production: {
client: "postgres",
connection: {
connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
host: process.env.AUDIT_LOGS_DB_HOST,
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT
? {
rejectUnauthorized: true,
ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii")
}
: false
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: "infisical_migrations"
}
}
} as Knex.Config;

@ -1,2 +1,2 @@
export type { TDbClient } from "./instance";
export { initAuditLogDbConnection, initDbConnection } from "./instance";
export { initDbConnection } from "./instance";

@ -70,45 +70,3 @@ export const initDbConnection = ({
return db;
};
export const initAuditLogDbConnection = ({
dbConnectionUri,
dbRootCert
}: {
dbConnectionUri: string;
dbRootCert?: string;
}) => {
// akhilmhdh: the default Knex type is knex.Knex<any, any[]>, but when assigned with knex({<config>}) the value becomes knex.Knex<any, unknown[]>.
// This was causing issues in files like `snapshot-dal` `findRecursivelySnapshots`, so the any and unknown[] are set explicitly here.
// eslint-disable-next-line
const db: Knex<any, unknown[]> = knex({
client: "pg",
connection: {
connectionString: dbConnectionUri,
host: process.env.AUDIT_LOGS_DB_HOST,
// @ts-expect-error I have no clue why only for the port there is a type error
// eslint-disable-next-line
port: process.env.AUDIT_LOGS_DB_PORT,
user: process.env.AUDIT_LOGS_DB_USER,
database: process.env.AUDIT_LOGS_DB_NAME,
password: process.env.AUDIT_LOGS_DB_PASSWORD,
ssl: dbRootCert
? {
rejectUnauthorized: true,
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
}
});
// we add these overrides so that auditLogDb and the primary DB are interchangeable
db.primaryNode = () => {
return db;
};
db.replicaNode = () => {
return db;
};
return db;
};

@ -1,161 +0,0 @@
import kx, { Knex } from "knex";
import { TableName } from "../schemas";
const INTERMEDIATE_AUDIT_LOG_TABLE = "intermediate_audit_logs";
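// formats a Date as YYYY-MM-DD for use in partition names and range bounds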
const formatPartitionDate = (date: Date) => {
const year = date.getFullYear();
const month = String(date.getMonth() + 1).padStart(2, "0");
const day = String(date.getDate()).padStart(2, "0");
return `${year}-${month}-${day}`;
};
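// creates a range partition of the audit log table covering [startDate, endDate)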
const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Date) => {
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
);
};
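// converts the existing audit log table into a range-partitioned table keyed on "createdAt", then pre-creates monthly partitions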
const up = async (knex: Knex): Promise<void> => {
console.info("Dropping primary key of audit log table...");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
// remove existing keys
t.dropPrimary();
});
// Get all indices of the audit log table and drop them
const indexNames: { rows: { indexname: string }[] } = await knex.raw(
`
SELECT indexname
FROM pg_indexes
WHERE tablename = '${TableName.AuditLog}'
`
);
console.log(
"Deleting existing audit log indices:",
indexNames.rows.map((e) => e.indexname)
);
for await (const row of indexNames.rows) {
await knex.raw(`DROP INDEX IF EXISTS ${row.indexname}`);
}
// renaming audit log to intermediate table
console.log("Renaming audit log table to the intermediate name");
await knex.schema.renameTable(TableName.AuditLog, INTERMEDIATE_AUDIT_LOG_TABLE);
if (!(await knex.schema.hasTable(TableName.AuditLog))) {
const createTableSql = knex.schema
.createTable(TableName.AuditLog, (t) => {
t.uuid("id").defaultTo(knex.fn.uuid());
t.string("actor").notNullable();
t.jsonb("actorMetadata").notNullable();
t.string("ipAddress");
t.string("eventType").notNullable();
t.jsonb("eventMetadata");
t.string("userAgent");
t.string("userAgentType");
t.datetime("expiresAt");
t.timestamps(true, true, true);
t.uuid("orgId");
t.string("projectId");
t.string("projectName");
t.primary(["id", "createdAt"]);
})
.toString();
console.info("Creating partition table...");
await knex.schema.raw(`
${createTableSql} PARTITION BY RANGE ("createdAt");
`);
console.log("Adding indices...");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
t.index(["projectId", "createdAt"]);
t.index(["orgId", "createdAt"]);
t.index("expiresAt");
t.index("orgId");
t.index("projectId");
});
console.log("Adding GIN indices...");
await knex.raw(
`CREATE INDEX IF NOT EXISTS "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)`
);
console.log("GIN index for actorMetadata done");
await knex.raw(
`CREATE INDEX IF NOT EXISTS "audit_logs_eventMetadata_idx" ON ${TableName.AuditLog} USING gin("eventMetadata" jsonb_path_ops)`
);
console.log("GIN index for eventMetadata done");
// create default partition
console.log("Creating default partition...");
await knex.schema.raw(`CREATE TABLE ${TableName.AuditLog}_default PARTITION OF ${TableName.AuditLog} DEFAULT`);
const nextDate = new Date();
nextDate.setDate(nextDate.getDate() + 1);
const nextDateStr = formatPartitionDate(nextDate);
console.log("Attaching existing audit log table as a partition...");
await knex.schema.raw(`
ALTER TABLE ${INTERMEDIATE_AUDIT_LOG_TABLE} ADD CONSTRAINT audit_log_old
CHECK ( "createdAt" < DATE '${nextDateStr}' );
ALTER TABLE ${TableName.AuditLog} ATTACH PARTITION ${INTERMEDIATE_AUDIT_LOG_TABLE}
FOR VALUES FROM (MINVALUE) TO ('${nextDateStr}' );
`);
// create partition from now until end of month
console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));
// create partitions 4 years ahead
const partitionMonths = 4 * 12;
const partitionPromises: Promise<void>[] = [];
for (let x = 1; x <= partitionMonths; x += 1) {
partitionPromises.push(
createAuditLogPartition(
knex,
new Date(nextDate.getFullYear(), nextDate.getMonth() + x, 1),
new Date(nextDate.getFullYear(), nextDate.getMonth() + (x + 1), 1)
)
);
}
await Promise.all(partitionPromises);
console.log("Partition migration complete");
}
};
export const executeMigration = async (url: string) => {
console.log("Executing migration...");
const knex = kx({
client: "pg",
connection: url
});
await knex.transaction(async (tx) => {
await up(tx);
});
};
const dbUrl = process.env.AUDIT_LOGS_DB_CONNECTION_URI;
if (!dbUrl) {
console.error("Please provide a DB connection URL to the AUDIT_LOGS_DB_CONNECTION_URI env");
process.exit(1);
}
void executeMigration(dbUrl).then(() => {
console.log("Migration: partition-audit-logs DONE");
process.exit(0);
});

@ -115,14 +115,7 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (tb) => {
@ -154,27 +147,13 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
committerId: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership)),
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.committerUserId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
statusChangeBy: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.statusChangedByUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequest, (tb) => {
@ -198,20 +177,8 @@ export async function down(knex: Knex): Promise<void> {
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.join(
TableName.SecretApprovalRequest,
`${TableName.SecretApprovalRequest}.id`,
`${TableName.SecretApprovalRequestReviewer}.requestId`
)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicy}.id`,
`${TableName.SecretApprovalRequest}.policyId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretApprovalPolicy}.envId`)
.where(`${TableName.ProjectMembership}.projectId`, knex.raw("??", [`${TableName.Environment}.projectId`]))
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`]))
.select(knex.ref("id").withSchema(TableName.ProjectMembership))
});
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (tb) => {
tb.uuid("member").notNullable().alter();

@ -1,294 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
if (!hasApproverUserId) {
// add the new fields
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
// if (hasApproverId) tb.setNullable("approverId");
tb.uuid("approverUserId");
tb.foreign("approverUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
});
// convert project membership id => user id
await knex(TableName.AccessApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverId`]))
});
// drop the old field
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
if (hasApproverId) tb.dropColumn("approverId");
tb.uuid("approverUserId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST ------------
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
if (hasAccessApprovalRequestTable) {
// new fields
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (!hasRequestedByUserId) {
tb.uuid("requestedByUserId");
tb.foreign("requestedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
// copy the assigned project membership => user id to new fields
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
requestedByUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedBy`]))
});
// drop old fields
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (hasRequestedBy) {
// DROP AT A LATER TIME
// tb.dropColumn("requestedBy");
// ADD ALLOW NULLABLE FOR NOW
tb.uuid("requestedBy").nullable().alter();
}
tb.uuid("requestedByUserId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
if (!hasReviewerUserId) {
// new fields
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
// if (hasMemberId) tb.setNullable("member");
tb.uuid("reviewerUserId");
tb.foreign("reviewerUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
});
// copy project membership => user id to new fields
await knex(TableName.AccessApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
reviewerUserId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.member`]))
});
// drop table
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
if (hasMemberId) {
// DROP AT A LATER TIME
// tb.dropColumn("member");
// ALLOW NULLABLE FOR NOW
tb.uuid("member").nullable().alter();
}
tb.uuid("reviewerUserId").notNullable().alter();
});
}
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
const projectUserAdditionalPrivilegeHasProjectMembershipId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"projectMembershipId"
);
const projectUserAdditionalPrivilegeHasUserId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"userId"
);
if (!projectUserAdditionalPrivilegeHasUserId) {
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.string("projectId");
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
await knex(TableName.ProjectUserAdditionalPrivilege)
.update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
userId: knex(TableName.ProjectMembership)
.select("userId")
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`])),
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectId: knex(TableName.ProjectMembership)
.select("projectId")
.where("id", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.projectMembershipId`]))
})
.whereNotNull("projectMembershipId");
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.uuid("userId").notNullable().alter();
tb.string("projectId").notNullable().alter();
});
}
if (projectUserAdditionalPrivilegeHasProjectMembershipId) {
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
// DROP AT A LATER TIME
// tb.dropColumn("projectMembershipId");
// ALLOW NULLABLE FOR NOW
tb.uuid("projectMembershipId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
// We remove project user additional privileges first, because this may delete records where no matching project membership is found.
// The project membership won't be found on records created by group members. In those cases we just delete the record and continue.
// When the additional privilege record is deleted, it cascade-deletes the access request created by the group member.
// ---------- PROJECT USER ADDITIONAL PRIVILEGE ------------
const hasUserId = await knex.schema.hasColumn(TableName.ProjectUserAdditionalPrivilege, "userId");
const hasProjectMembershipId = await knex.schema.hasColumn(
TableName.ProjectUserAdditionalPrivilege,
"projectMembershipId"
);
// If it doesn't have the userId field, then the up migration has not run
if (!hasUserId) {
return;
}
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
if (!hasProjectMembershipId) {
tb.uuid("projectMembershipId");
tb.foreign("projectMembershipId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasProjectMembershipId) {
// First, update records where a matching project membership exists
await knex(TableName.ProjectUserAdditionalPrivilege).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectMembershipId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.ProjectUserAdditionalPrivilege}.userId`]))
});
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
projectMembershipId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.SecretApprovalRequest}.userId`]))
});
await knex.schema.alterTable(TableName.ProjectUserAdditionalPrivilege, (tb) => {
tb.dropColumn("userId");
tb.dropColumn("projectId");
tb.uuid("projectMembershipId").notNullable().alter();
});
}
// Then, delete records where no matching project membership was found
await knex(TableName.ProjectUserAdditionalPrivilege).whereNull("projectMembershipId").delete();
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
// ---------- ACCESS APPROVAL POLICY APPROVER ------------
const hasApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasApproverId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverId");
if (hasApproverUserId) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
if (!hasApproverId) {
tb.uuid("approverId");
tb.foreign("approverId").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
if (!hasApproverId) {
await knex(TableName.AccessApprovalPolicyApprover).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
approverId: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalPolicyApprover}.approverUserId`]))
});
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (tb) => {
tb.dropColumn("approverUserId");
tb.uuid("approverId").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST ------------
const hasAccessApprovalRequestTable = await knex.schema.hasTable(TableName.AccessApprovalRequest);
const hasRequestedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedByUserId");
const hasRequestedBy = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "requestedBy");
if (hasAccessApprovalRequestTable) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (!hasRequestedBy) {
tb.uuid("requestedBy");
tb.foreign("requestedBy").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
}
});
// Try to find a project membership based on AccessApprovalRequest.requestedByUserId and the project
// reachable through AccessApprovalRequest.policyId (-> policy envId -> Environment projectId -> Project).
// If a project membership is found, set AccessApprovalRequest.requestedBy to that project membership id.
// If no project membership is found, remove the AccessApprovalRequest record.
await knex(TableName.AccessApprovalRequest).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
requestedBy: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequest}.requestedByUserId`]))
});
// Then, delete records where no matching project membership was found
await knex(TableName.AccessApprovalRequest).whereNull("requestedBy").delete();
await knex.schema.alterTable(TableName.AccessApprovalRequest, (tb) => {
if (hasRequestedByUserId) {
tb.dropColumn("requestedByUserId");
}
if (hasRequestedBy) tb.uuid("requestedBy").notNullable().alter();
});
}
// ---------- ACCESS APPROVAL REQUEST REVIEWER ------------
const hasMemberId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "member");
const hasReviewerUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequestReviewer, "reviewerUserId");
if (hasReviewerUserId) {
if (!hasMemberId) {
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
tb.uuid("member");
tb.foreign("member").references("id").inTable(TableName.ProjectMembership).onDelete("CASCADE");
});
}
await knex(TableName.AccessApprovalRequestReviewer).update({
// eslint-disable-next-line
// @ts-ignore because generate schema happens after this
member: knex(TableName.ProjectMembership)
.select("id")
.where("userId", knex.raw("??", [`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`]))
});
await knex.schema.alterTable(TableName.AccessApprovalRequestReviewer, (tb) => {
tb.dropColumn("reviewerUserId");
tb.uuid("member").notNullable().alter();
});
}
}
}
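// A minimal, standalone sketch of the backfill pattern the migration above relies on:
// knex's "??" placeholder binds an identifier rather than a value, so the subquery
// correlates against the outer row and the statement compiles to a single UPDATE with a
// scalar subquery. The table, column, and function names here are illustrative assumptions only.
import { Knex } from "knex";

export async function backfillUserIdFromMembershipSketch(knex: Knex): Promise<void> {
  await knex("approvers").update({
    approverUserId: knex("project_memberships")
      .select("userId")
      .where("id", knex.raw("??", ["approvers.approverId"]))
  });
  // Roughly the SQL that is issued:
  // UPDATE "approvers" SET "approverUserId" = (
  //   SELECT "userId" FROM "project_memberships" WHERE "id" = "approvers"."approverId"
  // );
}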

@ -1,39 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesNameExist = await knex.schema.hasColumn(TableName.SecretSharing, "name");
if (!doesNameExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("name").nullable();
});
}
const doesLastViewedAtExist = await knex.schema.hasColumn(TableName.SecretSharing, "lastViewedAt");
if (!doesLastViewedAtExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.timestamp("lastViewedAt").nullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesNameExist = await knex.schema.hasColumn(TableName.SecretSharing, "name");
if (doesNameExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("name");
});
}
const doesLastViewedAtExist = await knex.schema.hasColumn(TableName.SecretSharing, "lastViewedAt");
if (doesLastViewedAtExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("lastViewedAt");
});
}
}
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasKmsDataKeyCol = await knex.schema.hasColumn(TableName.Organization, "kmsEncryptedDataKey");
await knex.schema.alterTable(TableName.Organization, (tb) => {
if (!hasKmsDataKeyCol) {
tb.binary("kmsEncryptedDataKey");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasKmsDataKeyCol = await knex.schema.hasColumn(TableName.Organization, "kmsEncryptedDataKey");
await knex.schema.alterTable(TableName.Organization, (t) => {
if (hasKmsDataKeyCol) {
t.dropColumn("kmsEncryptedDataKey");
}
});
}

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasKmsSecretManagerEncryptedDataKey = await knex.schema.hasColumn(
TableName.Project,
"kmsSecretManagerEncryptedDataKey"
);
await knex.schema.alterTable(TableName.Project, (tb) => {
if (!hasKmsSecretManagerEncryptedDataKey) {
tb.binary("kmsSecretManagerEncryptedDataKey");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasKmsSecretManagerEncryptedDataKey = await knex.schema.hasColumn(
TableName.Project,
"kmsSecretManagerEncryptedDataKey"
);
await knex.schema.alterTable(TableName.Project, (t) => {
if (hasKmsSecretManagerEncryptedDataKey) {
t.dropColumn("kmsSecretManagerEncryptedDataKey");
}
});
}

@ -1,181 +0,0 @@
/* eslint-disable @typescript-eslint/ban-ts-comment */
import { Knex } from "knex";
import { SecretType, TableName } from "../schemas";
import { createJunctionTable, createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const doesSecretV2TableExist = await knex.schema.hasTable(TableName.SecretV2);
if (!doesSecretV2TableExist) {
await knex.schema.createTable(TableName.SecretV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("version").defaultTo(1).notNullable();
t.string("type").notNullable().defaultTo(SecretType.Shared);
t.string("key", 500).notNullable();
t.binary("encryptedValue");
t.binary("encryptedComment");
t.string("reminderNote");
t.integer("reminderRepeatDays");
t.boolean("skipMultilineEncoding").defaultTo(false);
t.jsonb("metadata");
t.uuid("userId");
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("folderId").notNullable();
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index(["folderId", "userId"]);
});
}
await createOnUpdateTrigger(knex, TableName.SecretV2);
// many to many relation between tags
await createJunctionTable(knex, TableName.SecretV2JnTag, TableName.SecretV2, TableName.SecretTag);
const doesSecretV2VersionTableExist = await knex.schema.hasTable(TableName.SecretVersionV2);
if (!doesSecretV2VersionTableExist) {
await knex.schema.createTable(TableName.SecretVersionV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("version").defaultTo(1).notNullable();
t.string("type").notNullable().defaultTo(SecretType.Shared);
t.string("key", 500).notNullable();
t.binary("encryptedValue");
t.binary("encryptedComment");
t.string("reminderNote");
t.integer("reminderRepeatDays");
t.boolean("skipMultilineEncoding").defaultTo(false);
t.jsonb("metadata");
// to avoid orphan rows
t.uuid("envId");
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
t.uuid("secretId").notNullable();
t.uuid("folderId").notNullable();
t.uuid("userId");
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.SecretVersionV2);
if (!(await knex.schema.hasTable(TableName.SecretReferenceV2))) {
await knex.schema.createTable(TableName.SecretReferenceV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("environment").notNullable();
t.string("secretPath").notNullable();
t.string("secretKey", 500).notNullable();
t.uuid("secretId").notNullable();
t.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
});
}
await createJunctionTable(knex, TableName.SecretVersionV2Tag, TableName.SecretVersionV2, TableName.SecretTag);
if (!(await knex.schema.hasTable(TableName.SecretApprovalRequestSecretV2))) {
await knex.schema.createTable(TableName.SecretApprovalRequestSecretV2, (t) => {
// everything related to secret
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("version").defaultTo(1);
t.string("key", 500).notNullable();
t.binary("encryptedValue");
t.binary("encryptedComment");
t.string("reminderNote");
t.integer("reminderRepeatDays");
t.boolean("skipMultilineEncoding").defaultTo(false);
t.jsonb("metadata");
t.timestamps(true, true, true);
// commit details
t.uuid("requestId").notNullable();
t.foreign("requestId").references("id").inTable(TableName.SecretApprovalRequest).onDelete("CASCADE");
t.string("op").notNullable();
t.uuid("secretId");
t.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("SET NULL");
t.uuid("secretVersion");
t.foreign("secretVersion").references("id").inTable(TableName.SecretVersionV2).onDelete("SET NULL");
});
}
if (!(await knex.schema.hasTable(TableName.SecretApprovalRequestSecretTagV2))) {
await knex.schema.createTable(TableName.SecretApprovalRequestSecretTagV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("secretId").notNullable();
t.foreign("secretId").references("id").inTable(TableName.SecretApprovalRequestSecretV2).onDelete("CASCADE");
t.uuid("tagId").notNullable();
t.foreign("tagId").references("id").inTable(TableName.SecretTag).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
if (!(await knex.schema.hasTable(TableName.SnapshotSecretV2))) {
await knex.schema.createTable(TableName.SnapshotSecretV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("envId").index().notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
// not a true relation; kept like that so the reference is preserved when rolled back
t.uuid("secretVersionId").index().notNullable();
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("snapshotId").index().notNullable();
t.foreign("snapshotId").references("id").inTable(TableName.Snapshot).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
const hasEncryptedAccess = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccess");
const hasEncryptedAccessId = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccessId");
const hasEncryptedRefresh = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedRefresh");
const hasEncryptedAwsIamAssumRole = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"encryptedAwsAssumeIamRoleArn"
);
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (!hasEncryptedAccess) t.binary("encryptedAccess");
if (!hasEncryptedAccessId) t.binary("encryptedAccessId");
if (!hasEncryptedRefresh) t.binary("encryptedRefresh");
if (!hasEncryptedAwsIamAssumRole) t.binary("encryptedAwsAssumeIamRoleArn");
});
}
if (!(await knex.schema.hasTable(TableName.SecretRotationOutputV2))) {
await knex.schema.createTable(TableName.SecretRotationOutputV2, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("key").notNullable();
t.uuid("secretId").notNullable();
t.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
t.uuid("rotationId").notNullable();
t.foreign("rotationId").references("id").inTable(TableName.SecretRotation).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SnapshotSecretV2);
await knex.schema.dropTableIfExists(TableName.SecretApprovalRequestSecretTagV2);
await knex.schema.dropTableIfExists(TableName.SecretApprovalRequestSecretV2);
await knex.schema.dropTableIfExists(TableName.SecretV2JnTag);
await knex.schema.dropTableIfExists(TableName.SecretReferenceV2);
await knex.schema.dropTableIfExists(TableName.SecretRotationOutputV2);
await dropOnUpdateTrigger(knex, TableName.SecretVersionV2);
await knex.schema.dropTableIfExists(TableName.SecretVersionV2Tag);
await knex.schema.dropTableIfExists(TableName.SecretVersionV2);
await dropOnUpdateTrigger(knex, TableName.SecretV2);
await knex.schema.dropTableIfExists(TableName.SecretV2);
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
const hasEncryptedAccess = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccess");
const hasEncryptedAccessId = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedAccessId");
const hasEncryptedRefresh = await knex.schema.hasColumn(TableName.IntegrationAuth, "encryptedRefresh");
const hasEncryptedAwsIamAssumRole = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"encryptedAwsAssumeIamRoleArn"
);
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (hasEncryptedAccess) t.dropColumn("encryptedAccess");
if (hasEncryptedAccessId) t.dropColumn("encryptedAccessId");
if (hasEncryptedRefresh) t.dropColumn("encryptedRefresh");
if (hasEncryptedAwsIamAssumRole) t.dropColumn("encryptedAwsAssumeIamRoleArn");
});
}
}
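// The helpers imported from "../utils" are not shown in this diff. Below is a minimal,
// standalone sketch of what they plausibly do (assumptions: the column naming, the
// idempotency checks, and the trigger function name are guesses; the real implementations may differ).
import { Knex } from "knex";

// Junction table sketch: one row per (table1 row, table2 row) pair, with cascading deletes.
export const createJunctionTableSketch = async (knex: Knex, tableName: string, table1: string, table2: string) => {
  if (await knex.schema.hasTable(tableName)) return;
  await knex.schema.createTable(tableName, (t) => {
    t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
    t.uuid(`${table1}Id`).notNullable();
    t.foreign(`${table1}Id`).references("id").inTable(table1).onDelete("CASCADE");
    t.uuid(`${table2}Id`).notNullable();
    t.foreign(`${table2}Id`).references("id").inTable(table2).onDelete("CASCADE");
    t.timestamps(true, true, true);
  });
};

// "On update" trigger sketch: assumes a pre-existing Postgres function that bumps "updatedAt".
export const createOnUpdateTriggerSketch = (knex: Knex, tableName: string) =>
  knex.raw(`
    CREATE TRIGGER "${tableName}_updatedAt"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);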

@ -1,117 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasActiveCaCertIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId");
if (!hasActiveCaCertIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.uuid("activeCaCertId").nullable();
t.foreign("activeCaCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthority}" ca
SET "activeCaCertId" = cac.id
FROM "${TableName.CertificateAuthorityCert}" cac
WHERE ca.id = cac."caId"
`);
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
const hasVersionColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version");
if (!hasVersionColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").nullable();
t.dropUnique(["caId"]);
});
await knex(TableName.CertificateAuthorityCert).update({ version: 1 }).whereNull("version");
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.integer("version").notNullable().alter();
});
}
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId");
if (!hasCaSecretIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").nullable();
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthorityCert}" cert
SET "caSecretId" = (
SELECT sec.id
FROM "${TableName.CertificateAuthoritySecret}" sec
WHERE sec."caId" = cert."caId"
)
`);
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("caSecretId").notNullable().alter();
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthoritySecret)) {
await knex.schema.alterTable(TableName.CertificateAuthoritySecret, (t) => {
t.dropUnique(["caId"]);
});
}
if (await knex.schema.hasTable(TableName.Certificate)) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
UPDATE "${TableName.Certificate}" cert
SET "caCertId" = (
SELECT caCert.id
FROM "${TableName.CertificateAuthorityCert}" caCert
WHERE caCert."caId" = cert."caId"
)
`);
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthority, "activeCaCertId")) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("activeCaCertId");
});
}
}
if (await knex.schema.hasTable(TableName.CertificateAuthorityCert)) {
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "version")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("version");
});
}
if (await knex.schema.hasColumn(TableName.CertificateAuthorityCert, "caSecretId")) {
await knex.schema.alterTable(TableName.CertificateAuthorityCert, (t) => {
t.dropColumn("caSecretId");
});
}
}
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "caCertId")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("caCertId");
});
}
}
}

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
if (!doesPasswordExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("password").nullable();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const doesPasswordExist = await knex.schema.hasColumn(TableName.SecretSharing, "password");
if (doesPasswordExist) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("password");
});
}
}
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (hasCreationLimitCol) {
t.dropColumn("creationLimit");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasCreationLimitCol = await knex.schema.hasColumn(TableName.RateLimit, "creationLimit");
await knex.schema.alterTable(TableName.RateLimit, (t) => {
if (!hasCreationLimitCol) {
t.integer("creationLimit").defaultTo(30).notNullable();
}
});
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.dropColumn("name");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasNameField = await knex.schema.hasColumn(TableName.SecretTag, "name");
if (!hasNameField) {
await knex.schema.alterTable(TableName.SecretTag, (t) => {
t.string("name");
});
}
}

@ -1,62 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.PkiCollection))) {
await knex.schema.createTable(TableName.PkiCollection, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("name").notNullable();
t.string("description").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.PkiCollection);
if (!(await knex.schema.hasTable(TableName.PkiCollectionItem))) {
await knex.schema.createTable(TableName.PkiCollectionItem, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("pkiCollectionId").notNullable();
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
t.uuid("caId").nullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.uuid("certId").nullable();
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.PkiCollectionItem);
if (!(await knex.schema.hasTable(TableName.PkiAlert))) {
await knex.schema.createTable(TableName.PkiAlert, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("pkiCollectionId").notNullable();
t.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("CASCADE");
t.string("name").notNullable();
t.integer("alertBeforeDays").notNullable();
t.string("recipientEmails").notNullable();
t.unique(["name", "projectId"]);
});
}
await createOnUpdateTrigger(knex, TableName.PkiAlert);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.PkiAlert);
await dropOnUpdateTrigger(knex, TableName.PkiAlert);
await knex.schema.dropTableIfExists(TableName.PkiCollectionItem);
await dropOnUpdateTrigger(knex, TableName.PkiCollectionItem);
await knex.schema.dropTableIfExists(TableName.PkiCollection);
await dropOnUpdateTrigger(knex, TableName.PkiCollection);
}

@ -1,55 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
if (!hasCertificateTemplateTable) {
await knex.schema.createTable(TableName.CertificateTemplate, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.uuid("caId").notNullable();
tb.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
tb.uuid("pkiCollectionId");
tb.foreign("pkiCollectionId").references("id").inTable(TableName.PkiCollection).onDelete("SET NULL");
tb.string("name").notNullable();
tb.string("commonName").notNullable();
tb.string("subjectAlternativeName").notNullable();
tb.string("ttl").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.CertificateTemplate);
}
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
TableName.Certificate,
"certificateTemplateId"
);
if (!doesCertificateTableHaveTemplateId) {
await knex.schema.alterTable(TableName.Certificate, (tb) => {
tb.uuid("certificateTemplateId");
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesCertificateTableHaveTemplateId = await knex.schema.hasColumn(
TableName.Certificate,
"certificateTemplateId"
);
if (doesCertificateTableHaveTemplateId) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("certificateTemplateId");
});
}
const hasCertificateTemplateTable = await knex.schema.hasTable(TableName.CertificateTemplate);
if (hasCertificateTemplateTable) {
await knex.schema.dropTable(TableName.CertificateTemplate);
await dropOnUpdateTrigger(knex, TableName.CertificateTemplate);
}
}

@ -1,26 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasEstConfigTable = await knex.schema.hasTable(TableName.CertificateTemplateEstConfig);
if (!hasEstConfigTable) {
await knex.schema.createTable(TableName.CertificateTemplateEstConfig, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.uuid("certificateTemplateId").notNullable().unique();
tb.foreign("certificateTemplateId").references("id").inTable(TableName.CertificateTemplate).onDelete("CASCADE");
tb.binary("encryptedCaChain").notNullable();
tb.string("hashedPassphrase").notNullable();
tb.boolean("isEnabled").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.CertificateTemplateEstConfig);
await dropOnUpdateTrigger(knex, TableName.CertificateTemplateEstConfig);
}

@ -1,36 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
const hasCaSecretIdColumn = await knex.schema.hasColumn(TableName.CertificateAuthorityCrl, "caSecretId");
if (!hasCaSecretIdColumn) {
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("caSecretId").nullable();
t.foreign("caSecretId").references("id").inTable(TableName.CertificateAuthoritySecret).onDelete("CASCADE");
});
await knex.raw(`
UPDATE "${TableName.CertificateAuthorityCrl}" crl
SET "caSecretId" = (
SELECT sec.id
FROM "${TableName.CertificateAuthoritySecret}" sec
WHERE sec."caId" = crl."caId"
)
`);
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("caSecretId").notNullable().alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthorityCrl)) {
await knex.schema.alterTable(TableName.CertificateAuthorityCrl, (t) => {
t.dropColumn("caSecretId");
});
}
}

@ -1,96 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.WorkflowIntegrations))) {
await knex.schema.createTable(TableName.WorkflowIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("integration").notNullable();
tb.string("slug").notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.string("description");
tb.unique(["orgId", "slug"]);
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
}
if (!(await knex.schema.hasTable(TableName.SlackIntegrations))) {
await knex.schema.createTable(TableName.SlackIntegrations, (tb) => {
tb.uuid("id", { primaryKey: true }).notNullable();
tb.foreign("id").references("id").inTable(TableName.WorkflowIntegrations).onDelete("CASCADE");
tb.string("teamId").notNullable();
tb.string("teamName").notNullable();
tb.string("slackUserId").notNullable();
tb.string("slackAppId").notNullable();
tb.binary("encryptedBotAccessToken").notNullable();
tb.string("slackBotId").notNullable();
tb.string("slackBotUserId").notNullable();
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SlackIntegrations);
}
if (!(await knex.schema.hasTable(TableName.ProjectSlackConfigs))) {
await knex.schema.createTable(TableName.ProjectSlackConfigs, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("projectId").notNullable().unique();
tb.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
tb.uuid("slackIntegrationId").notNullable();
tb.foreign("slackIntegrationId").references("id").inTable(TableName.SlackIntegrations).onDelete("CASCADE");
tb.boolean("isAccessRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("accessRequestChannels").notNullable().defaultTo("");
tb.boolean("isSecretRequestNotificationEnabled").notNullable().defaultTo(false);
tb.string("secretRequestChannels").notNullable().defaultTo("");
tb.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
}
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (!doesSuperAdminHaveSlackClientId) {
tb.binary("encryptedSlackClientId");
}
if (!doesSuperAdminHaveSlackClientSecret) {
tb.binary("encryptedSlackClientSecret");
}
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectSlackConfigs);
await dropOnUpdateTrigger(knex, TableName.ProjectSlackConfigs);
await knex.schema.dropTableIfExists(TableName.SlackIntegrations);
await dropOnUpdateTrigger(knex, TableName.SlackIntegrations);
await knex.schema.dropTableIfExists(TableName.WorkflowIntegrations);
await dropOnUpdateTrigger(knex, TableName.WorkflowIntegrations);
const doesSuperAdminHaveSlackClientId = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedSlackClientId");
const doesSuperAdminHaveSlackClientSecret = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedSlackClientSecret"
);
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
if (doesSuperAdminHaveSlackClientId) {
tb.dropColumn("encryptedSlackClientId");
}
if (doesSuperAdminHaveSlackClientSecret) {
tb.dropColumn("encryptedSlackClientSecret");
}
});
}

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
const hasRequireTemplateForIssuanceColumn = await knex.schema.hasColumn(
TableName.CertificateAuthority,
"requireTemplateForIssuance"
);
if (!hasRequireTemplateForIssuanceColumn) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.boolean("requireTemplateForIssuance").notNullable().defaultTo(false);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.CertificateAuthority)) {
await knex.schema.alterTable(TableName.CertificateAuthority, (t) => {
t.dropColumn("requireTemplateForIssuance");
});
}
}

@ -1,85 +0,0 @@
import { Knex } from "knex";
import { CertKeyUsage } from "@app/services/certificate/certificate-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// Certificate template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (tb) => {
if (!hasKeyUsagesCol) {
tb.specificType("keyUsages", "text[]");
}
if (!hasExtendedKeyUsagesCol) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
if (!hasKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!hasExtendedKeyUsagesCol) {
await knex(TableName.CertificateTemplate).update({
extendedKeyUsages: []
});
}
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (tb) => {
if (!doesCertTableHaveKeyUsages) {
tb.specificType("keyUsages", "text[]");
}
if (!doesCertTableHaveExtendedKeyUsages) {
tb.specificType("extendedKeyUsages", "text[]");
}
});
if (!doesCertTableHaveKeyUsages) {
await knex(TableName.Certificate).update({
keyUsages: [CertKeyUsage.DIGITAL_SIGNATURE, CertKeyUsage.KEY_ENCIPHERMENT]
});
}
if (!doesCertTableHaveExtendedKeyUsages) {
await knex(TableName.Certificate).update({
extendedKeyUsages: []
});
}
}
export async function down(knex: Knex): Promise<void> {
// Certificate Template
const hasKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "keyUsages");
const hasExtendedKeyUsagesCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.CertificateTemplate, (t) => {
if (hasKeyUsagesCol) {
t.dropColumn("keyUsages");
}
if (hasExtendedKeyUsagesCol) {
t.dropColumn("extendedKeyUsages");
}
});
// Certificate
const doesCertTableHaveKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "keyUsages");
const doesCertTableHaveExtendedKeyUsages = await knex.schema.hasColumn(TableName.Certificate, "extendedKeyUsages");
await knex.schema.alterTable(TableName.Certificate, (t) => {
if (doesCertTableHaveKeyUsages) {
t.dropColumn("keyUsages");
}
if (doesCertTableHaveExtendedKeyUsages) {
t.dropColumn("extendedKeyUsages");
}
});
}

@ -1,76 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAccessApproverGroupId = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approverGroupId"
);
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasSecretApproverGroupId = await knex.schema.hasColumn(
TableName.SecretApprovalPolicyApprover,
"approverGroupId"
);
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
// add column approverGroupId to AccessApprovalPolicyApprover
if (!hasAccessApproverGroupId) {
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
}
// make approverUserId nullable
if (hasAccessApproverUserId) {
table.uuid("approverUserId").nullable().alter();
}
});
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
// add column approverGroupId to SecretApprovalPolicyApprover
if (!hasSecretApproverGroupId) {
table.uuid("approverGroupId").nullable().references("id").inTable(TableName.Groups).onDelete("CASCADE");
}
// make approverUserId nullable
if (hasSecretApproverUserId) {
table.uuid("approverUserId").nullable().alter();
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAccessApproverGroupId = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approverGroupId"
);
const hasAccessApproverUserId = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "approverUserId");
const hasSecretApproverGroupId = await knex.schema.hasColumn(
TableName.SecretApprovalPolicyApprover,
"approverGroupId"
);
const hasSecretApproverUserId = await knex.schema.hasColumn(TableName.SecretApprovalPolicyApprover, "approverUserId");
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyApprover)) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (table) => {
if (hasAccessApproverGroupId) {
table.dropColumn("approverGroupId");
}
// make approverUserId not nullable
if (hasAccessApproverUserId) {
table.uuid("approverUserId").notNullable().alter();
}
});
// remove approverGroupId from SecretApprovalPolicyApprover and restore approverUserId to not nullable
await knex.schema.alterTable(TableName.SecretApprovalPolicyApprover, (table) => {
if (hasSecretApproverGroupId) {
table.dropColumn("approverGroupId");
}
// make approverUserId not nullable
if (hasSecretApproverUserId) {
table.uuid("approverUserId").notNullable().alter();
}
});
}
}

@ -1,24 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityMetadata))) {
await knex.schema.createTable(TableName.IdentityMetadata, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("key").notNullable();
tb.string("value").notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.uuid("identityId");
tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
tb.timestamps(true, true, true);
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityMetadata);
}

@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("iv").nullable().alter();
t.string("tag").nullable().alter();
t.string("encryptedValue").nullable().alter();
t.binary("encryptedSecret").nullable();
t.string("hashedHex").nullable().alter();
t.string("identifier", 64).nullable();
t.unique("identifier");
t.index("identifier");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("encryptedSecret");
t.dropColumn("identifier");
});
}
}

@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed"))) {
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
tb.datetime("lastUsed");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "lastUsed")) {
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
tb.dropColumn("lastUsed");
});
}
}

@ -1,46 +0,0 @@
import { Knex } from "knex";
import { dropConstraintIfExists } from "@app/db/migrations/utils/dropConstraintIfExists";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
// drop constraint if exists (won't exist if rolled back, see below)
await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);
// projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
if (hasOrgId) {
table.unique(["orgId", "projectId", "slug"]);
}
if (hasSlug) {
table.renameColumn("slug", "name");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");
// remove projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
if (hasName) {
table.renameColumn("name", "slug");
}
if (hasOrgId) {
table.dropUnique(["orgId", "projectId", "slug"]);
}
table.dropColumn("projectId");
});
}
}

@ -1,30 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (!hasSlug) {
// add slug back temporarily and set value equal to name
await knex.schema
.alterTable(TableName.KmsKey, (table) => {
table.string("slug", 32);
})
.then(() => knex(TableName.KmsKey).update("slug", knex.ref("name")));
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (hasSlug) {
await knex.schema.alterTable(TableName.KmsKey, (table) => {
table.dropColumn("slug");
});
}
}
}

@ -1,48 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AuditLog)) {
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesOrgIdExist) {
t.dropForeign("orgId");
}
if (doesProjectIdExist) {
t.dropForeign("projectId");
}
// add normalized field
if (!doesProjectNameExist) {
t.string("projectName");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesOrgIdExist) {
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
}
if (doesProjectIdExist) {
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
}
// remove normalized field
if (doesProjectNameExist) {
t.dropColumn("projectName");
}
});
}
}

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
// org default role
if (await knex.schema.hasTable(TableName.Organization)) {
const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");
if (!hasDefaultRoleCol) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.string("defaultMembershipRole").notNullable().defaultTo("member");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
// org default role
if (await knex.schema.hasTable(TableName.Organization)) {
const hasDefaultRoleCol = await knex.schema.hasColumn(TableName.Organization, "defaultMembershipRole");
if (hasDefaultRoleCol) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.dropColumn("defaultMembershipRole");
});
}
}
}

@ -1,6 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export const dropConstraintIfExists = (tableName: TableName, constraintName: string, knex: Knex) =>
knex.raw(`ALTER TABLE ${tableName} DROP CONSTRAINT IF EXISTS ${constraintName};`);

@ -1,105 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";
const getInstanceRootKey = async (knex: Knex) => {
const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
// ROOT_ENCRYPTION_KEY is base64 encoded; ENCRYPTION_KEY is plain utf8
const isBase64 = !process.env.ENCRYPTION_KEY;
if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
if (kmsRootConfig) {
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
// set the flag so that other instance nodes can start
return decryptedRootKey;
}
const newRootKey = randomSecureBytes(32);
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
await knex(TableName.KmsServerRootConfig).insert({
encryptedRootKey,
// eslint-disable-next-line
// @ts-ignore id is kept as fixed for idempotence and to avoid race condition
id: KMS_ROOT_CONFIG_UUID
});
return newRootKey;
};
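// Layout of Project.kmsSecretManagerEncryptedDataKey as assembled by the helper below
// (a descriptive note, assuming randomSecureBytes() defaults to a 32-byte key):
//   [ AES-256-GCM ciphertext of the data key, encrypted under the project KMS key | "v01" (3 bytes) ]
// The decryptor strips the trailing 3-byte version blob before decrypting.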
export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const project = await knex(TableName.Project).where({ id: projectId }).first();
if (!project) throw new Error("Missing project id");
const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
let secretManagerKmsKey;
const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
if (projectSecretManagerKmsId) {
const kmsDoc = await knex(TableName.KmsKey)
.leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
.where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
.first();
if (!kmsDoc) throw new Error("missing kms");
secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
} else {
const [kmsDoc] = await knex(TableName.KmsKey)
.insert({
name: slugify(alphaNumericNanoId(8).toLowerCase()),
orgId: project.orgId,
isReserved: false
})
.returning("*");
secretManagerKmsKey = randomSecureBytes(32);
const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
await knex(TableName.InternalKms).insert({
version: 1,
encryptedKey: encryptedKeyMaterial,
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
kmsKeyId: kmsDoc.id
});
}
const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
let dataKey: Buffer;
if (!encryptedSecretManagerDataKey) {
dataKey = randomSecureBytes();
// the versioning below is handled automatically in the kms service
const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
await knex(TableName.Project)
.where({ id: projectId })
.update({
kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
});
} else {
const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
}
return {
encryptor: ({ plainText }: { plainText: Buffer }) => {
const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
// Buffer#1 encrypted text + Buffer#2 version number
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
return { cipherTextBlob };
},
decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
return decryptedBlob;
}
};
};
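// A minimal usage sketch for the helper above (an assumed caller, not part of the migration
// set): fetch the project's secret-manager data key once, then round-trip a value through the
// returned encryptor/decryptor pair; the KMS version blob is appended and stripped internally.
export const exampleSecretRoundTrip = async (knex: Knex, projectId: string): Promise<Buffer> => {
  const { encryptor, decryptor } = await getSecretManagerDataKey(knex, projectId);
  const { cipherTextBlob } = encryptor({ plainText: Buffer.from("my-secret-value", "utf8") });
  // decryptor strips the trailing version blob and returns the original plaintext buffer
  return decryptor({ cipherTextBlob });
};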

@ -9,11 +9,10 @@ import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesApproversSchema = z.object({
id: z.string().uuid(),
approverId: z.string().uuid(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
approverUserId: z.string().uuid().nullable().optional(),
approverGroupId: z.string().uuid().nullable().optional()
updatedAt: z.date()
});
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { EnforcementLevel } from "@app/lib/types";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesSchema = z.object({
@ -15,7 +17,7 @@ export const AccessApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard")
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

@ -9,12 +9,11 @@ import { TImmutableDBKeys } from "./models";
export const AccessApprovalRequestsReviewersSchema = z.object({
id: z.string().uuid(),
member: z.string().uuid().nullable().optional(),
member: z.string().uuid(),
status: z.string(),
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
updatedAt: z.date()
});
export type TAccessApprovalRequestsReviewers = z.infer<typeof AccessApprovalRequestsReviewersSchema>;

@ -11,13 +11,12 @@ export const AccessApprovalRequestsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
privilegeId: z.string().uuid().nullable().optional(),
requestedBy: z.string().uuid().nullable().optional(),
requestedBy: z.string().uuid(),
isTemporary: z.boolean(),
temporaryRange: z.string().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
requestedByUserId: z.string().uuid()
updatedAt: z.date()
});
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

@ -20,8 +20,7 @@ export const AuditLogsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid().nullable().optional(),
projectId: z.string().nullable().optional(),
projectName: z.string().nullable().optional()
projectId: z.string().nullable().optional()
});
export type TAuditLogs = z.infer<typeof AuditLogsSchema>;

@ -27,9 +27,7 @@ export const CertificateAuthoritiesSchema = z.object({
maxPathLength: z.number().nullable().optional(),
keyAlgorithm: z.string(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional(),
activeCaCertId: z.string().uuid().nullable().optional(),
requireTemplateForIssuance: z.boolean().default(false)
notAfter: z.date().nullable().optional()
});
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;

@ -15,9 +15,7 @@ export const CertificateAuthorityCertsSchema = z.object({
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCertificate: zodBuffer,
encryptedCertificateChain: zodBuffer,
version: z.number(),
caSecretId: z.string().uuid()
encryptedCertificateChain: zodBuffer
});
export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;

@ -14,8 +14,7 @@ export const CertificateAuthorityCrlSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCrl: zodBuffer,
caSecretId: z.string().uuid()
encryptedCrl: zodBuffer
});
export type TCertificateAuthorityCrl = z.infer<typeof CertificateAuthorityCrlSchema>;

@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateTemplateEstConfigsSchema = z.object({
id: z.string().uuid(),
certificateTemplateId: z.string().uuid(),
encryptedCaChain: zodBuffer,
hashedPassphrase: z.string(),
isEnabled: z.boolean(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TCertificateTemplateEstConfigs = z.infer<typeof CertificateTemplateEstConfigsSchema>;
export type TCertificateTemplateEstConfigsInsert = Omit<
z.input<typeof CertificateTemplateEstConfigsSchema>,
TImmutableDBKeys
>;
export type TCertificateTemplateEstConfigsUpdate = Partial<
Omit<z.input<typeof CertificateTemplateEstConfigsSchema>, TImmutableDBKeys>
>;

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificateTemplatesSchema = z.object({
id: z.string().uuid(),
caId: z.string().uuid(),
pkiCollectionId: z.string().uuid().nullable().optional(),
name: z.string(),
commonName: z.string(),
subjectAlternativeName: z.string(),
ttl: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional()
});
export type TCertificateTemplates = z.infer<typeof CertificateTemplatesSchema>;
export type TCertificateTemplatesInsert = Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>;
export type TCertificateTemplatesUpdate = Partial<Omit<z.input<typeof CertificateTemplatesSchema>, TImmutableDBKeys>>;

@@ -20,11 +20,7 @@ export const CertificatesSchema = z.object({
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional(),
caCertId: z.string().uuid(),
certificateTemplateId: z.string().uuid().nullable().optional(),
keyUsages: z.string().array().nullable().optional(),
extendedKeyUsages: z.string().array().nullable().optional()
altNames: z.string().default("").nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;

@@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityMetadataSchema = z.object({
id: z.string().uuid(),
key: z.string(),
value: z.string(),
orgId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
identityId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityMetadata = z.infer<typeof IdentityMetadataSchema>;
export type TIdentityMetadataInsert = Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>;
export type TIdentityMetadataUpdate = Partial<Omit<z.input<typeof IdentityMetadataSchema>, TImmutableDBKeys>>;

@@ -14,8 +14,6 @@ export * from "./certificate-authority-crl";
export * from "./certificate-authority-secret";
export * from "./certificate-bodies";
export * from "./certificate-secrets";
export * from "./certificate-template-est-configs";
export * from "./certificate-templates";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
@@ -31,7 +29,6 @@ export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
export * from "./identity-oidc-auths";
export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
@@ -55,15 +52,11 @@ export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
export * from "./pki-alerts";
export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
@@ -73,37 +66,27 @@ export * from "./scim-tokens";
export * from "./secret-approval-policies";
export * from "./secret-approval-policies-approvers";
export * from "./secret-approval-request-secret-tags";
export * from "./secret-approval-request-secret-tags-v2";
export * from "./secret-approval-requests";
export * from "./secret-approval-requests-reviewers";
export * from "./secret-approval-requests-secrets";
export * from "./secret-approval-requests-secrets-v2";
export * from "./secret-blind-indexes";
export * from "./secret-folder-versions";
export * from "./secret-folders";
export * from "./secret-imports";
export * from "./secret-references";
export * from "./secret-references-v2";
export * from "./secret-rotation-output-v2";
export * from "./secret-rotation-outputs";
export * from "./secret-rotations";
export * from "./secret-scanning-git-risks";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";
export * from "./secret-snapshot-secrets-v2";
export * from "./secret-snapshots";
export * from "./secret-tag-junction";
export * from "./secret-tags";
export * from "./secret-v2-tag-junction";
export * from "./secret-version-tag-junction";
export * from "./secret-version-v2-tag-junction";
export * from "./secret-versions";
export * from "./secret-versions-v2";
export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./super-admin";
export * from "./trusted-ips";
export * from "./user-actions";
@@ -112,4 +95,3 @@ export * from "./user-encryption-keys";
export * from "./user-group-membership";
export * from "./users";
export * from "./webhooks";
export * from "./workflow-integrations";

@@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IntegrationAuthsSchema = z.object({
@@ -34,11 +32,7 @@ export const IntegrationAuthsSchema = z.object({
updatedAt: z.date(),
awsAssumeIamRoleArnCipherText: z.string().nullable().optional(),
awsAssumeIamRoleArnIV: z.string().nullable().optional(),
awsAssumeIamRoleArnTag: z.string().nullable().optional(),
encryptedAccess: zodBuffer.nullable().optional(),
encryptedAccessId: zodBuffer.nullable().optional(),
encryptedRefresh: zodBuffer.nullable().optional(),
encryptedAwsAssumeIamRoleArn: zodBuffer.nullable().optional()
awsAssumeIamRoleArnTag: z.string().nullable().optional()
});
export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;

@@ -13,11 +13,9 @@ export const KmsKeysSchema = z.object({
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
orgId: z.string().uuid(),
name: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string().nullable().optional(),
slug: z.string().nullable().optional()
slug: z.string()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

@@ -3,17 +3,12 @@ import { z } from "zod";
export enum TableName {
Users = "users",
CertificateAuthority = "certificate_authorities",
CertificateTemplateEstConfig = "certificate_template_est_configs",
CertificateAuthorityCert = "certificate_authority_certs",
CertificateAuthoritySecret = "certificate_authority_secret",
CertificateAuthorityCrl = "certificate_authority_crl",
Certificate = "certificates",
CertificateBody = "certificate_bodies",
CertificateSecret = "certificate_secrets",
CertificateTemplate = "certificate_templates",
PkiAlert = "pki_alerts",
PkiCollection = "pki_collections",
PkiCollectionItem = "pki_collection_items",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
@@ -70,8 +65,6 @@ export enum TableName {
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
// used by both identity and users
IdentityMetadata = "identity_metadata",
ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@@ -97,18 +90,9 @@ export enum TableName {
TrustedIps = "trusted_ips",
DynamicSecret = "dynamic_secrets",
DynamicSecretLease = "dynamic_secret_leases",
SecretV2 = "secrets_v2",
SecretReferenceV2 = "secret_references_v2",
SecretVersionV2 = "secret_versions_v2",
SecretApprovalRequestSecretV2 = "secret_approval_requests_secrets_v2",
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
// junction tables with tags
SecretV2JnTag = "secret_v2_tag_junction",
JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction",
SecretVersionV2Tag = "secret_version_v2_tag_junction",
SecretRotationOutputV2 = "secret_rotation_output_v2",
// KMS Service
KmsServerRootConfig = "kms_root_config",
KmsKey = "kms_keys",
@@ -116,10 +100,7 @@ export enum TableName {
InternalKms = "internal_kms",
InternalKmsKeyVersion = "internal_kms_key_version",
// @deprecated
KmsKeyVersion = "kms_key_versions",
WorkflowIntegrations = "workflow_integrations",
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs"
KmsKeyVersion = "kms_key_versions"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -176,8 +157,7 @@ export enum SecretType {
export enum ProjectVersion {
V1 = 1,
V2 = 2,
V3 = 3
V2 = 2
}
export enum ProjectUpgradeStatus {

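The TableName enum in models.ts keeps the raw table strings for the knex data layer in one place. A rough sketch of how such an enum is typically consumed, assuming only the public knex and pg packages and a made-up DB_CONNECTION_URI variable (this is not the repository's actual data-access code):

import { knex } from "knex";

enum TableName {
  KmsKey = "kms_keys",
  Users = "users"
}

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// Referencing tables through the enum avoids scattering string literals across queries.
const getKeyBySlug = async (slug: string) => db(TableName.KmsKey).where({ slug }).first();

getKeyBySlug("default")
  .then((row) => console.log(row))
  .finally(() => db.destroy());
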
@@ -26,8 +26,7 @@ export const OidcConfigsSchema = z.object({
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
orgId: z.string().uuid()
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;

@@ -18,7 +18,7 @@ export const OrgMembershipsSchema = z.object({
orgId: z.string().uuid(),
roleId: z.string().uuid().nullable().optional(),
projectFavorites: z.string().array().nullable().optional(),
isActive: z.boolean().default(true)
isActive: z.boolean()
});
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;

@@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OrganizationsSchema = z.object({
@@ -18,9 +16,7 @@ export const OrganizationsSchema = z.object({
updatedAt: z.date(),
authEnforced: z.boolean().default(false).nullable().optional(),
scimEnabled: z.boolean().default(false).nullable().optional(),
kmsDefaultKeyId: z.string().uuid().nullable().optional(),
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
defaultMembershipRole: z.string().default("member")
kmsDefaultKeyId: z.string().uuid().nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

@@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PkiAlertsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
pkiCollectionId: z.string().uuid(),
name: z.string(),
alertBeforeDays: z.number(),
recipientEmails: z.string()
});
export type TPkiAlerts = z.infer<typeof PkiAlertsSchema>;
export type TPkiAlertsInsert = Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>;
export type TPkiAlertsUpdate = Partial<Omit<z.input<typeof PkiAlertsSchema>, TImmutableDBKeys>>;

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PkiCollectionItemsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
pkiCollectionId: z.string().uuid(),
caId: z.string().uuid().nullable().optional(),
certId: z.string().uuid().nullable().optional()
});
export type TPkiCollectionItems = z.infer<typeof PkiCollectionItemsSchema>;
export type TPkiCollectionItemsInsert = Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>;
export type TPkiCollectionItemsUpdate = Partial<Omit<z.input<typeof PkiCollectionItemsSchema>, TImmutableDBKeys>>;

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PkiCollectionsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
name: z.string(),
description: z.string()
});
export type TPkiCollections = z.infer<typeof PkiCollectionsSchema>;
export type TPkiCollectionsInsert = Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>;
export type TPkiCollectionsUpdate = Partial<Omit<z.input<typeof PkiCollectionsSchema>, TImmutableDBKeys>>;

@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSlackConfigsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
slackIntegrationId: z.string().uuid(),
isAccessRequestNotificationEnabled: z.boolean().default(false),
accessRequestChannels: z.string().default(""),
isSecretRequestNotificationEnabled: z.boolean().default(false),
secretRequestChannels: z.string().default(""),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectSlackConfigs = z.infer<typeof ProjectSlackConfigsSchema>;
export type TProjectSlackConfigsInsert = Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>;
export type TProjectSlackConfigsUpdate = Partial<Omit<z.input<typeof ProjectSlackConfigsSchema>, TImmutableDBKeys>>;

@@ -10,7 +10,7 @@ import { TImmutableDBKeys } from "./models";
export const ProjectUserAdditionalPrivilegeSchema = z.object({
id: z.string().uuid(),
slug: z.string(),
projectMembershipId: z.string().uuid().nullable().optional(),
projectMembershipId: z.string().uuid(),
isTemporary: z.boolean().default(false),
temporaryMode: z.string().nullable().optional(),
temporaryRange: z.string().nullable().optional(),
@@ -18,9 +18,7 @@ export const ProjectUserAdditionalPrivilegeSchema = z.object({
temporaryAccessEndTime: z.date().nullable().optional(),
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
userId: z.string().uuid(),
projectId: z.string()
updatedAt: z.date()
});
export type TProjectUserAdditionalPrivilege = z.infer<typeof ProjectUserAdditionalPrivilegeSchema>;

@@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const ProjectsSchema = z.object({
@@ -22,8 +20,7 @@ export const ProjectsSchema = z.object({
pitVersionLimit: z.number().default(10),
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
auditLogsRetentionDays: z.number().nullable().optional(),
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
kmsSecretManagerKeyId: z.string().uuid().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

@@ -15,6 +15,7 @@ export const RateLimitSchema = z.object({
authRateLimit: z.number().default(60),
inviteUserRateLimit: z.number().default(30),
mfaRateLimit: z.number().default(20),
creationLimit: z.number().default(30),
publicEndpointLimit: z.number().default(30),
createdAt: z.date(),
updatedAt: z.date()

@@ -12,8 +12,7 @@ export const SecretApprovalPoliciesApproversSchema = z.object({
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
approverUserId: z.string().uuid().nullable().optional(),
approverGroupId: z.string().uuid().nullable().optional()
approverUserId: z.string().uuid()
});
export type TSecretApprovalPoliciesApprovers = z.infer<typeof SecretApprovalPoliciesApproversSchema>;

@@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretApprovalRequestSecretTagsV2Schema = z.object({
id: z.string().uuid(),
secretId: z.string().uuid(),
tagId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretApprovalRequestSecretTagsV2 = z.infer<typeof SecretApprovalRequestSecretTagsV2Schema>;
export type TSecretApprovalRequestSecretTagsV2Insert = Omit<
z.input<typeof SecretApprovalRequestSecretTagsV2Schema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestSecretTagsV2Update = Partial<
Omit<z.input<typeof SecretApprovalRequestSecretTagsV2Schema>, TImmutableDBKeys>
>;

@@ -1,37 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretApprovalRequestsSecretsV2Schema = z.object({
id: z.string().uuid(),
version: z.number().default(1).nullable().optional(),
key: z.string(),
encryptedValue: zodBuffer.nullable().optional(),
encryptedComment: zodBuffer.nullable().optional(),
reminderNote: z.string().nullable().optional(),
reminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
metadata: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
requestId: z.string().uuid(),
op: z.string(),
secretId: z.string().uuid().nullable().optional(),
secretVersion: z.string().uuid().nullable().optional()
});
export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;
export type TSecretApprovalRequestsSecretsV2Insert = Omit<
z.input<typeof SecretApprovalRequestsSecretsV2Schema>,
TImmutableDBKeys
>;
export type TSecretApprovalRequestsSecretsV2Update = Partial<
Omit<z.input<typeof SecretApprovalRequestsSecretsV2Schema>, TImmutableDBKeys>
>;

@@ -15,12 +15,12 @@ export const SecretApprovalRequestsSchema = z.object({
conflicts: z.unknown().nullable().optional(),
slug: z.string(),
folderId: z.string().uuid(),
bypassReason: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
isReplicated: z.boolean().nullable().optional(),
committerUserId: z.string().uuid(),
statusChangedByUserId: z.string().uuid().nullable().optional(),
bypassReason: z.string().nullable().optional()
statusChangedByUserId: z.string().uuid().nullable().optional()
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;

@@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretReferencesV2Schema = z.object({
id: z.string().uuid(),
environment: z.string(),
secretPath: z.string(),
secretKey: z.string(),
secretId: z.string().uuid()
});
export type TSecretReferencesV2 = z.infer<typeof SecretReferencesV2Schema>;
export type TSecretReferencesV2Insert = Omit<z.input<typeof SecretReferencesV2Schema>, TImmutableDBKeys>;
export type TSecretReferencesV2Update = Partial<Omit<z.input<typeof SecretReferencesV2Schema>, TImmutableDBKeys>>;

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretRotationOutputV2Schema = z.object({
id: z.string().uuid(),
key: z.string(),
secretId: z.string().uuid(),
rotationId: z.string().uuid()
});
export type TSecretRotationOutputV2 = z.infer<typeof SecretRotationOutputV2Schema>;
export type TSecretRotationOutputV2Insert = Omit<z.input<typeof SecretRotationOutputV2Schema>, TImmutableDBKeys>;
export type TSecretRotationOutputV2Update = Partial<
Omit<z.input<typeof SecretRotationOutputV2Schema>, TImmutableDBKeys>
>;

@@ -5,28 +5,23 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { SecretSharingAccessType } from "@app/lib/types";
import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),
encryptedValue: z.string(),
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid().nullable().optional(),
accessType: z.nativeEnum(SecretSharingAccessType).default(SecretSharingAccessType.Organization),
createdAt: z.date(),
updatedAt: z.date(),
expiresAfterViews: z.number().nullable().optional(),
accessType: z.string().default("anyone"),
name: z.string().nullable().optional(),
lastViewedAt: z.date().nullable().optional(),
password: z.string().nullable().optional(),
encryptedSecret: zodBuffer.nullable().optional(),
identifier: z.string().nullable().optional()
expiresAfterViews: z.number().nullable().optional()
});
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;

@@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretSnapshotSecretsV2Schema = z.object({
id: z.string().uuid(),
envId: z.string().uuid(),
secretVersionId: z.string().uuid(),
snapshotId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretSnapshotSecretsV2 = z.infer<typeof SecretSnapshotSecretsV2Schema>;
export type TSecretSnapshotSecretsV2Insert = Omit<z.input<typeof SecretSnapshotSecretsV2Schema>, TImmutableDBKeys>;
export type TSecretSnapshotSecretsV2Update = Partial<
Omit<z.input<typeof SecretSnapshotSecretsV2Schema>, TImmutableDBKeys>
>;

@@ -9,6 +9,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretTagsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
color: z.string().nullable().optional(),
createdAt: z.date(),

@@ -1,18 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretV2TagJunctionSchema = z.object({
id: z.string().uuid(),
secrets_v2Id: z.string().uuid(),
secret_tagsId: z.string().uuid()
});
export type TSecretV2TagJunction = z.infer<typeof SecretV2TagJunctionSchema>;
export type TSecretV2TagJunctionInsert = Omit<z.input<typeof SecretV2TagJunctionSchema>, TImmutableDBKeys>;
export type TSecretV2TagJunctionUpdate = Partial<Omit<z.input<typeof SecretV2TagJunctionSchema>, TImmutableDBKeys>>;

@@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretVersionV2TagJunctionSchema = z.object({
id: z.string().uuid(),
secret_versions_v2Id: z.string().uuid(),
secret_tagsId: z.string().uuid()
});
export type TSecretVersionV2TagJunction = z.infer<typeof SecretVersionV2TagJunctionSchema>;
export type TSecretVersionV2TagJunctionInsert = Omit<
z.input<typeof SecretVersionV2TagJunctionSchema>,
TImmutableDBKeys
>;
export type TSecretVersionV2TagJunctionUpdate = Partial<
Omit<z.input<typeof SecretVersionV2TagJunctionSchema>, TImmutableDBKeys>
>;

@@ -1,33 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretVersionsV2Schema = z.object({
id: z.string().uuid(),
version: z.number().default(1),
type: z.string().default("shared"),
key: z.string(),
encryptedValue: zodBuffer.nullable().optional(),
encryptedComment: zodBuffer.nullable().optional(),
reminderNote: z.string().nullable().optional(),
reminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
metadata: z.unknown().nullable().optional(),
envId: z.string().uuid().nullable().optional(),
secretId: z.string().uuid(),
folderId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;
export type TSecretVersionsV2Insert = Omit<z.input<typeof SecretVersionsV2Schema>, TImmutableDBKeys>;
export type TSecretVersionsV2Update = Partial<Omit<z.input<typeof SecretVersionsV2Schema>, TImmutableDBKeys>>;

@@ -1,31 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretsV2Schema = z.object({
id: z.string().uuid(),
version: z.number().default(1),
type: z.string().default("shared"),
key: z.string(),
encryptedValue: zodBuffer.nullable().optional(),
encryptedComment: zodBuffer.nullable().optional(),
reminderNote: z.string().nullable().optional(),
reminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
metadata: z.unknown().nullable().optional(),
userId: z.string().uuid().nullable().optional(),
folderId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretsV2 = z.infer<typeof SecretsV2Schema>;
export type TSecretsV2Insert = Omit<z.input<typeof SecretsV2Schema>, TImmutableDBKeys>;
export type TSecretsV2Update = Partial<Omit<z.input<typeof SecretsV2Schema>, TImmutableDBKeys>>;

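The v2 secret schemas above validate rows whose encrypted columns come back from Postgres as Node Buffers. A self-contained approximation of how such a row might be parsed, where zodBuffer is a stand-in for the internal @app/lib/zod helper rather than its real implementation:

import { z } from "zod";

const zodBuffer = z.custom<Buffer>((val) => Buffer.isBuffer(val));

const SecretRowSchema = z.object({
  id: z.string().uuid(),
  key: z.string(),
  encryptedValue: zodBuffer.nullable().optional(),
  version: z.number().default(1),
  createdAt: z.date(),
  updatedAt: z.date()
});

const row = SecretRowSchema.parse({
  id: "0b9f1c1e-6a2c-4c6e-9f6a-2f9f2b1c3d4e",
  key: "DATABASE_URL",
  encryptedValue: Buffer.from("ciphertext"),
  createdAt: new Date(),
  updatedAt: new Date()
});

console.log(row.version); // 1, supplied by the schema default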