Compare commits

..

11 Commits

SHA1 Message Date
eaccb3ddbc Update signup-router.ts 2024-02-14 15:43:09 -05:00
ad482fb24e Update signup-router.ts 2024-02-14 15:37:38 -05:00
94cf3c5ae2 Update signup-router.ts 2024-02-14 15:32:31 -05:00
9b781d3af8 Update signup-router.ts 2024-02-13 19:07:17 -05:00
f38d64cc90 Update signup-router.ts 2024-02-13 18:47:36 -05:00
6c738f7ad1 Update signup-router.ts 2024-02-13 18:44:04 -05:00
afb818e41a Update signup-router.ts 2024-02-13 18:40:02 -05:00
c0d679dbd4 Update signup-router.ts 2024-02-13 18:37:26 -05:00
7046542ace Update signup-router.ts 2024-02-13 18:34:54 -05:00
cdf90bf9a5 Update signup-router.ts 2024-02-13 22:35:18 +01:00
ecfb3ec95b Update project-dal.ts 2024-02-13 22:31:27 +01:00
249 changed files with 4293 additions and 11232 deletions

View File

@ -3,22 +3,24 @@
# THIS IS A SAMPLE ENCRYPTION KEY AND SHOULD NEVER BE USED FOR PRODUCTION
ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# Required
DB_CONNECTION_URI=postgres://infisical:infisical@db:5432/infisical
# JWT
# Required secrets to sign JWT tokens
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# MongoDB
# Backend will connect to the MongoDB instance at the connection string MONGO_URL, which can point either
# to the MongoDB container instance or to Mongo Cloud
# Required
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example
# Website URL
# Required
SITE_URL=http://localhost:8080
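
For orientation, here is a minimal sketch of how a backend could load and validate the variables above at startup using zod (listed later in this diff as a backend dependency). The variable names mirror the sample file; the loader itself is illustrative and is not the repository's actual config code.

```typescript
// Illustrative only: a zod-based loader for the sample variables above.
// The schema shape is an assumption, not Infisical's real env parser.
import "dotenv/config";
import { z } from "zod";

const envSchema = z.object({
  ENCRYPTION_KEY: z.string().length(32), // the sample value is a 32-character hex string
  AUTH_SECRET: z.string().min(1), // used to sign JWT tokens
  DB_CONNECTION_URI: z.string().url(), // Postgres connection string
  REDIS_URL: z.string().url(),
  MONGO_URL: z.string().url().optional(), // only needed for the Mongo-backed setup
  SITE_URL: z.string().url().default("http://localhost:8080")
});

// Throws early with a readable error if a required variable is missing or malformed.
export const env = envSchema.parse(process.env);
```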

View File

@ -1,4 +0,0 @@
REDIS_URL=redis://localhost:6379
DB_CONNECTION_URI=postgres://infisical:infisical@localhost/infisical?sslmode=disable
AUTH_SECRET=4bnfe4e407b8921c104518903515b218
ENCRYPTION_KEY=4bnfe4e407b8921c104518903515b218

View File

@ -28,7 +28,7 @@ jobs:
run: docker build --tag infisical-api .
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
run: touch .env && docker-compose -f docker-compose.pg.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
@ -42,34 +42,14 @@ jobs:
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
exit 1
fi
- name: Wait for containers to be stable
run: timeout 60s sh -c 'until docker ps | grep infisical-api | grep -q healthy; do echo "Waiting for container to be healthy..."; sleep 2; done'
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
run: |
docker-compose -f "docker-compose.dev.yml" down
docker-compose -f "docker-compose.pg.yml" down
docker stop infisical-api
docker remove infisical-api

View File

@ -1,4 +1,4 @@
name: Check Frontend Type and Lint check
name: Check Frontend Pull Request
on:
pull_request:
@ -10,8 +10,8 @@ on:
- "frontend/.eslintrc.js"
jobs:
check-fe-ts-lint:
name: Check Frontend Type and Lint check
check-fe-pr:
name: Check
runs-on: ubuntu-latest
timeout-minutes: 15
@ -25,11 +25,12 @@ jobs:
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
run: npm ci --only-production --ignore-scripts
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Lint check
run: npm run lint:fix
# -
# name: 🧪 Run tests
# run: npm run test:ci
# working-directory: frontend
- name: 🏗️ Run build
run: npm run build
working-directory: frontend

View File

@ -5,14 +5,9 @@ on:
- "infisical/v*.*.*-postgres"
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version

View File

@ -1,47 +0,0 @@
name: "Run backend tests"
on:
pull_request:
types: [opened, synchronize]
paths:
- "backend/**"
- "!backend/README.md"
- "!backend/.*"
- "backend/.eslintrc.js"
workflow_call:
jobs:
check-be-pr:
name: Run integration test
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
if: ${{ env.ACT }}
name: Install `docker-compose` for local simulations
with:
version: "2.14.2"
- name: 🔧 Setup Node 20
uses: actions/setup-node@v3
with:
node-version: "20"
cache: "npm"
cache-dependency-path: backend/package-lock.json
- name: Install dependencies
run: npm install
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker-compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
run: npm run test:e2e
working-directory: backend
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker-compose -f "docker-compose.dev.yml" down

.gitignore vendored: 2 changed lines
View File

@ -63,5 +63,3 @@ yarn-error.log*
.vscode/*
frontend-build
*.tgz

View File

@ -2,6 +2,6 @@
Thanks for taking the time to contribute! 😃 🚀
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/getting-started/overview) for instructions on how to contribute.
Please refer to our [Contributing Guide](https://infisical.com/docs/contributing/overview) for instructions on how to contribute.
We also have some 🔥amazing🔥 merch for our contributors. Please reach out to tony@infisical.com for more info 👀

View File

@ -5,10 +5,16 @@ push:
docker-compose -f docker-compose.yml push
up-dev:
docker compose -f docker-compose.dev.yml up --build
docker-compose -f docker-compose.dev.yml up --build
up-pg-dev:
docker compose -f docker-compose.pg.yml up --build
i-dev:
infisical run -- docker-compose -f docker-compose.dev.yml up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
docker-compose -f docker-compose.yml up --build
down:
docker-compose down

View File

@ -84,13 +84,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.yml up
```
Create an account at `http://localhost:80`

View File

@ -1,3 +1,2 @@
vitest-environment-infisical.ts
vitest.config.ts
vitest.e2e.config.ts

View File

@ -21,18 +21,6 @@ module.exports = {
tsconfigRootDir: __dirname
},
root: true,
overrides: [
{
files: ["./e2e-test/**/*"],
rules: {
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unsafe-call": "off",
}
}
],
rules: {
"@typescript-eslint/no-empty-function": "off",
"@typescript-eslint/no-unsafe-enum-comparison": "off",

View File

@ -1,71 +0,0 @@
import { OrgMembershipRole } from "@app/db/schemas";
import { seedData1 } from "@app/db/seed-data";
export const createIdentity = async (name: string, role: string) => {
const createIdentityRes = await testServer.inject({
method: "POST",
url: "/api/v1/identities",
body: {
name,
role,
organizationId: seedData1.organization.id
},
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(createIdentityRes.statusCode).toBe(200);
return createIdentityRes.json().identity;
};
export const deleteIdentity = async (id: string) => {
const deleteIdentityRes = await testServer.inject({
method: "DELETE",
url: `/api/v1/identities/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(deleteIdentityRes.statusCode).toBe(200);
return deleteIdentityRes.json().identity;
};
describe("Identity v1", async () => {
test("Create identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethod).toBeNull();
await deleteIdentity(newIdentity.id);
});
test("Update identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethod).toBeNull();
const updatedIdentity = await testServer.inject({
method: "PATCH",
url: `/api/v1/identities/${newIdentity.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: "updated-mac-1",
role: OrgMembershipRole.Member
}
});
expect(updatedIdentity.statusCode).toBe(200);
expect(updatedIdentity.json().identity.name).toBe("updated-mac-1");
await deleteIdentity(newIdentity.id);
});
test("Delete Identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
const deletedIdentity = await deleteIdentity(newIdentity.id);
expect(deletedIdentity.name).toBe("mac1");
});
});

View File

@ -1,6 +1,5 @@
import jsrp from "jsrp";
import { seedData1 } from "@app/db/seed-data";
import jsrp from "jsrp";
describe("Login V1 Router", async () => {
// eslint-disable-next-line

View File

@ -1,40 +1,6 @@
import { seedData1 } from "@app/db/seed-data";
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";
const createProjectEnvironment = async (name: string, slug: string) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name,
slug
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};
const deleteProjectEnvironment = async (envId: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${envId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
return payload.environment;
};
describe("Project Environment Router", async () => {
test("Get default environments", async () => {
const res = await testServer.inject({
@ -65,10 +31,24 @@ describe("Project Environment Router", async () => {
expect(payload.workspace.environments.length).toBe(3);
});
const mockProjectEnv = { name: "temp", slug: "temp" }; // id will be filled in create op
const mockProjectEnv = { name: "temp", slug: "temp", id: "" }; // id will be filled in create op
test("Create environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
expect(newEnvironment).toEqual(
const res = await testServer.inject({
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: mockProjectEnv.name,
slug: mockProjectEnv.slug
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: expect.any(String),
name: mockProjectEnv.name,
@ -79,15 +59,14 @@ describe("Project Environment Router", async () => {
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
mockProjectEnv.id = payload.environment.id;
});
test("Update environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const updatedName = { name: "temp#2", slug: "temp2" };
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${newEnvironment.id}`,
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
@ -103,7 +82,7 @@ describe("Project Environment Router", async () => {
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: newEnvironment.id,
id: expect.any(String),
name: updatedName.name,
slug: updatedName.slug,
projectId: seedData1.project.id,
@ -112,21 +91,61 @@ describe("Project Environment Router", async () => {
updatedAt: expect.any(String)
})
);
await deleteProjectEnvironment(newEnvironment.id);
mockProjectEnv.name = updatedName.name;
mockProjectEnv.slug = updatedName.slug;
});
test("Delete environment", async () => {
const newEnvironment = await createProjectEnvironment(mockProjectEnv.name, mockProjectEnv.slug);
const deletedProjectEnvironment = await deleteProjectEnvironment(newEnvironment.id);
expect(deletedProjectEnvironment).toEqual(
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: deletedProjectEnvironment.id,
id: expect.any(String),
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 4,
position: 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
});
// after all these operations the list of environments should still be the same
test("Default list of environment", async () => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("workspace");
// check for default environments
expect(payload).toEqual({
workspace: expect.objectContaining({
name: seedData1.project.name,
id: seedData1.project.id,
slug: seedData1.project.slug,
environments: expect.arrayContaining([
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
])
})
});
// ensure only the default environments exist
expect(payload.workspace.environments.length).toBe(3);
});
});

View File

@ -1,6 +1,12 @@
import { seedData1 } from "@app/db/seed-data";
const createFolder = async (dto: { path: string; name: string }) => {
describe("Secret Folder Router", async () => {
test.each([
{ name: "folder1", path: "/" }, // one in root
{ name: "folder1", path: "/level1/level2" }, // then create a deep one creating intermediate ones
{ name: "folder2", path: "/" },
{ name: "folder1", path: "/level1/level2" } // this should not create folder return same thing
])("Create folder $name in $path", async ({ name, path }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
@ -10,47 +16,21 @@ const createFolder = async (dto: { path: string; name: string }) => {
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: dto.name,
path: dto.path
name,
path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};
const deleteFolder = async (dto: { path: string; id: string }) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${dto.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: dto.path
}
});
expect(res.statusCode).toBe(200);
return res.json().folder;
};
describe("Secret Folder Router", async () => {
test.each([
{ name: "folder1", path: "/" }, // one in root
{ name: "folder1", path: "/level1/level2" }, // then create a deep one creating intermediate ones
{ name: "folder2", path: "/" },
{ name: "folder1", path: "/level1/level2" } // this should not create folder return same thing
])("Create folder $name in $path", async ({ name, path }) => {
const createdFolder = await createFolder({ path, name });
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
// check for default environments
expect(createdFolder).toEqual(
expect.objectContaining({
expect(payload).toEqual({
folder: expect.objectContaining({
name,
id: expect.any(String)
})
);
await deleteFolder({ path, id: createdFolder.id });
});
});
test.each([
@ -63,8 +43,6 @@ describe("Secret Folder Router", async () => {
},
{ path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
])("Get folders $path", async ({ path, expected }) => {
const newFolders = await Promise.all(expected.folders.map(({ name }) => createFolder({ name, path })));
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
@ -81,22 +59,36 @@ describe("Secret Folder Router", async () => {
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folders");
expect(payload.folders.length >= expected.folders.length).toBeTruthy();
expect(payload).toEqual({
folders: expect.arrayContaining(expected.folders.map((el) => expect.objectContaining(el)))
});
await Promise.all(newFolders.map(({ id }) => deleteFolder({ path, id })));
expect(payload.folders.length).toBe(expected.length);
expect(payload).toEqual({ folders: expected.folders.map((el) => expect.objectContaining(el)) });
});
let toBeDeleteFolderId = "";
test("Update a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
expect(newFolder).toEqual(
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/folders/folder1`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: "folder-updated",
path: "/level1/level2"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
expect(payload.folder).toEqual(
expect.objectContaining({
id: expect.any(String),
name: "folder-updated"
})
);
toBeDeleteFolderId = payload.folder.id;
const resUpdatedFolders = await testServer.inject({
method: "GET",
@ -114,16 +106,14 @@ describe("Secret Folder Router", async () => {
expect(resUpdatedFolders.statusCode).toBe(200);
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
expect(updatedFolderList).toHaveProperty("folders");
expect(updatedFolderList.folders.length).toEqual(1);
expect(updatedFolderList.folders[0].name).toEqual("folder-updated");
await deleteFolder({ path: "/level1/level2", id: newFolder.id });
});
test("Delete a deep folder", async () => {
const newFolder = await createFolder({ name: "folder-updated", path: "/level1/level2" });
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${newFolder.id}`,
url: `/api/v1/folders/${toBeDeleteFolderId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},

View File

@ -1,6 +1,10 @@
import { seedData1 } from "@app/db/seed-data";
const createSecretImport = async (importPath: string, importEnv: string) => {
describe("Secret Folder Router", async () => {
test.each([
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
@ -21,37 +25,8 @@ const createSecretImport = async (importPath: string, importEnv: string) => {
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
const deleteSecretImport = async (id: string) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
return payload.secretImport;
};
describe("Secret Import Router", async () => {
test.each([
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
// verify the shape of the created secret import
const payload = await createSecretImport(importPath, importEnv);
expect(payload).toEqual(
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
@ -62,12 +37,10 @@ describe("Secret Import Router", async () => {
})
})
);
await deleteSecretImport(payload.id);
});
let testSecretImportId = "";
test("Get secret imports", async () => {
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
@ -85,6 +58,7 @@ describe("Secret Import Router", async () => {
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
expect(payload.secretImports.length).toBe(2);
testSecretImportId = payload.secretImports[0].id;
expect(payload.secretImports).toEqual(
expect.arrayContaining([
expect.objectContaining({
@ -98,20 +72,12 @@ describe("Secret Import Router", async () => {
})
])
);
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
});
test("Update secret import position", async () => {
const devImportDetails = { path: "/", envSlug: "dev" };
const stagingImportDetails = { path: "/", envSlug: "staging" };
const createdImport1 = await createSecretImport(devImportDetails.path, devImportDetails.envSlug);
const createdImport2 = await createSecretImport(stagingImportDetails.path, stagingImportDetails.envSlug);
const updateImportRes = await testServer.inject({
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/secret-imports/${createdImport1.id}`,
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
@ -125,8 +91,8 @@ describe("Secret Import Router", async () => {
}
});
expect(updateImportRes.statusCode).toBe(200);
const payload = JSON.parse(updateImportRes.payload);
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// verify the updated secret import
expect(payload.secretImport).toEqual(
@ -136,7 +102,7 @@ describe("Secret Import Router", async () => {
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.stringMatching(devImportDetails.envSlug),
slug: expect.any(String),
id: expect.any(String)
})
})
@ -158,19 +124,28 @@ describe("Secret Import Router", async () => {
expect(secretImportsListRes.statusCode).toBe(200);
const secretImportList = JSON.parse(secretImportsListRes.payload);
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports[1].id).toEqual(createdImport1.id);
expect(secretImportList.secretImports[0].id).toEqual(createdImport2.id);
await deleteSecretImport(createdImport1.id);
await deleteSecretImport(createdImport2.id);
expect(secretImportList.secretImports[1].id).toEqual(testSecretImportId);
});
test("Delete secret import position", async () => {
const createdImport1 = await createSecretImport("/", "dev");
const createdImport2 = await createSecretImport("/", "staging");
const deletedImport = await deleteSecretImport(createdImport1.id);
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// verify the deleted secret import
expect(deletedImport).toEqual(
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
@ -200,7 +175,5 @@ describe("Secret Import Router", async () => {
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
await deleteSecretImport(createdImport2.id);
});
});

View File

@ -1,579 +0,0 @@
import crypto from "node:crypto";
import { SecretType, TSecrets } from "@app/db/schemas";
import { decryptSecret, encryptSecret, getUserPrivateKey, seedData1 } from "@app/db/seed-data";
import { decryptAsymmetric, decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
const createServiceToken = async (
scopes: { environment: string; secretPath: string }[],
permissions: ("read" | "write")[]
) => {
const projectKeyRes = await testServer.inject({
method: "GET",
url: `/api/v2/workspace/${seedData1.project.id}/encrypted-key`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
const projectKeyEnc = JSON.parse(projectKeyRes.payload);
const userInfoRes = await testServer.inject({
method: "GET",
url: "/api/v2/users/me",
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
const { user: userInfo } = JSON.parse(userInfoRes.payload);
const privateKey = await getUserPrivateKey(seedData1.password, userInfo);
const projectKey = decryptAsymmetric({
ciphertext: projectKeyEnc.encryptedKey,
nonce: projectKeyEnc.nonce,
publicKey: projectKeyEnc.sender.publicKey,
privateKey
});
const randomBytes = crypto.randomBytes(16).toString("hex");
const { ciphertext, iv, tag } = encryptSymmetric128BitHexKeyUTF8(projectKey, randomBytes);
const serviceTokenRes = await testServer.inject({
method: "POST",
url: "/api/v2/service-token",
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: "test-token",
workspaceId: seedData1.project.id,
scopes,
encryptedKey: ciphertext,
iv,
tag,
permissions,
expiresIn: null
}
});
expect(serviceTokenRes.statusCode).toBe(200);
const serviceTokenInfo = serviceTokenRes.json();
expect(serviceTokenInfo).toHaveProperty("serviceToken");
expect(serviceTokenInfo).toHaveProperty("serviceTokenData");
return `${serviceTokenInfo.serviceToken}.${randomBytes}`;
};
const deleteServiceToken = async () => {
const serviceTokenListRes = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}/service-token-data`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(serviceTokenListRes.statusCode).toBe(200);
const serviceTokens = JSON.parse(serviceTokenListRes.payload).serviceTokenData as { name: string; id: string }[];
expect(serviceTokens.length).toBeGreaterThan(0);
const serviceTokenInfo = serviceTokens.find(({ name }) => name === "test-token");
expect(serviceTokenInfo).toBeDefined();
const deleteTokenRes = await testServer.inject({
method: "DELETE",
url: `/api/v2/service-token/${serviceTokenInfo?.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(deleteTokenRes.statusCode).toBe(200);
};
const createSecret = async (dto: {
projectKey: string;
path: string;
key: string;
value: string;
comment: string;
type?: SecretType;
token: string;
}) => {
const createSecretReqBody = {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: dto.type || SecretType.Shared,
secretPath: dto.path,
...encryptSecret(dto.projectKey, dto.key, dto.value, dto.comment)
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/${dto.key}`,
headers: {
authorization: `Bearer ${dto.token}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secret");
return createdSecretPayload.secret;
};
const deleteSecret = async (dto: { path: string; key: string; token: string }) => {
const deleteSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/${dto.key}`,
headers: {
authorization: `Bearer ${dto.token}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: dto.path
}
});
expect(deleteSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(deleteSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
return updatedSecretPayload.secret;
};
describe("Service token secret ops", async () => {
let serviceToken = "";
let projectKey = "";
let folderId = "";
beforeAll(async () => {
serviceToken = await createServiceToken(
[{ secretPath: "/**", environment: seedData1.environment.slug }],
["read", "write"]
);
// this is to ensure CLI service token decryption works fine
const serviceTokenInfoRes = await testServer.inject({
method: "GET",
url: "/api/v2/service-token",
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(serviceTokenInfoRes.statusCode).toBe(200);
const serviceTokenInfo = serviceTokenInfoRes.json();
const serviceTokenParts = serviceToken.split(".");
projectKey = decryptSymmetric128BitHexKeyUTF8({
key: serviceTokenParts[3],
tag: serviceTokenInfo.tag,
ciphertext: serviceTokenInfo.encryptedKey,
iv: serviceTokenInfo.iv
});
// create a deep folder
const folderCreate = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: "folder",
path: "/nested1/nested2"
}
});
expect(folderCreate.statusCode).toBe(200);
folderId = folderCreate.json().folder.id;
});
afterAll(async () => {
await deleteServiceToken();
// delete the deep folder created in beforeAll
const deleteFolder = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${folderId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/nested1/nested2"
}
});
expect(deleteFolder.statusCode).toBe(200);
});
const testSecrets = [
{
path: "/",
secret: {
key: "ST-SEC",
value: "something-secret",
comment: "some comment"
}
},
{
path: "/nested1/nested2/folder",
secret: {
key: "NESTED-ST-SEC",
value: "something-secret",
comment: "some comment"
}
}
];
const getSecrets = async (environment: string, secretPath = "/") => {
const res = await testServer.inject({
method: "GET",
url: `/api/v3/secrets`,
headers: {
authorization: `Bearer ${serviceToken}`
},
query: {
secretPath,
environment,
workspaceId: seedData1.project.id
}
});
const secrets: TSecrets[] = JSON.parse(res.payload).secrets || [];
return secrets.map((el) => ({ ...decryptSecret(projectKey, el), type: el.type }));
};
test.each(testSecrets)("Create secret in path $path", async ({ secret, path }) => {
const createdSecret = await createSecret({ projectKey, path, ...secret, token: serviceToken });
const decryptedSecret = decryptSecret(projectKey, createdSecret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual(secret.value);
expect(decryptedSecret.comment).toEqual(secret.comment);
expect(decryptedSecret.version).toEqual(1);
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: secret.value,
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Get secret by name in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const getSecByNameRes = await testServer.inject({
method: "GET",
url: `/api/v3/secrets/${secret.key}`,
headers: {
authorization: `Bearer ${serviceToken}`
},
query: {
secretPath: path,
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug
}
});
expect(getSecByNameRes.statusCode).toBe(200);
const getSecretByNamePayload = JSON.parse(getSecByNameRes.payload);
expect(getSecretByNamePayload).toHaveProperty("secret");
const decryptedSecret = decryptSecret(projectKey, getSecretByNamePayload.secret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual(secret.value);
expect(decryptedSecret.comment).toEqual(secret.comment);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Update secret in path $path", async ({ path, secret }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const updateSecretReqBody = {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: path,
...encryptSecret(projectKey, secret.key, "new-value", secret.comment)
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/${secret.key}`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secret");
const decryptedSecret = decryptSecret(projectKey, updatedSecretPayload.secret);
expect(decryptedSecret.key).toEqual(secret.key);
expect(decryptedSecret.value).toEqual("new-value");
expect(decryptedSecret.comment).toEqual(secret.comment);
// list secret should have updated value
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: "new-value",
type: SecretType.Shared
})
])
);
await deleteSecret({ path, key: secret.key, token: serviceToken });
});
test.each(testSecrets)("Delete secret in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, path, ...secret, token: serviceToken });
const deletedSecret = await deleteSecret({ path, key: secret.key, token: serviceToken });
const decryptedSecret = decryptSecret(projectKey, deletedSecret);
expect(decryptedSecret.key).toEqual(secret.key);
// shared secret deletion should delete personal ones also
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining([
expect.objectContaining({
key: secret.key,
type: SecretType.Shared
})
])
);
});
test.each(testSecrets)("Bulk create secrets in path $path", async ({ secret, path }) => {
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
}))
}
});
expect(createSharedSecRes.statusCode).toBe(200);
const createSharedSecPayload = JSON.parse(createSharedSecRes.payload);
expect(createSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.key}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
deleteSecret({ path, token: serviceToken, key: `BULK-${secret.key}-${i + 1}` })
)
);
});
test.each(testSecrets)("Bulk create fail on existing secret in path $path", async ({ secret, path }) => {
await createSecret({ projectKey, ...secret, key: `BULK-${secret.key}-1`, path, token: serviceToken });
const createSharedSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, secret.value, secret.comment)
}))
}
});
expect(createSharedSecRes.statusCode).toBe(400);
await deleteSecret({ path, key: `BULK-${secret.key}-1`, token: serviceToken });
});
test.each(testSecrets)("Bulk update secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
)
);
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`,
...encryptSecret(projectKey, `BULK-${secret.key}-${i + 1}`, "update-value", secret.comment)
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.key}-${i + 1}`,
value: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}`, token: serviceToken })
)
);
});
test.each(testSecrets)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) =>
createSecret({ projectKey, token: serviceToken, ...secret, key: `BULK-${secret.key}-${i + 1}`, path })
)
);
const deletedSharedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch`,
headers: {
authorization: `Bearer ${serviceToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: path,
secrets: Array.from(Array(5)).map((_e, i) => ({
secretName: `BULK-${secret.key}-${i + 1}`
}))
}
});
expect(deletedSharedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSharedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// bulk ones should no longer exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.not.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
key: `BULK-${secret.value}-${i + 1}`,
type: SecretType.Shared
})
)
)
);
});
});
describe("Service token fail cases", async () => {
test("Unauthorized secret path access", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read", "write"]
);
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: "/nested/deep"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
test("Unauthorized secret environment access", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read", "write"]
);
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: "prod",
secretPath: "/"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(401);
expect(fetchSecrets.json().error).toBe("PermissionDenied");
await deleteServiceToken();
});
test("Unauthorized write operation", async () => {
const serviceToken = await createServiceToken(
[{ secretPath: "/", environment: seedData1.environment.slug }],
["read"]
);
const writeSecrets = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/NEW`,
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
type: SecretType.Shared,
secretPath: "/",
// the project key doesn't matter here because the request fails on read-only access before encryption is used
...encryptSecret(crypto.randomBytes(16).toString("hex"), "NEW", "value", "")
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(writeSecrets.statusCode).toBe(401);
expect(writeSecrets.json().error).toBe("PermissionDenied");
// but read access should still work fine
const fetchSecrets = await testServer.inject({
method: "GET",
url: "/api/v3/secrets",
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
secretPath: "/"
},
headers: {
authorization: `Bearer ${serviceToken}`
}
});
expect(fetchSecrets.statusCode).toBe(200);
await deleteServiceToken();
});
});

File diff suppressed because it is too large

View File

@ -1,30 +1,26 @@
// eslint-disable-next-line
import "ts-node/register";
// import { main } from "@app/server/app";
import { initEnvConfig } from "@app/lib/config/env";
import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";
import { seedData1 } from "@app/db/seed-data";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger } from "@app/lib/logger";
import { main } from "@app/server/app";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { initLogger } from "@app/lib/logger";
import jwt from "jsonwebtoken";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
import "ts-node/register";
import { main } from "@app/server/app";
import { mockQueue } from "./mocks/queue";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { seedData1 } from "@app/db/seed-data";
dotenv.config({ path: path.join(__dirname, "../.env.test") });
export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = knex({
client: "pg",
connection: cfg.DB_CONNECTION_URI,
connection: process.env.DB_CONNECTION_URI,
migrations: {
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@ -41,6 +37,8 @@ export default {
await db.seed.run();
const smtp = mockSmtpServer();
const queue = mockQueue();
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const server = await main({ db, smtp, logger, queue });
// @ts-expect-error type
globalThis.testServer = server;
@ -56,7 +54,6 @@ export default {
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
console.log("[TEST] Error setting up environment", error);
await db.destroy();
throw error;
}

backend/package-lock.json generated: 2286 changed lines

File diff suppressed because it is too large

View File

@ -24,8 +24,8 @@
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
"seed:run": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest && npm run seed:run"
},
"keywords": [],
"author": "",
@ -67,21 +67,21 @@
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.2.2"
"vitest": "^1.0.4"
},
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-secrets-manager": "^3.485.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.2.0",
"@fastify/cors": "^8.5.0",
"@fastify/cors": "^8.4.1",
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@fastify/swagger": "^8.12.0",
"@fastify/swagger-ui": "^1.10.1",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
@ -90,13 +90,13 @@
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1549.0",
"axios": "^1.6.7",
"aws-sdk": "^2.1532.0",
"axios": "^1.6.2",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.1.6",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"bullmq": "^5.1.1",
"dotenv": "^16.3.1",
"fastify": "^4.24.3",
"fastify-plugin": "^4.5.1",
"handlebars": "^4.7.8",
"ioredis": "^5.3.2",
@ -106,11 +106,10 @@
"knex": "^3.0.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mysql2": "^3.9.1",
"mysql2": "^3.6.5",
"nanoid": "^5.0.4",
"node-cache": "^5.1.2",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
"nodemailer": "^6.9.7",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
@ -118,12 +117,12 @@
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.0",
"probot": "^13.0.0",
"probot": "^12.3.3",
"smee-client": "^2.0.0",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.4"
"zod-to-json-schema": "^3.22.0"
}
}

View File

@ -7,10 +7,11 @@ import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for seedfile: ");
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seeds")).length || 1;
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seed")).length || 1;
execSync(
`npx knex seed:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${
fileCounter + 1
}-${migrationName}`,
`npx knex seed:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${fileCounter}-${migrationName}`,
{ stdio: "inherit" }
);

View File

@ -6,7 +6,6 @@ import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
@ -106,7 +105,6 @@ declare module "fastify" {
secretRotation: TSecretRotationServiceFactory;
snapshot: TSecretSnapshotServiceFactory;
saml: TSamlConfigServiceFactory;
scim: TScimServiceFactory;
auditLog: TAuditLogServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;

View File

@ -83,9 +83,6 @@ import {
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
TScimTokens,
TScimTokensInsert,
TScimTokensUpdate,
TSecretApprovalPolicies,
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
@ -265,7 +262,6 @@ declare module "knex/types/tables" {
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
TSecretApprovalPolicies,
TSecretApprovalPoliciesInsert,

View File

@ -10,10 +10,6 @@ dotenv.config({
path: path.join(__dirname, "../../../.env.migration"),
debug: true
});
dotenv.config({
path: path.join(__dirname, "../../../.env"),
debug: true
});
export default {
development: {
client: "postgres",

View File

@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ScimToken))) {
await knex.schema.createTable(TableName.ScimToken, (t) => {
t.string("id", 36).primary().defaultTo(knex.fn.uuid());
t.bigInteger("ttlDays").defaultTo(365).notNullable();
t.string("description").notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
}
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("scimEnabled").defaultTo(false);
});
await createOnUpdateTrigger(knex, TableName.ScimToken);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ScimToken);
await dropOnUpdateTrigger(knex, TableName.ScimToken);
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("scimEnabled");
});
}

View File

@ -1,39 +0,0 @@
import { Knex } from "knex";
import { ProjectVersion, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasGhostUserColumn = await knex.schema.hasColumn(TableName.Users, "isGhost");
const hasProjectVersionColumn = await knex.schema.hasColumn(TableName.Project, "version");
if (!hasGhostUserColumn) {
await knex.schema.alterTable(TableName.Users, (t) => {
t.boolean("isGhost").defaultTo(false).notNullable();
});
}
if (!hasProjectVersionColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.integer("version").defaultTo(ProjectVersion.V1).notNullable();
t.string("upgradeStatus").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasGhostUserColumn = await knex.schema.hasColumn(TableName.Users, "isGhost");
const hasProjectVersionColumn = await knex.schema.hasColumn(TableName.Project, "version");
if (hasGhostUserColumn) {
await knex.schema.alterTable(TableName.Users, (t) => {
t.dropColumn("isGhost");
});
}
if (hasProjectVersionColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("version");
t.dropColumn("upgradeStatus");
});
}
}

View File

@ -1,20 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const isTablePresent = await knex.schema.hasTable(TableName.SuperAdmin);
if (isTablePresent) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.string("allowedSignUpDomain");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "allowedSignUpDomain")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("allowedSignUpDomain");
});
}
}

View File

@ -26,7 +26,6 @@ export * from "./project-memberships";
export * from "./project-roles";
export * from "./projects";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
export * from "./secret-approval-policies-approvers";
export * from "./secret-approval-request-secret-tags";

View File

@ -40,7 +40,6 @@ export enum TableName {
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
ScimToken = "scim_tokens",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalRequest = "secret_approval_requests",
@ -112,17 +111,6 @@ export enum SecretType {
Personal = "personal"
}
export enum ProjectVersion {
V1 = 1,
V2 = 2
}
export enum ProjectUpgradeStatus {
InProgress = "IN_PROGRESS",
// Completed -> status is null once the upgrade completes, so no explicit completed value is needed
Failed = "FAILED"
}
export enum IdentityAuthMethod {
Univeral = "universal-auth"
}

View File

@ -14,8 +14,7 @@ export const OrganizationsSchema = z.object({
slug: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
authEnforced: z.boolean().default(false).nullable().optional(),
scimEnabled: z.boolean().default(false).nullable().optional()
authEnforced: z.boolean().default(false).nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

View File

@ -14,9 +14,7 @@ export const ProjectsSchema = z.object({
autoCapitalization: z.boolean().default(true).nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional()
updatedAt: z.date()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ScimTokensSchema = z.object({
id: z.string(),
ttlDays: z.coerce.number().default(365),
description: z.string(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TScimTokens = z.infer<typeof ScimTokensSchema>;
export type TScimTokensInsert = Omit<TScimTokens, TImmutableDBKeys>;
export type TScimTokensUpdate = Partial<Omit<TScimTokens, TImmutableDBKeys>>;

View File

@ -12,8 +12,7 @@ export const SuperAdminSchema = z.object({
initialized: z.boolean().default(false).nullable().optional(),
allowSignUp: z.boolean().default(true).nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
allowedSignUpDomain: z.string().nullable().optional()
updatedAt: z.date()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -19,8 +19,7 @@ export const UsersSchema = z.object({
mfaMethods: z.string().array().nullable().optional(),
devices: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
isGhost: z.boolean().default(false)
updatedAt: z.date()
});
export type TUsers = z.infer<typeof UsersSchema>;

View File

@ -1,4 +1,3 @@
/* eslint-disable import/no-mutable-exports */
import crypto from "node:crypto";
import argon2, { argon2id } from "argon2";
@ -7,21 +6,17 @@ import nacl from "tweetnacl";
import { encodeBase64 } from "tweetnacl-util";
import {
decryptAsymmetric,
// decryptAsymmetric,
decryptSymmetric128BitHexKeyUTF8,
decryptSymmetric,
encryptAsymmetric,
encryptSymmetric128BitHexKeyUTF8
encryptSymmetric
} from "@app/lib/crypto";
import { TSecrets, TUserEncryptionKeys } from "./schemas";
export let userPrivateKey: string | undefined;
export let userPublicKey: string | undefined;
import { TUserEncryptionKeys } from "./schemas";
export const seedData1 = {
id: "3dafd81d-4388-432b-a4c5-f735616868c1",
email: process.env.TEST_USER_EMAIL || "test@localhost.local",
email: "test@localhost.local",
password: process.env.TEST_USER_PASSWORD || "testInfisical@1",
organization: {
id: "180870b7-f464-4740-8ffe-9d11c9245ea7",
@ -36,22 +31,8 @@ export const seedData1 = {
name: "Development",
slug: "dev"
},
machineIdentity: {
id: "88fa7aed-9288-401e-a4c9-fa9430be62a0",
name: "mac1",
clientCredentials: {
id: "3f6135db-f237-421d-af66-a8f4e80d443b",
secret: "da35a5a5a7b57f977a9a73394506e878a7175d06606df43dc93e1472b10cf339"
}
},
token: {
id: "a9dfafba-a3b7-42e3-8618-91abb702fd36"
},
// We set these values during user creation, and later re-use them during project seeding.
encryptionKeys: {
publicKey: "",
privateKey: ""
}
};
@ -92,7 +73,7 @@ export const generateUserSrpKeys = async (password: string) => {
ciphertext: encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag
} = encryptSymmetric128BitHexKeyUTF8(privateKey, key);
} = encryptSymmetric(privateKey, key.toString("base64"));
// create the protected key by encrypting the symmetric key
// [key] with the derived key
@ -100,7 +81,7 @@ export const generateUserSrpKeys = async (password: string) => {
ciphertext: protectedKey,
iv: protectedKeyIV,
tag: protectedKeyTag
} = encryptSymmetric128BitHexKeyUTF8(key.toString("hex"), derivedKey);
} = encryptSymmetric(key.toString("hex"), derivedKey.toString("base64"));
return {
protectedKey,
@ -126,102 +107,32 @@ export const getUserPrivateKey = async (password: string, user: TUserEncryptionK
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric128BitHexKeyUTF8({
const key = decryptSymmetric({
ciphertext: user.protectedKey as string,
iv: user.protectedKeyIV as string,
tag: user.protectedKeyTag as string,
key: derivedKey
key: derivedKey.toString("base64")
});
const privateKey = decryptSymmetric128BitHexKeyUTF8({
const privateKey = decryptSymmetric({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key: Buffer.from(key, "hex")
key
});
return privateKey;
};
export const buildUserProjectKey = (privateKey: string, publickey: string) => {
export const buildUserProjectKey = async (privateKey: string, publickey: string) => {
const randomBytes = crypto.randomBytes(16).toString("hex");
const { nonce, ciphertext } = encryptAsymmetric(randomBytes, publickey, privateKey);
return { nonce, ciphertext };
};
export const getUserProjectKey = async (privateKey: string, ciphertext: string, nonce: string, publicKey: string) => {
return decryptAsymmetric({
ciphertext,
nonce,
publicKey,
privateKey
});
};
export const encryptSecret = (encKey: string, key: string, value?: string, comment?: string) => {
// encrypt key
const {
ciphertext: secretKeyCiphertext,
iv: secretKeyIV,
tag: secretKeyTag
} = encryptSymmetric128BitHexKeyUTF8(key, encKey);
// encrypt value
const {
ciphertext: secretValueCiphertext,
iv: secretValueIV,
tag: secretValueTag
} = encryptSymmetric128BitHexKeyUTF8(value ?? "", encKey);
// encrypt comment
const {
ciphertext: secretCommentCiphertext,
iv: secretCommentIV,
tag: secretCommentTag
} = encryptSymmetric128BitHexKeyUTF8(comment ?? "", encKey);
return {
secretKeyCiphertext,
secretKeyIV,
secretKeyTag,
secretValueCiphertext,
secretValueIV,
secretValueTag,
secretCommentCiphertext,
secretCommentIV,
secretCommentTag
};
};
export const decryptSecret = (decryptKey: string, encSecret: TSecrets) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
key: decryptKey,
ciphertext: encSecret.secretKeyCiphertext,
tag: encSecret.secretKeyTag,
iv: encSecret.secretKeyIV
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
key: decryptKey,
ciphertext: encSecret.secretValueCiphertext,
tag: encSecret.secretValueTag,
iv: encSecret.secretValueIV
});
const secretComment =
encSecret.secretCommentIV && encSecret.secretCommentTag && encSecret.secretCommentCiphertext
? decryptSymmetric128BitHexKeyUTF8({
key: decryptKey,
ciphertext: encSecret.secretCommentCiphertext,
tag: encSecret.secretCommentTag,
iv: encSecret.secretCommentIV
})
: "";
return {
key: secretKey,
value: secretValue,
comment: secretComment,
version: encSecret.version
};
};
// export const getUserProjectKey = async (privateKey: string) => {
// const key = decryptAsymmetric({
// ciphertext: decryptFileKey.encryptedKey,
// nonce: decryptFileKey.nonce,
// publicKey: decryptFileKey.sender.publicKey,
// privateKey: PRIVATE_KEY
// });
// };
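
The hunks above swap the seed helpers from the 128-bit-hex-key functions to encryptSymmetric/decryptSymmetric driven by base64-encoded keys. A minimal, self-contained sketch of such a base64-keyed AES-256-GCM round trip, under the assumption that the real helpers look roughly like this (IV size and structure are illustrative, not the project's exact implementation):

import crypto from "node:crypto";

// Illustrative only: mirrors the encryptSymmetric/decryptSymmetric helpers
// referenced in the diff; the 12-byte IV and field names are assumptions.
const IV_BYTES = 12;

export const encryptSymmetricSketch = (plaintext: string, keyBase64: string) => {
  const key = Buffer.from(keyBase64, "base64"); // expects a 32-byte key
  const iv = crypto.randomBytes(IV_BYTES);
  const cipher = crypto.createCipheriv("aes-256-gcm", key, iv);
  const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
  return {
    ciphertext: ciphertext.toString("base64"),
    iv: iv.toString("base64"),
    tag: cipher.getAuthTag().toString("base64")
  };
};

export const decryptSymmetricSketch = (opts: { ciphertext: string; iv: string; tag: string; key: string }) => {
  const decipher = crypto.createDecipheriv("aes-256-gcm", Buffer.from(opts.key, "base64"), Buffer.from(opts.iv, "base64"));
  decipher.setAuthTag(Buffer.from(opts.tag, "base64"));
  return decipher.update(opts.ciphertext, "base64", "utf8") + decipher.final("utf8");
};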


View File

@ -14,8 +14,7 @@ export async function seed(knex: Knex): Promise<void> {
const [user] = await knex(TableName.Users)
.insert([
{
// eslint-disable-next-line
// @ts-ignore
// @ts-expect-error excluded type id needs to be inserted here to keep it testable
id: seedData1.id,
email: seedData1.email,
superAdmin: true,
@ -49,8 +48,7 @@ export async function seed(knex: Knex): Promise<void> {
]);
await knex(TableName.AuthTokenSession).insert({
// eslint-disable-next-line
// @ts-ignore
// @ts-expect-error excluded type id needs to be inserted here to keep it testable
id: seedData1.token.id,
userId: seedData1.id,
ip: "151.196.220.213",

View File

@ -14,8 +14,7 @@ export async function seed(knex: Knex): Promise<void> {
const [org] = await knex(TableName.Organization)
.insert([
{
// eslint-disable-next-line
// @ts-ignore
// @ts-expect-error excluded type id needs to be inserted here to keep it testable
id: seedData1.organization.id,
name: "infisical",
slug: "infisical",

View File

@ -1,11 +1,7 @@
import crypto from "node:crypto";
import { Knex } from "knex";
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { OrgMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
import { OrgMembershipRole, TableName } from "../schemas";
import { seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
{ name: "Development", slug: "dev" },
@ -24,32 +20,21 @@ export async function seed(knex: Knex): Promise<void> {
name: seedData1.project.name,
orgId: seedData1.organization.id,
slug: "first-project",
// eslint-disable-next-line
// @ts-ignore
// @ts-expect-error excluded type id needs to be inserted here to keep it testable
id: seedData1.project.id
})
.returning("*");
// await knex(TableName.ProjectKeys).insert({
// projectId: project.id,
// senderId: seedData1.id
// });
await knex(TableName.ProjectMembership).insert({
projectId: project.id,
role: OrgMembershipRole.Admin,
userId: seedData1.id
});
const user = await knex(TableName.UserEncryptionKey).where({ userId: seedData1.id }).first();
if (!user) throw new Error("User not found");
const userPrivateKey = await getUserPrivateKey(seedData1.password, user);
const projectKey = buildUserProjectKey(userPrivateKey, user.publicKey);
await knex(TableName.ProjectKeys).insert({
projectId: project.id,
nonce: projectKey.nonce,
encryptedKey: projectKey.ciphertext,
receiverId: seedData1.id,
senderId: seedData1.id
});
// create default environments and default folders
const envs = await knex(TableName.Environment)
.insert(
DEFAULT_PROJECT_ENVS.map(({ name, slug }, index) => ({
@ -61,19 +46,4 @@ export async function seed(knex: Knex): Promise<void> {
)
.returning("*");
await knex(TableName.SecretFolder).insert(envs.map(({ id }) => ({ name: "root", envId: id, parentId: null })));
// save secret secret blind index
const encKey = process.env.ENCRYPTION_KEY;
if (!encKey) throw new Error("Missing ENCRYPTION_KEY");
const salt = crypto.randomBytes(16).toString("base64");
const secretBlindIndex = encryptSymmetric128BitHexKeyUTF8(salt, encKey);
// insert secret blind index for project
await knex(TableName.SecretBlindIndex).insert({
projectId: project.id,
encryptedSaltCipherText: secretBlindIndex.ciphertext,
saltIV: secretBlindIndex.iv,
saltTag: secretBlindIndex.tag,
algorithm: SecretEncryptionAlgo.AES_256_GCM,
keyEncoding: SecretKeyEncoding.UTF8
});
}
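
For reference, the project key that the removed seeding block built is a random 16-byte value (hex-encoded) wrapped with an asymmetric box for the receiving member. A hedged sketch of that wrapping step, assuming encryptAsymmetric is a thin wrapper over tweetnacl's box with base64-encoded key strings:

import crypto from "node:crypto";
import nacl from "tweetnacl";
import { decodeBase64, encodeBase64 } from "tweetnacl-util";

// Illustrative sketch of buildUserProjectKey's wrapping step; the tweetnacl
// wiring is an assumption based on the imports shown in seed-data.ts above.
export const wrapProjectKeySketch = (receiverPublicKey: string, senderPrivateKey: string) => {
  const projectKey = crypto.randomBytes(16).toString("hex");
  const nonce = nacl.randomBytes(nacl.box.nonceLength);
  const ciphertext = nacl.box(
    Buffer.from(projectKey, "utf8"),
    nonce,
    decodeBase64(receiverPublicKey),
    decodeBase64(senderPrivateKey)
  );
  return { nonce: encodeBase64(nonce), encryptedKey: encodeBase64(ciphertext) };
};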

View File

@ -1,83 +0,0 @@
import bcrypt from "bcrypt";
import { Knex } from "knex";
import { IdentityAuthMethod, OrgMembershipRole, ProjectMembershipRole, TableName } from "../schemas";
import { seedData1 } from "../seed-data";
export async function seed(knex: Knex): Promise<void> {
// Deletes ALL existing entries
await knex(TableName.Identity).del();
await knex(TableName.IdentityOrgMembership).del();
// Inserts seed entries
await knex(TableName.Identity).insert([
{
// eslint-disable-next-line
// @ts-ignore
id: seedData1.machineIdentity.id,
name: seedData1.machineIdentity.name,
authMethod: IdentityAuthMethod.Univeral
}
]);
const identityUa = await knex(TableName.IdentityUniversalAuth)
.insert([
{
identityId: seedData1.machineIdentity.id,
clientId: seedData1.machineIdentity.clientCredentials.id,
clientSecretTrustedIps: JSON.stringify([
{
type: "ipv4",
prefix: 0,
ipAddress: "0.0.0.0"
},
{
type: "ipv6",
prefix: 0,
ipAddress: "::"
}
]),
accessTokenTrustedIps: JSON.stringify([
{
type: "ipv4",
prefix: 0,
ipAddress: "0.0.0.0"
},
{
type: "ipv6",
prefix: 0,
ipAddress: "::"
}
]),
accessTokenTTL: 2592000,
accessTokenMaxTTL: 2592000,
accessTokenNumUsesLimit: 0
}
])
.returning("*");
const clientSecretHash = await bcrypt.hash(seedData1.machineIdentity.clientCredentials.secret, 10);
await knex(TableName.IdentityUaClientSecret).insert([
{
identityUAId: identityUa[0].id,
description: "",
clientSecretTTL: 0,
clientSecretNumUses: 0,
clientSecretNumUsesLimit: 0,
clientSecretPrefix: seedData1.machineIdentity.clientCredentials.secret.slice(0, 4),
clientSecretHash,
isClientSecretRevoked: false
}
]);
await knex(TableName.IdentityOrgMembership).insert([
{
identityId: seedData1.machineIdentity.id,
orgId: seedData1.organization.id,
role: OrgMembershipRole.Admin
}
]);
await knex(TableName.IdentityProjectMembership).insert({
identityId: seedData1.machineIdentity.id,
role: ProjectMembershipRole.Admin,
projectId: seedData1.project.id
});
}

View File

@ -3,7 +3,6 @@ import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerSamlRouter } from "./saml-router";
import { registerScimRouter } from "./scim-router";
import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router";
import { registerSecretApprovalRequestRouter } from "./secret-approval-request-router";
import { registerSecretRotationProviderRouter } from "./secret-rotation-provider-router";
@ -34,7 +33,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
prefix: "/secret-rotation-providers"
});
await server.register(registerSamlRouter, { prefix: "/sso" });
await server.register(registerScimRouter, { prefix: "/scim" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
await server.register(registerSecretVersionRouter, { prefix: "/secret" });

View File

@ -1,331 +0,0 @@
import { z } from "zod";
import { ScimTokensSchema } from "@app/db/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerScimRouter = async (server: FastifyZodProvider) => {
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
try {
const strBody = body instanceof Buffer ? body.toString() : body;
const json: unknown = JSON.parse(strBody);
done(null, json);
} catch (err) {
const error = err as Error;
done(error, undefined);
}
});
server.route({
url: "/scim-tokens",
method: "POST",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
organizationId: z.string().trim(),
description: z.string().trim().default(""),
ttlDays: z.number().min(0).default(0)
}),
response: {
200: z.object({
scimToken: z.string().trim()
})
}
},
handler: async (req) => {
const { scimToken } = await server.services.scim.createScimToken({
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
description: req.body.description,
ttlDays: req.body.ttlDays
});
return { scimToken };
}
});
server.route({
url: "/scim-tokens",
method: "GET",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
querystring: z.object({
organizationId: z.string().trim()
}),
response: {
200: z.object({
scimTokens: z.array(ScimTokensSchema)
})
}
},
handler: async (req) => {
const scimTokens = await server.services.scim.listScimTokens({
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
orgId: req.query.organizationId
});
return { scimTokens };
}
});
server.route({
url: "/scim-tokens/:scimTokenId",
method: "DELETE",
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
scimTokenId: z.string().trim()
}),
response: {
200: z.object({
scimToken: ScimTokensSchema
})
}
},
handler: async (req) => {
const scimToken = await server.services.scim.deleteScimToken({
scimTokenId: req.params.scimTokenId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId
});
return { scimToken };
}
});
// SCIM server endpoints
server.route({
url: "/Users",
method: "GET",
schema: {
querystring: z.object({
startIndex: z.coerce.number().default(1),
count: z.coerce.number().default(20),
filter: z.string().trim().optional()
}),
response: {
200: z.object({
Resources: z.array(
z.object({
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean()
})
),
itemsPerPage: z.number(),
schemas: z.array(z.string()),
startIndex: z.number(),
totalResults: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const users = await req.server.services.scim.listScimUsers({
offset: req.query.startIndex,
limit: req.query.count,
filter: req.query.filter,
orgId: req.permission.orgId as string
});
return users;
}
});
server.route({
url: "/Users/:userId",
method: "GET",
schema: {
params: z.object({
userId: z.string().trim()
}),
response: {
201: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.getScimUser({
userId: req.params.userId,
orgId: req.permission.orgId as string
});
return user;
}
});
server.route({
url: "/Users",
method: "POST",
schema: {
body: z.object({
schemas: z.array(z.string()),
userName: z.string().trim().email(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
// emails: z.array( // optional?
// z.object({
// primary: z.boolean(),
// value: z.string().email(),
// type: z.string().trim()
// })
// ),
// displayName: z.string().trim(),
active: z.boolean()
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim().email(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.createScimUser({
email: req.body.userName,
firstName: req.body.name.givenName,
lastName: req.body.name.familyName,
orgId: req.permission.orgId as string
});
return user;
}
});
server.route({
url: "/Users/:userId",
method: "PATCH",
schema: {
params: z.object({
userId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
Operations: z.array(
z.object({
op: z.string().trim(),
path: z.string().trim().optional(),
value: z.union([
z.object({
active: z.boolean()
}),
z.string().trim()
])
})
)
}),
response: {
200: z.object({})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.updateScimUser({
userId: req.params.userId,
orgId: req.permission.orgId as string,
operations: req.body.Operations
});
return user;
}
});
server.route({
url: "/Users/:userId",
method: "PUT",
schema: {
params: z.object({
userId: z.string().trim()
}),
body: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
displayName: z.string().trim(),
active: z.boolean()
}),
response: {
200: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z.object({
familyName: z.string().trim(),
givenName: z.string().trim()
}),
emails: z.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
})
),
displayName: z.string().trim(),
active: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.SCIM_TOKEN]),
handler: async (req) => {
const user = await req.server.services.scim.replaceScimUser({
userId: req.params.userId,
orgId: req.permission.orgId as string,
active: req.body.active
});
return user;
}
});
};
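
A hedged usage sketch of how an identity provider could have driven the removed /Users listing route. The /api/v1/scim path is only inferred from the "/scim" prefix registration shown earlier and may differ; the base URL and token are placeholders:

// Placeholder base URL and token; the exact route prefix is an assumption.
const listScimUsersSketch = async (baseUrl: string, scimToken: string) => {
  const res = await fetch(`${baseUrl}/api/v1/scim/Users?startIndex=1&count=20`, {
    headers: {
      Authorization: `Bearer ${scimToken}`,
      Accept: "application/scim+json"
    }
  });
  if (!res.ok) throw new Error(`SCIM request failed with status ${res.status}`);
  return res.json();
};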

View File

@ -58,7 +58,6 @@ export const auditLogServiceFactory = ({
if (data.event.type !== EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH) {
if (!data.projectId && !data.orgId) throw new BadRequestError({ message: "Must provide either project id or org id" });
}
return auditLogQueue.pushToLog(data);
};

View File

@ -15,7 +15,7 @@ export type TListProjectAuditLogDTO = {
export type TCreateAuditLogDTO = {
event: Event;
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor;
actor: UserActor | IdentityActor | ServiceActor;
orgId?: string;
projectId?: string;
} & BaseAuthData;
@ -105,8 +105,6 @@ interface IdentityActorMetadata {
name: string;
}
interface ScimClientActorMetadata {}
export interface UserActor {
type: ActorType.USER;
metadata: UserActorMetadata;
@ -122,12 +120,7 @@ export interface IdentityActor {
metadata: IdentityActorMetadata;
}
export interface ScimClientActor {
type: ActorType.SCIM_CLIENT;
metadata: ScimClientActorMetadata;
}
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor;
export type Actor = UserActor | ServiceActor | IdentityActor;
interface GetSecretsEvent {
type: EventType.GET_SECRETS;

View File

@ -1,27 +0,0 @@
export const getDefaultOnPremFeatures = () => {
return {
_id: null,
slug: null,
tier: -1,
workspaceLimit: null,
workspacesUsed: 0,
memberLimit: null,
membersUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
secretVersioning: true,
pitRecovery: false,
ipAllowlisting: true,
rbac: false,
customRateLimits: false,
customAlerts: false,
auditLogs: false,
auditLogsRetentionDays: 0,
samlSSO: false,
status: null,
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: true
};
};

View File

@ -24,7 +24,6 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
auditLogs: false,
auditLogsRetentionDays: 0,
samlSSO: false,
scim: false,
status: null,
trial_end: null,
has_used_trial: true,

View File

@ -25,7 +25,6 @@ export type TFeatureSet = {
auditLogs: false;
auditLogsRetentionDays: 0;
samlSSO: false;
scim: false;
status: null;
trial_end: null;
has_used_trial: true;

View File

@ -16,7 +16,6 @@ export enum OrgPermissionSubjects {
Settings = "settings",
IncidentAccount = "incident-contact",
Sso = "sso",
Scim = "scim",
Billing = "billing",
SecretScanning = "secret-scanning",
Identity = "identity"
@ -30,7 +29,6 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Settings]
| [OrgPermissionActions, OrgPermissionSubjects.IncidentAccount]
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionActions, OrgPermissionSubjects.Billing]
| [OrgPermissionActions, OrgPermissionSubjects.Identity];
@ -71,11 +69,6 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Sso);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Scim);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Scim);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Scim);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Scim);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Billing);

View File

@ -177,8 +177,6 @@ export const permissionServiceFactory = ({
const getServiceTokenProjectPermission = async (serviceTokenId: string, projectId: string) => {
const serviceToken = await serviceTokenDAL.findById(serviceTokenId);
if (!serviceToken) throw new BadRequestError({ message: "Service token not found" });
if (serviceToken.projectId !== projectId)
throw new UnauthorizedError({
message: "Failed to find service authorization for given project"

View File

@ -195,7 +195,7 @@ export const samlConfigServiceFactory = ({
updateQuery.certTag = certTag;
}
const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);
await orgDAL.updateById(orgId, { authEnforced: false, scimEnabled: false });
await orgDAL.updateById(orgId, { authEnforced: false });
return ssoConfig;
};
@ -338,8 +338,7 @@ export const samlConfigServiceFactory = ({
email,
firstName,
lastName,
authMethods: [AuthMethod.EMAIL],
isGhost: false
authMethods: [AuthMethod.EMAIL]
},
tx
);

View File

@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TScimDALFactory = ReturnType<typeof scimDALFactory>;
export const scimDALFactory = (db: TDbClient) => {
const scimTokenOrm = ormify(db, TableName.ScimToken);
return scimTokenOrm;
};

View File

@ -1,58 +0,0 @@
import { TListScimUsers, TScimUser } from "./scim-types";
export const buildScimUserList = ({
scimUsers,
offset,
limit
}: {
scimUsers: TScimUser[];
offset: number;
limit: number;
}): TListScimUsers => {
return {
Resources: scimUsers,
itemsPerPage: limit,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex: offset,
totalResults: scimUsers.length
};
};
export const buildScimUser = ({
userId,
firstName,
lastName,
email,
active
}: {
userId: string;
firstName: string;
lastName: string;
email: string;
active: boolean;
}): TScimUser => {
return {
schemas: ["urn:ietf:params:scim:schemas:core:2.0:User"],
id: userId,
userName: email,
displayName: `${firstName} ${lastName}`,
name: {
givenName: firstName,
middleName: null,
familyName: lastName
},
emails: [
{
primary: true,
value: email,
type: "work"
}
],
active,
groups: [],
meta: {
resourceType: "User",
location: null
}
};
};
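
Usage sketch for the removed helpers (all values are illustrative placeholders):

// Builds the SCIM ListResponse envelope that the GET /Users route returned.
const pageSketch = buildScimUserList({
  scimUsers: [
    buildScimUser({
      userId: "some-user-id",
      firstName: "Jane",
      lastName: "Doe",
      email: "jane@example.com",
      active: true
    })
  ],
  offset: 1,
  limit: 20
});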

View File

@ -1,431 +0,0 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { OrgMembershipRole, OrgMembershipStatus } from "@app/db/schemas";
import { TScimDALFactory } from "@app/ee/services/scim/scim-dal";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ScimRequestError, UnauthorizedError } from "@app/lib/errors";
import { TOrgPermission } from "@app/lib/types";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { deleteOrgMembership } from "@app/services/org/org-fns";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { buildScimUser, buildScimUserList } from "./scim-fns";
import {
TCreateScimTokenDTO,
TCreateScimUserDTO,
TDeleteScimTokenDTO,
TGetScimUserDTO,
TListScimUsers,
TListScimUsersDTO,
TReplaceScimUserDTO,
TScimTokenJwtPayload,
TUpdateScimUserDTO
} from "./scim-types";
type TScimServiceFactoryDep = {
scimDAL: Pick<TScimDALFactory, "create" | "find" | "findById" | "deleteById">;
userDAL: Pick<TUserDALFactory, "findOne" | "create" | "transaction">;
orgDAL: Pick<
TOrgDALFactory,
"createMembership" | "findById" | "findMembership" | "deleteMembershipById" | "transaction"
>;
projectDAL: Pick<TProjectDALFactory, "find">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
smtpService: TSmtpService;
};
export type TScimServiceFactory = ReturnType<typeof scimServiceFactory>;
export const scimServiceFactory = ({
licenseService,
scimDAL,
userDAL,
orgDAL,
projectDAL,
projectMembershipDAL,
permissionService,
smtpService
}: TScimServiceFactoryDep) => {
const createScimToken = async ({ actor, actorId, actorOrgId, orgId, description, ttlDays }: TCreateScimTokenDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Scim);
const plan = await licenseService.getPlan(orgId);
if (!plan.scim)
throw new BadRequestError({
message: "Failed to create a SCIM token due to plan restriction. Upgrade plan to create a SCIM token."
});
const appCfg = getConfig();
const scimTokenData = await scimDAL.create({
orgId,
description,
ttlDays
});
const scimToken = jwt.sign(
{
scimTokenId: scimTokenData.id,
authTokenType: AuthTokenType.SCIM_TOKEN
},
appCfg.AUTH_SECRET
);
return { scimToken };
};
const listScimTokens = async ({ actor, actorId, actorOrgId, orgId }: TOrgPermission) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Scim);
const plan = await licenseService.getPlan(orgId);
if (!plan.scim)
throw new BadRequestError({
message: "Failed to get SCIM tokens due to plan restriction. Upgrade plan to get SCIM tokens."
});
const scimTokens = await scimDAL.find({ orgId });
return scimTokens;
};
const deleteScimToken = async ({ scimTokenId, actor, actorId, actorOrgId }: TDeleteScimTokenDTO) => {
let scimToken = await scimDAL.findById(scimTokenId);
if (!scimToken) throw new BadRequestError({ message: "Failed to find SCIM token to delete" });
const { permission } = await permissionService.getOrgPermission(actor, actorId, scimToken.orgId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Scim);
const plan = await licenseService.getPlan(scimToken.orgId);
if (!plan.scim)
throw new BadRequestError({
message: "Failed to delete the SCIM token due to plan restriction. Upgrade plan to delete the SCIM token."
});
scimToken = await scimDAL.deleteById(scimTokenId);
return scimToken;
};
// SCIM server endpoints
const listScimUsers = async ({ offset, limit, filter, orgId }: TListScimUsersDTO): Promise<TListScimUsers> => {
const org = await orgDAL.findById(orgId);
if (!org.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
const parseFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
}
return { [attributeName]: parsedValue };
};
const findOpts = {
...(offset && { offset }),
...(limit && { limit })
};
const users = await orgDAL.findMembership(
{
orgId,
...parseFilter(filter)
},
findOpts
);
const scimUsers = users.map(({ userId, firstName, lastName, email }) =>
buildScimUser({
userId: userId ?? "",
firstName: firstName ?? "",
lastName: lastName ?? "",
email,
active: true
})
);
return buildScimUserList({
scimUsers,
offset,
limit
});
};
const getScimUser = async ({ userId, orgId }: TGetScimUserDTO) => {
const [membership] = await orgDAL
.findMembership({
userId,
orgId
})
.catch(() => {
throw new ScimRequestError({
detail: "User not found",
status: 404
});
});
if (!membership)
throw new ScimRequestError({
detail: "User not found",
status: 404
});
if (!membership.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
return buildScimUser({
userId: membership.userId as string,
firstName: membership.firstName as string,
lastName: membership.lastName as string,
email: membership.email,
active: true
});
};
const createScimUser = async ({ firstName, lastName, email, orgId }: TCreateScimUserDTO) => {
const org = await orgDAL.findById(orgId);
if (!org)
throw new ScimRequestError({
detail: "Organization not found",
status: 404
});
if (!org.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
let user = await userDAL.findOne({
email
});
if (user) {
await userDAL.transaction(async (tx) => {
const [orgMembership] = await orgDAL.findMembership({ userId: user.id, orgId }, { tx });
if (orgMembership)
throw new ScimRequestError({
detail: "User already exists in the database",
status: 409
});
if (!orgMembership) {
await orgDAL.createMembership(
{
userId: user.id,
orgId,
inviteEmail: email,
role: OrgMembershipRole.Member,
status: OrgMembershipStatus.Invited
},
tx
);
}
});
} else {
user = await userDAL.transaction(async (tx) => {
const newUser = await userDAL.create(
{
email,
firstName,
lastName,
authMethods: [AuthMethod.EMAIL],
isGhost: false
},
tx
);
await orgDAL.createMembership(
{
inviteEmail: email,
orgId,
userId: newUser.id,
role: OrgMembershipRole.Member,
status: OrgMembershipStatus.Invited
},
tx
);
return newUser;
});
}
const appCfg = getConfig();
await smtpService.sendMail({
template: SmtpTemplates.ScimUserProvisioned,
subjectLine: "Infisical organization invitation",
recipients: [email],
substitutions: {
organizationName: org.name,
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}`
}
});
return buildScimUser({
userId: user.id,
firstName: user.firstName as string,
lastName: user.lastName as string,
email: user.email,
active: true
});
};
const updateScimUser = async ({ userId, orgId, operations }: TUpdateScimUserDTO) => {
const [membership] = await orgDAL
.findMembership({
userId,
orgId
})
.catch(() => {
throw new ScimRequestError({
detail: "User not found",
status: 404
});
});
if (!membership)
throw new ScimRequestError({
detail: "User not found",
status: 404
});
if (!membership.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
let active = true;
operations.forEach((operation) => {
if (operation.op.toLowerCase() === "replace") {
if (operation.path === "active" && operation.value === "False") {
// azure scim op format
active = false;
} else if (typeof operation.value === "object" && operation.value.active === false) {
// okta scim op format
active = false;
}
}
});
if (!active) {
await deleteOrgMembership({
orgMembershipId: membership.id,
orgId: membership.orgId,
orgDAL,
projectDAL,
projectMembershipDAL
});
}
return buildScimUser({
userId: membership.userId as string,
firstName: membership.firstName as string,
lastName: membership.lastName as string,
email: membership.email,
active
});
};
const replaceScimUser = async ({ userId, active, orgId }: TReplaceScimUserDTO) => {
const [membership] = await orgDAL
.findMembership({
userId,
orgId
})
.catch(() => {
throw new ScimRequestError({
detail: "User not found",
status: 404
});
});
if (!membership)
throw new ScimRequestError({
detail: "User not found",
status: 404
});
if (!membership.scimEnabled)
throw new ScimRequestError({
detail: "SCIM is disabled for the organization",
status: 403
});
if (!active) {
// tx
await deleteOrgMembership({
orgMembershipId: membership.id,
orgId: membership.orgId,
orgDAL,
projectDAL,
projectMembershipDAL
});
}
return buildScimUser({
userId: membership.userId as string,
firstName: membership.firstName as string,
lastName: membership.lastName as string,
email: membership.email,
active
});
};
const fnValidateScimToken = async (token: TScimTokenJwtPayload) => {
const scimToken = await scimDAL.findById(token.scimTokenId);
if (!scimToken) throw new UnauthorizedError();
const { ttlDays, createdAt } = scimToken;
// ttl check
if (Number(ttlDays) > 0) {
const currentDate = new Date();
const scimTokenCreatedAt = new Date(createdAt);
const ttlInMilliseconds = Number(scimToken.ttlDays) * 86400 * 1000;
const expirationDate = new Date(scimTokenCreatedAt.getTime() + ttlInMilliseconds);
if (currentDate > expirationDate)
throw new ScimRequestError({
detail: "The access token expired",
status: 401
});
}
return { scimTokenId: scimToken.id, orgId: scimToken.orgId };
};
return {
createScimToken,
listScimTokens,
deleteScimToken,
listScimUsers,
getScimUser,
createScimUser,
updateScimUser,
replaceScimUser,
fnValidateScimToken
};
};
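
The TTL check inside fnValidateScimToken reduces to the following sketch, carrying over the removed code's assumption that a ttlDays of 0 means the token never expires:

// Returns true once createdAt + ttlDays is in the past; ttlDays <= 0 never expires.
const isScimTokenExpiredSketch = (createdAt: Date, ttlDays: number) => {
  if (ttlDays <= 0) return false;
  const expiresAt = new Date(createdAt.getTime() + ttlDays * 86400 * 1000);
  return new Date() > expiresAt;
};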

View File

@ -1,87 +0,0 @@
import { TOrgPermission } from "@app/lib/types";
export type TCreateScimTokenDTO = {
description: string;
ttlDays: number;
} & TOrgPermission;
export type TDeleteScimTokenDTO = {
scimTokenId: string;
} & Omit<TOrgPermission, "orgId">;
// SCIM server endpoint types
export type TListScimUsersDTO = {
offset: number;
limit: number;
filter?: string;
orgId: string;
};
export type TListScimUsers = {
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"];
totalResults: number;
Resources: TScimUser[];
itemsPerPage: number;
startIndex: number;
};
export type TGetScimUserDTO = {
userId: string;
orgId: string;
};
export type TCreateScimUserDTO = {
email: string;
firstName: string;
lastName: string;
orgId: string;
};
export type TUpdateScimUserDTO = {
userId: string;
orgId: string;
operations: {
op: string;
path?: string;
value?:
| string
| {
active: boolean;
};
}[];
};
export type TReplaceScimUserDTO = {
userId: string;
active: boolean;
orgId: string;
};
export type TScimTokenJwtPayload = {
scimTokenId: string;
authTokenType: string;
};
export type TScimUser = {
schemas: string[];
id: string;
userName: string;
displayName: string;
name: {
givenName: string;
middleName: null;
familyName: string;
};
emails: {
primary: boolean;
value: string;
type: string;
}[];
active: boolean;
groups: string[];
meta: {
resourceType: string;
location: null;
};
};

View File

@ -1,13 +1,8 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import {
SecretApprovalRequestsSecretsSchema,
TableName,
TSecretApprovalRequestsSecrets,
TSecretTags
} from "@app/db/schemas";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { SecretApprovalRequestsSecretsSchema, TableName, TSecretTags } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
export type TSecretApprovalRequestSecretDALFactory = ReturnType<typeof secretApprovalRequestSecretDALFactory>;
@ -16,35 +11,6 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
const secretApprovalRequestSecretOrm = ormify(db, TableName.SecretApprovalRequestSecret);
const secretApprovalRequestSecretTagOrm = ormify(db, TableName.SecretApprovalRequestSecretTag);
const bulkUpdateNoVersionIncrement = async (data: TSecretApprovalRequestsSecrets[], tx?: Knex) => {
try {
const existingApprovalSecrets = await secretApprovalRequestSecretOrm.find(
{
$in: {
id: data.map((el) => el.id)
}
},
{ tx }
);
if (existingApprovalSecrets.length !== data.length) {
throw new BadRequestError({ message: "Some of the secret approvals do not exist" });
}
if (data.length === 0) return [];
const updatedApprovalSecrets = await (tx || db)(TableName.SecretApprovalRequestSecret)
.insert(data)
.onConflict("id") // this will cause a conflict then merge the data
.merge() // Merge the data with the existing data
.returning("*");
return updatedApprovalSecrets;
} catch (error) {
throw new DatabaseError({ error, name: "bulk update secret" });
}
};
const findByRequestId = async (requestId: string, tx?: Knex) => {
try {
const doc = await (tx || db)({
@ -224,7 +190,6 @@ export const secretApprovalRequestSecretDALFactory = (db: TDbClient) => {
return {
...secretApprovalRequestSecretOrm,
findByRequestId,
bulkUpdateNoVersionIncrement,
insertApprovalSecretTags: secretApprovalRequestSecretTagOrm.insertMany
};
};
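
The removed bulkUpdateNoVersionIncrement relies on a Postgres upsert through knex; the core pattern, reduced to a generic sketch (table name and row shape are placeholders):

import { Knex } from "knex";

// Insert rows and, on id conflict, merge the new values over the existing row.
const upsertByIdSketch = async (db: Knex, table: string, rows: { id: string }[]) => {
  if (!rows.length) return [];
  return db(table).insert(rows).onConflict("id").merge().returning("*");
};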

View File

@ -11,7 +11,6 @@ import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { groupBy, pick, unique } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
@ -48,7 +47,6 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany">;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
secretService: Pick<
TSecretServiceFactory,
| "fnSecretBulkInsert"
@ -69,7 +67,6 @@ export const secretApprovalRequestServiceFactory = ({
secretApprovalRequestReviewerDAL,
secretApprovalRequestSecretDAL,
secretBlindIndexDAL,
projectDAL,
permissionService,
snapshotService,
secretService,
@ -437,8 +434,6 @@ export const secretApprovalRequestServiceFactory = ({
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);
await projectDAL.checkProjectUpgradeStatus(projectId);
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder) throw new BadRequestError({ message: "Folder not found", name: "GenSecretApproval" });
const folderId = folder.id;

View File

@ -8,9 +8,6 @@ import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "../config/env";
export const decodeBase64 = (s: string) => naclUtils.decodeBase64(s);
export const encodeBase64 = (u: Uint8Array) => naclUtils.encodeBase64(u);
export type TDecryptSymmetricInput = {
ciphertext: string;
iv: string;
@ -47,7 +44,7 @@ export const encryptSymmetric = (plaintext: string, key: string) => {
};
};
export const encryptSymmetric128BitHexKeyUTF8 = (plaintext: string, key: string | Buffer) => {
export const encryptSymmetric128BitHexKeyUTF8 = (plaintext: string, key: string) => {
const iv = crypto.randomBytes(BLOCK_SIZE_BYTES_16);
const cipher = crypto.createCipheriv(SecretEncryptionAlgo.AES_256_GCM, key, iv);
@ -61,12 +58,7 @@ export const encryptSymmetric128BitHexKeyUTF8 = (plaintext: string, key: string
};
};
export const decryptSymmetric128BitHexKeyUTF8 = ({
ciphertext,
iv,
tag,
key
}: Omit<TDecryptSymmetricInput, "key"> & { key: string | Buffer }): string => {
export const decryptSymmetric128BitHexKeyUTF8 = ({ ciphertext, iv, tag, key }: TDecryptSymmetricInput): string => {
const decipher = crypto.createDecipheriv(SecretEncryptionAlgo.AES_256_GCM, key, Buffer.from(iv, "base64"));
decipher.setAuthTag(Buffer.from(tag, "base64"));

View File

@ -1,20 +1,12 @@
export {
buildSecretBlindIndexFromName,
createSecretBlindIndex,
decodeBase64,
decryptAsymmetric,
decryptSymmetric,
decryptSymmetric128BitHexKeyUTF8,
encodeBase64,
encryptAsymmetric,
encryptSymmetric,
encryptSymmetric128BitHexKeyUTF8,
generateAsymmetricKeyPair
} from "./encryption";
export {
decryptIntegrationAuths,
decryptSecretApprovals,
decryptSecrets,
decryptSecretVersions
} from "./secret-encryption";
export { generateSrpServerKey, srpCheckClientProof } from "./srp";

View File

@ -1,293 +0,0 @@
import crypto from "crypto";
import { z } from "zod";
import {
IntegrationAuthsSchema,
SecretApprovalRequestsSecretsSchema,
SecretsSchema,
SecretVersionsSchema,
TIntegrationAuths,
TProjectKeys,
TSecretApprovalRequestsSecrets,
TSecrets,
TSecretVersions
} from "../../db/schemas";
import { decryptAsymmetric } from "./encryption";
const DecryptedValuesSchema = z.object({
id: z.string(),
secretKey: z.string(),
secretValue: z.string(),
secretComment: z.string().optional()
});
const DecryptedSecretSchema = z.object({
decrypted: DecryptedValuesSchema,
original: SecretsSchema
});
const DecryptedIntegrationAuthsSchema = z.object({
decrypted: z.object({
id: z.string(),
access: z.string(),
accessId: z.string(),
refresh: z.string()
}),
original: IntegrationAuthsSchema
});
const DecryptedSecretVersionsSchema = z.object({
decrypted: DecryptedValuesSchema,
original: SecretVersionsSchema
});
const DecryptedSecretApprovalsSchema = z.object({
decrypted: DecryptedValuesSchema,
original: SecretApprovalRequestsSecretsSchema
});
type DecryptedSecret = z.infer<typeof DecryptedSecretSchema>;
type DecryptedSecretVersions = z.infer<typeof DecryptedSecretVersionsSchema>;
type DecryptedSecretApprovals = z.infer<typeof DecryptedSecretApprovalsSchema>;
type DecryptedIntegrationAuths = z.infer<typeof DecryptedIntegrationAuthsSchema>;
type TLatestKey = TProjectKeys & {
sender: {
publicKey: string;
};
};
const decryptCipher = ({
ciphertext,
iv,
tag,
key
}: {
ciphertext: string;
iv: string;
tag: string;
key: string | Buffer;
}) => {
const decipher = crypto.createDecipheriv("aes-256-gcm", key, Buffer.from(iv, "base64"));
decipher.setAuthTag(Buffer.from(tag, "base64"));
let cleartext = decipher.update(ciphertext, "base64", "utf8");
cleartext += decipher.final("utf8");
return cleartext;
};
const getDecryptedValues = (data: Array<{ ciphertext: string; iv: string; tag: string }>, key: string | Buffer) => {
const results: string[] = [];
for (const { ciphertext, iv, tag } of data) {
if (!ciphertext || !iv || !tag) {
results.push("");
} else {
results.push(decryptCipher({ ciphertext, iv, tag, key }));
}
}
return results;
};
export const decryptSecrets = (encryptedSecrets: TSecrets[], privateKey: string, latestKey: TLatestKey) => {
const key = decryptAsymmetric({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,
publicKey: latestKey.sender.publicKey,
privateKey
});
const decryptedSecrets: DecryptedSecret[] = [];
encryptedSecrets.forEach((encSecret) => {
const [secretKey, secretValue, secretComment] = getDecryptedValues(
[
{
ciphertext: encSecret.secretKeyCiphertext,
iv: encSecret.secretKeyIV,
tag: encSecret.secretKeyTag
},
{
ciphertext: encSecret.secretValueCiphertext,
iv: encSecret.secretValueIV,
tag: encSecret.secretValueTag
},
{
ciphertext: encSecret.secretCommentCiphertext || "",
iv: encSecret.secretCommentIV || "",
tag: encSecret.secretCommentTag || ""
}
],
key
);
const decryptedSecret: DecryptedSecret = {
decrypted: {
secretKey,
secretValue,
secretComment,
id: encSecret.id
},
original: encSecret
};
decryptedSecrets.push(DecryptedSecretSchema.parse(decryptedSecret));
});
return decryptedSecrets;
};
export const decryptSecretVersions = (
encryptedSecretVersions: TSecretVersions[],
privateKey: string,
latestKey: TLatestKey
) => {
const key = decryptAsymmetric({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,
publicKey: latestKey.sender.publicKey,
privateKey
});
const decryptedSecrets: DecryptedSecretVersions[] = [];
encryptedSecretVersions.forEach((encSecret) => {
const [secretKey, secretValue, secretComment] = getDecryptedValues(
[
{
ciphertext: encSecret.secretKeyCiphertext,
iv: encSecret.secretKeyIV,
tag: encSecret.secretKeyTag
},
{
ciphertext: encSecret.secretValueCiphertext,
iv: encSecret.secretValueIV,
tag: encSecret.secretValueTag
},
{
ciphertext: encSecret.secretCommentCiphertext || "",
iv: encSecret.secretCommentIV || "",
tag: encSecret.secretCommentTag || ""
}
],
key
);
const decryptedSecret: DecryptedSecretVersions = {
decrypted: {
secretKey,
secretValue,
secretComment,
id: encSecret.id
},
original: encSecret
};
decryptedSecrets.push(DecryptedSecretVersionsSchema.parse(decryptedSecret));
});
return decryptedSecrets;
};
export const decryptSecretApprovals = (
encryptedSecretApprovals: TSecretApprovalRequestsSecrets[],
privateKey: string,
latestKey: TLatestKey
) => {
const key = decryptAsymmetric({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,
publicKey: latestKey.sender.publicKey,
privateKey
});
const decryptedSecrets: DecryptedSecretApprovals[] = [];
encryptedSecretApprovals.forEach((encApproval) => {
const [secretKey, secretValue, secretComment] = getDecryptedValues(
[
{
ciphertext: encApproval.secretKeyCiphertext,
iv: encApproval.secretKeyIV,
tag: encApproval.secretKeyTag
},
{
ciphertext: encApproval.secretValueCiphertext,
iv: encApproval.secretValueIV,
tag: encApproval.secretValueTag
},
{
ciphertext: encApproval.secretCommentCiphertext || "",
iv: encApproval.secretCommentIV || "",
tag: encApproval.secretCommentTag || ""
}
],
key
);
const decryptedSecret: DecryptedSecretApprovals = {
decrypted: {
secretKey,
secretValue,
secretComment,
id: encApproval.id
},
original: encApproval
};
decryptedSecrets.push(DecryptedSecretApprovalsSchema.parse(decryptedSecret));
});
return decryptedSecrets;
};
export const decryptIntegrationAuths = (
encryptedIntegrationAuths: TIntegrationAuths[],
privateKey: string,
latestKey: TLatestKey
) => {
const key = decryptAsymmetric({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,
publicKey: latestKey.sender.publicKey,
privateKey
});
const decryptedIntegrationAuths: DecryptedIntegrationAuths[] = [];
encryptedIntegrationAuths.forEach((encAuth) => {
const [access, accessId, refresh] = getDecryptedValues(
[
{
ciphertext: encAuth.accessCiphertext || "",
iv: encAuth.accessIV || "",
tag: encAuth.accessTag || ""
},
{
ciphertext: encAuth.accessIdCiphertext || "",
iv: encAuth.accessIdIV || "",
tag: encAuth.accessIdTag || ""
},
{
ciphertext: encAuth.refreshCiphertext || "",
iv: encAuth.refreshIV || "",
tag: encAuth.refreshTag || ""
}
],
key
);
decryptedIntegrationAuths.push({
decrypted: {
id: encAuth.id,
access,
accessId,
refresh
},
original: encAuth
});
});
return decryptedIntegrationAuths;
};

View File

@ -1,12 +1,4 @@
import argon2 from "argon2";
import crypto from "crypto";
import jsrp from "jsrp";
import nacl from "tweetnacl";
import tweetnacl from "tweetnacl-util";
import { TUserEncryptionKeys } from "@app/db/schemas";
import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
export const generateSrpServerKey = async (salt: string, verifier: string) => {
// eslint-disable-next-line new-cap
@ -32,99 +24,3 @@ export const srpCheckClientProof = async (
server.setClientPublicKey(clientPublicKey);
return server.checkClientProof(clientProof);
};
// Ghost user related:
// This functionality is intended for ghost user logic. This happens on the frontend when a user is being created.
// We replicate the same functionality on the backend when creating a ghost user.
export const generateUserSrpKeys = async (email: string, password: string) => {
const pair = nacl.box.keyPair();
const secretKeyUint8Array = pair.secretKey;
const publicKeyUint8Array = pair.publicKey;
const privateKey = tweetnacl.encodeBase64(secretKeyUint8Array);
const publicKey = tweetnacl.encodeBase64(publicKeyUint8Array);
// eslint-disable-next-line
const client = new jsrp.client();
await new Promise((resolve) => {
client.init({ username: email, password }, () => resolve(null));
});
const { salt, verifier } = await new Promise<{ salt: string; verifier: string }>((resolve, reject) => {
client.createVerifier((err, res) => {
if (err) return reject(err);
return resolve(res);
});
});
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = crypto.randomBytes(32);
// create encrypted private key by encrypting the private
// key with the symmetric key [key]
const {
ciphertext: encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag
} = encryptSymmetric(privateKey, key.toString("base64"));
// create the protected key by encrypting the symmetric key
// [key] with the derived key
const {
ciphertext: protectedKey,
iv: protectedKeyIV,
tag: protectedKeyTag
} = encryptSymmetric(key.toString("hex"), derivedKey.toString("base64"));
return {
protectedKey,
plainPrivateKey: privateKey,
protectedKeyIV,
protectedKeyTag,
publicKey,
encryptedPrivateKey,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag,
salt,
verifier
};
};
export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric({
ciphertext: user.protectedKey!,
iv: user.protectedKeyIV!,
tag: user.protectedKeyTag!,
key: derivedKey.toString("base64")
});
const privateKey = decryptSymmetric({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key
});
return privateKey;
};
export const buildUserProjectKey = async (privateKey: string, publickey: string) => {
const randomBytes = crypto.randomBytes(16).toString("hex");
const { nonce, ciphertext } = encryptAsymmetric(randomBytes, publickey, privateKey);
return { nonce, ciphertext };
};

View File

@ -58,35 +58,3 @@ export class BadRequestError extends Error {
this.error = error;
}
}
export class ScimRequestError extends Error {
name: string;
schemas: string[];
detail: string;
status: number;
error: unknown;
constructor({
name,
error,
detail,
status
}: {
message?: string;
name?: string;
error?: unknown;
detail: string;
status: number;
}) {
super(detail ?? "The request is invalid");
this.name = name || "ScimRequestError";
this.schemas = ["urn:ietf:params:scim:api:messages:2.0:Error"];
this.error = error;
this.detail = detail;
this.status = status;
}
}
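
Usage sketch for the removed error class: route handlers throw it, and the fastify error handler further down maps it straight onto a SCIM error body.

// Example throw site (mirrors the checks in scim-service above):
// throw new ScimRequestError({ detail: "SCIM is disabled for the organization", status: 403 });

// The shape the error handler sends back for it:
const toScimErrorBodySketch = (err: ScimRequestError) => ({
  schemas: err.schemas, // ["urn:ietf:params:scim:api:messages:2.0:Error"]
  status: err.status,
  detail: err.detail
});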

View File

@ -1,7 +1,6 @@
import { Job, JobsOptions, Queue, QueueOptions, RepeatOptions, Worker, WorkerListener } from "bullmq";
import Redis from "ioredis";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import {
TScanFullRepoEventPayload,
@ -16,8 +15,7 @@ export enum QueueName {
IntegrationSync = "sync-integrations",
SecretWebhook = "secret-webhook",
SecretFullRepoScan = "secret-full-repo-scan",
SecretPushEventScan = "secret-push-event-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost"
SecretPushEventScan = "secret-push-event-scan"
}
export enum QueueJobs {
@ -27,8 +25,7 @@ export enum QueueJobs {
AuditLogPrune = "audit-log-prune-job",
SecWebhook = "secret-webhook-trigger",
IntegrationSync = "secret-integration-pull",
SecretScan = "secret-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost-job"
SecretScan = "secret-scan"
}
export type TQueueJobTypes = {
@ -67,20 +64,6 @@ export type TQueueJobTypes = {
payload: TScanFullRepoEventPayload;
};
[QueueName.SecretPushEventScan]: { name: QueueJobs.SecretScan; payload: TScanPushEventPayload };
[QueueName.UpgradeProjectToGhost]: {
name: QueueJobs.UpgradeProjectToGhost;
payload: {
projectId: string;
startedByUserId: string;
encryptedPrivateKey: {
encryptedKey: string;
encryptedKeyIv: string;
encryptedKeyTag: string;
keyEncoding: SecretKeyEncoding;
};
};
};
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;

View File

@ -37,7 +37,7 @@ type TMain = {
export const main = async ({ db, smtp, logger, queue }: TMain) => {
const appCfg = getConfig();
const server = fasitfy({
logger: appCfg.NODE_ENV === "test" ? false : logger,
logger,
trustProxy: true,
connectionTimeout: 30 * 1000,
ignoreTrailingSlash: true

View File

@ -1,17 +0,0 @@
import { FastifyRequest } from "fastify";
import { ActorType } from "@app/services/auth/auth-type";
// this is a unique id for sending posthog events
export const getTelemetryDistinctId = (req: FastifyRequest) => {
if (req.auth.actor === ActorType.USER) {
return req.auth.user.email;
}
if (req.auth.actor === ActorType.IDENTITY) {
return `identity-${req.auth.identityId}`;
}
if (req.auth.actor === ActorType.SERVICE) {
return req.auth.serviceToken.createdByEmail || `service-token-null-creator-${req.auth.serviceTokenId}`; // when user gets removed from system
}
return "unknown-auth-data";
};
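
Hedged usage sketch for the removed helper, assuming a posthog-node client named posthog is in scope and using a placeholder event name:

// distinctId ties the event to a user, identity, or service token as resolved above.
posthog.capture({
  distinctId: getTelemetryDistinctId(req),
  event: "secrets.pulled" // placeholder event name
});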

View File

@ -63,11 +63,6 @@ export const injectAuditLogInfo = fp(async (server: FastifyZodProvider) => {
identityId: req.auth.identityId
}
};
} else if (req.auth.actor === ActorType.SCIM_CLIENT) {
payload.actor = {
type: ActorType.SCIM_CLIENT,
metadata: {}
};
} else {
throw new BadRequestError({ message: "Missing logic for other actor" });
}

View File

@ -3,7 +3,6 @@ import fp from "fastify-plugin";
import jwt, { JwtPayload } from "jsonwebtoken";
import { TServiceTokens, TUsers } from "@app/db/schemas";
import { TScimTokenJwtPayload } from "@app/ee/services/scim/scim-types";
import { getConfig } from "@app/lib/config/env";
import { UnauthorizedError } from "@app/lib/errors";
import { ActorType, AuthMode, AuthModeJwtTokenPayload, AuthTokenType } from "@app/services/auth/auth-type";
@ -27,7 +26,7 @@ export type TAuthMode =
}
| {
authMode: AuthMode.SERVICE_TOKEN;
serviceToken: TServiceTokens & { createdByEmail: string };
serviceToken: TServiceTokens;
actor: ActorType.SERVICE;
serviceTokenId: string;
}
@ -36,12 +35,6 @@ export type TAuthMode =
actor: ActorType.IDENTITY;
identityId: string;
identityName: string;
}
| {
authMode: AuthMode.SCIM_TOKEN;
actor: ActorType.SCIM_CLIENT;
scimTokenId: string;
orgId: string;
};
const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
@ -62,7 +55,6 @@ const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
}
const decodedToken = jwt.verify(authTokenValue, jwtSecret) as JwtPayload;
switch (decodedToken.authTokenType) {
case AuthTokenType.ACCESS_TOKEN:
return {
@ -78,12 +70,6 @@ const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
token: decodedToken as TIdentityAccessTokenJwtPayload,
actor: ActorType.IDENTITY
} as const;
case AuthTokenType.SCIM_TOKEN:
return {
authMode: AuthMode.SCIM_TOKEN,
token: decodedToken as TScimTokenJwtPayload,
actor: ActorType.SCIM_CLIENT
} as const;
default:
return { authMode: null, token: null } as const;
}
@ -127,11 +113,6 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
req.auth = { authMode: AuthMode.API_KEY as const, userId: user.id, actor, user };
break;
}
case AuthMode.SCIM_TOKEN: {
const { orgId, scimTokenId } = await server.services.scim.fnValidateScimToken(token);
req.auth = { authMode: AuthMode.SCIM_TOKEN, actor, scimTokenId, orgId };
break;
}
default:
throw new UnauthorizedError({ name: "Unknown token strategy" });
}

View File

@ -14,8 +14,6 @@ export const injectPermission = fp(async (server) => {
req.permission = { type: ActorType.IDENTITY, id: req.auth.identityId };
} else if (req.auth.actor === ActorType.SERVICE) {
req.permission = { type: ActorType.SERVICE, id: req.auth.serviceTokenId };
} else if (req.auth.actor === ActorType.SCIM_CLIENT) {
req.permission = { type: ActorType.SCIM_CLIENT, id: req.auth.scimTokenId, orgId: req.auth.orgId };
}
});
});

View File

@ -2,13 +2,7 @@ import { ForbiddenError } from "@casl/ability";
import fastifyPlugin from "fastify-plugin";
import { ZodError } from "zod";
import {
BadRequestError,
DatabaseError,
InternalServerError,
ScimRequestError,
UnauthorizedError
} from "@app/lib/errors";
import { BadRequestError, DatabaseError, InternalServerError, UnauthorizedError } from "@app/lib/errors";
export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider) => {
server.setErrorHandler((error, req, res) => {
@ -27,12 +21,6 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
error: "PermissionDenied",
message: `You are not allowed to ${error.action} on ${error.subjectType}`
});
} else if (error instanceof ScimRequestError) {
void res.status(error.status).send({
schemas: error.schemas,
status: error.status,
detail: error.detail
});
} else {
void res.send(error);
}

View File

@ -11,8 +11,6 @@ import { permissionDALFactory } from "@app/ee/services/permission/permission-dal
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { samlConfigDALFactory } from "@app/ee/services/saml-config/saml-config-dal";
import { samlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { scimDALFactory } from "@app/ee/services/scim/scim-dal";
import { scimServiceFactory } from "@app/ee/services/scim/scim-service";
import { secretApprovalPolicyApproverDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-approver-dal";
import { secretApprovalPolicyDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-dal";
import { secretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@ -65,7 +63,6 @@ import { orgRoleDALFactory } from "@app/services/org/org-role-dal";
import { orgRoleServiceFactory } from "@app/services/org/org-role-service";
import { orgServiceFactory } from "@app/services/org/org-service";
import { projectDALFactory } from "@app/services/project/project-dal";
import { projectQueueFactory } from "@app/services/project/project-queue";
import { projectServiceFactory } from "@app/services/project/project-service";
import { projectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { projectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@ -158,7 +155,6 @@ export const registerRoutes = async (
const auditLogDAL = auditLogDALFactory(db);
const trustedIpDAL = trustedIpDALFactory(db);
const scimDAL = scimDALFactory(db);
// ee db layer ops
const permissionDAL = permissionDALFactory(db);
@ -192,7 +188,6 @@ export const registerRoutes = async (
trustedIpDAL,
permissionService
});
const auditLogQueue = auditLogQueueServiceFactory({
auditLogDAL,
queueService,
@ -215,16 +210,6 @@ export const registerRoutes = async (
samlConfigDAL,
licenseService
});
const scimService = scimServiceFactory({
licenseService,
scimDAL,
userDAL,
orgDAL,
projectDAL,
projectMembershipDAL,
permissionService,
smtpService
});
const telemetryService = telemetryServiceFactory();
const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL });
@ -281,13 +266,19 @@ export const registerRoutes = async (
secretScanningDAL,
secretScanningQueue
});
const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL, projectDAL });
const projectService = projectServiceFactory({
permissionService,
projectDAL,
secretBlindIndexDAL,
projectEnvDAL,
projectMembershipDAL,
folderDAL,
licenseService
});
const projectMembershipService = projectMembershipServiceFactory({
projectMembershipDAL,
projectDAL,
permissionService,
projectBotDAL,
orgDAL,
userDAL,
smtpService,
@ -295,46 +286,6 @@ export const registerRoutes = async (
projectRoleDAL,
licenseService
});
const projectKeyService = projectKeyServiceFactory({
permissionService,
projectKeyDAL,
projectMembershipDAL
});
const projectQueueService = projectQueueFactory({
queueService,
secretDAL,
folderDAL,
projectDAL,
orgDAL,
integrationAuthDAL,
orgService,
projectEnvDAL,
userDAL,
secretVersionDAL,
projectKeyDAL,
projectBotDAL,
projectMembershipDAL,
secretApprovalRequestDAL,
secretApprovalSecretDAL: sarSecretDAL
});
const projectService = projectServiceFactory({
permissionService,
projectDAL,
projectQueue: projectQueueService,
secretBlindIndexDAL,
identityProjectDAL,
identityOrgMembershipDAL,
projectBotDAL,
projectKeyDAL,
userDAL,
projectEnvDAL,
orgService,
projectMembershipDAL,
folderDAL,
licenseService
});
const projectEnvService = projectEnvServiceFactory({
permissionService,
projectEnvDAL,
@ -342,7 +293,11 @@ export const registerRoutes = async (
projectDAL,
folderDAL
});
const projectKeyService = projectKeyServiceFactory({
permissionService,
projectKeyDAL,
projectMembershipDAL
});
const projectRoleService = projectRoleServiceFactory({ permissionService, projectRoleDAL });
const snapshotService = secretSnapshotServiceFactory({
@ -377,9 +332,9 @@ export const registerRoutes = async (
folderDAL,
permissionService,
secretImportDAL,
projectDAL,
secretDAL
});
const projectBotService = projectBotServiceFactory({ permissionService, projectBotDAL });
const integrationAuthService = integrationAuthServiceFactory({
integrationAuthDAL,
integrationDAL,
@ -413,7 +368,6 @@ export const registerRoutes = async (
secretVersionTagDAL,
secretBlindIndexDAL,
permissionService,
projectDAL,
secretDAL,
secretTagDAL,
snapshotService,
@ -427,7 +381,6 @@ export const registerRoutes = async (
secretTagDAL,
secretApprovalRequestSecretDAL: sarSecretDAL,
secretApprovalRequestReviewerDAL: sarReviewerDAL,
projectDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretApprovalRequestDAL,
@ -533,7 +486,6 @@ export const registerRoutes = async (
secretScanning: secretScanningService,
license: licenseService,
trustedIp: trustedIpService,
scim: scimService,
secretBlindIndex: secretBlindIndexService,
telemetry: telemetryService
});
@ -585,8 +537,4 @@ export const registerRoutes = async (
);
await server.register(registerV2Routes, { prefix: "/api/v2" });
await server.register(registerV3Routes, { prefix: "/api/v3" });
server.addHook("onClose", async () => {
await telemetryService.flushAll();
});
};

View File

@ -1,6 +1,6 @@
import { z } from "zod";
import { OrganizationsSchema, SuperAdminSchema, UsersSchema } from "@app/db/schemas";
import { SuperAdminSchema, UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { UnauthorizedError } from "@app/lib/errors";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
@ -31,8 +31,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
method: "PATCH",
schema: {
body: z.object({
allowSignUp: z.boolean().optional(),
allowedSignUpDomain: z.string().optional().nullable()
allowSignUp: z.boolean().optional()
}),
response: {
200: z.object({
@ -73,7 +72,6 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
200: z.object({
message: z.string(),
user: UsersSchema,
organization: OrganizationsSchema,
token: z.string(),
new: z.string()
})
@ -84,7 +82,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
const serverCfg = await getServerCfg();
if (serverCfg.initialized)
throw new UnauthorizedError({ name: "Admin sign up", message: "Admin has been created" });
const { user, token, organization } = await server.services.superAdmin.adminSignUp({
const { user, token } = await server.services.superAdmin.adminSignUp({
...req.body,
ip: req.realIp,
userAgent: req.headers["user-agent"] || ""
@ -111,7 +109,6 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
message: "Successfully set up admin account",
user: user.user,
token: token.access,
organization,
new: "123"
};
}

View File

@ -2,16 +2,14 @@ import { z } from "zod";
import { IdentitiesSchema, OrgMembershipRole } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerIdentityRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Create identity",
security: [
@ -51,17 +49,6 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
}
});
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.MachineIdentityCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
orgId: req.body.organizationId,
name: identity.name,
identityId: identity.id,
...req.auditLogInfo
}
});
return { identity };
}
});
@ -69,7 +56,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
server.route({
method: "PATCH",
url: "/:identityId",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Update identity",
security: [
@ -118,7 +105,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
server.route({
method: "DELETE",
url: "/:identityId",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Delete identity",
security: [

View File

@ -48,7 +48,6 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await projectRouter.register(registerProjectMembershipRouter);
await projectRouter.register(registerSecretTagRouter);
},
{ prefix: "/workspace" }
);

View File

@ -3,10 +3,8 @@ import { z } from "zod";
import { IntegrationsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { removeTrailingSlash, shake } from "@app/lib/fn";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";
export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
server.route({
@ -55,8 +53,13 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
...req.body
});
const createIntegrationEventProperty = shake({
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: integrationAuth.projectId,
event: {
type: EventType.CREATE_INTEGRATION,
// eslint-disable-next-line
metadata: shake({
integrationId: integration.id.toString(),
integration: integration.integration,
environment: req.body.sourceEnvironment,
@ -70,25 +73,8 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
targetServiceId: integration.targetServiceId,
path: integration.path,
region: integration.region
}) as TIntegrationCreatedEvent["properties"];
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: integrationAuth.projectId,
event: {
type: EventType.CREATE_INTEGRATION,
// eslint-disable-next-line
metadata: createIntegrationEventProperty
}
});
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IntegrationCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
...createIntegrationEventProperty,
projectId: integrationAuth.projectId,
...req.auditLogInfo
}) as any
}
});
return { integration };

View File

@ -1,10 +1,8 @@
import { z } from "zod";
import { UsersSchema } from "@app/db/schemas";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
server.route({
@ -32,15 +30,6 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId
});
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.UserOrgInvitation,
distinctId: getTelemetryDistinctId(req),
properties: {
inviteeEmail: req.body.inviteeEmail,
...req.auditLogInfo
}
});
return {
completeInviteLink,
message: `Send an invite link to ${req.body.inviteeEmail}`

View File

@ -93,8 +93,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
.trim()
.regex(/^[a-zA-Z0-9-]+$/, "Name must only contain alphanumeric characters or hyphens")
.optional(),
authEnforced: z.boolean().optional(),
scimEnabled: z.boolean().optional()
authEnforced: z.boolean().optional()
}),
response: {
200: z.object({

View File

@ -1,12 +1,6 @@
import { z } from "zod";
import {
OrgMembershipsSchema,
ProjectMembershipRole,
ProjectMembershipsSchema,
UserEncryptionKeysSchema,
UsersSchema
} from "@app/db/schemas";
import { OrgMembershipsSchema, ProjectMembershipsSchema, UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -86,10 +80,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
actor: req.permission.type,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId,
members: req.body.members.map((member) => ({
...member,
projectRole: ProjectMembershipRole.Member
}))
members: req.body.members
});
await server.services.auditLog.createAuditLog({

View File

@ -2,11 +2,13 @@ import { z } from "zod";
import {
IntegrationsSchema,
ProjectKeysSchema,
ProjectMembershipsSchema,
ProjectsSchema,
UserEncryptionKeysSchema,
UsersSchema
} from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -117,7 +119,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const workspace = await server.services.project.getAProject({
actorId: req.permission.id,
@ -169,7 +171,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const workspace = await server.services.project.deleteProject({
actorId: req.permission.id,
@ -214,41 +216,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
url: "/:workspaceId",
method: "PATCH",
schema: {
params: z.object({
workspaceId: z.string().trim()
}),
body: z.object({
name: z.string().trim().optional(),
autoCapitalization: z.boolean().optional()
}),
response: {
200: z.object({
workspace: ProjectsSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const workspace = await server.services.project.updateProject({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId,
update: {
name: req.body.name,
autoCapitalization: req.body.autoCapitalization
}
});
return {
workspace
};
}
});
server.route({
url: "/:workspaceId/auto-capitalization",
method: "POST",
@ -282,6 +249,48 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
url: "/:workspaceId/invite-signup",
method: "POST",
schema: {
params: z.object({
workspaceId: z.string().trim()
}),
body: z.object({
email: z.string().trim()
}),
response: {
200: z.object({
invitee: UsersSchema,
latestKey: ProjectKeysSchema.optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { invitee, latestKey } = await server.services.projectMembership.inviteUserToProject({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId,
email: req.body.email
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.params.workspaceId,
event: {
type: EventType.ADD_WORKSPACE_MEMBER,
metadata: {
userId: invitee.id,
email: invitee.email
}
}
});
return { invitee, latestKey };
}
});
server.route({
url: "/:workspaceId/integrations",
method: "GET",

View File

@ -18,6 +18,7 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { fetchGithubEmails } from "@app/lib/requests/github";
import { AuthMethod } from "@app/services/auth/auth-type";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
export const registerSsoRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
@ -41,6 +42,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
async (req, _accessToken, _refreshToken, profile, cb) => {
try {
const email = profile?.emails?.[0]?.value;
const serverCfg = await getServerCfg();
if (!email)
throw new BadRequestError({
message: "Email not found",
@ -52,7 +54,8 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
firstName: profile?.name?.givenName || "",
lastName: profile?.name?.familyName || "",
authMethod: AuthMethod.GOOGLE,
callbackPort: req.query.state as string
callbackPort: req.query.state as string,
isSignupAllowed: Boolean(serverCfg.allowSignUp)
});
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
@ -81,12 +84,14 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
try {
const ghEmails = await fetchGithubEmails(accessToken);
const { email } = ghEmails.filter((gitHubEmail) => gitHubEmail.primary)[0];
const serverCfg = await getServerCfg();
const { isUserCompleted, providerAuthToken } = await server.services.login.oauth2Login({
email,
firstName: profile.displayName,
lastName: "",
authMethod: AuthMethod.GITHUB,
callbackPort: req.query.state as string
callbackPort: req.query.state as string,
isSignupAllowed: Boolean(serverCfg.allowSignUp)
});
return cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
@ -115,12 +120,14 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
async (req: any, _accessToken: string, _refreshToken: string, profile: any, cb: any) => {
try {
const email = profile.emails[0].value;
const serverCfg = await getServerCfg();
const { isUserCompleted, providerAuthToken } = await server.services.login.oauth2Login({
email,
firstName: profile.displayName,
lastName: "",
authMethod: AuthMethod.GITLAB,
callbackPort: req.query.state as string
callbackPort: req.query.state as string,
isSignupAllowed: Boolean(serverCfg.allowSignUp)
});
return cb(null, { isUserCompleted, providerAuthToken });

View File

@ -2,7 +2,6 @@ import { registerIdentityOrgRouter } from "./identity-org-router";
import { registerIdentityProjectRouter } from "./identity-project-router";
import { registerMfaRouter } from "./mfa-router";
import { registerOrgRouter } from "./organization-router";
import { registerProjectMembershipRouter } from "./project-membership-router";
import { registerProjectRouter } from "./project-router";
import { registerServiceTokenRouter } from "./service-token-router";
import { registerUserRouter } from "./user-router";
@ -22,7 +21,6 @@ export const registerV2Routes = async (server: FastifyZodProvider) => {
async (projectServer) => {
await projectServer.register(registerProjectRouter);
await projectServer.register(registerIdentityProjectRouter);
await projectServer.register(registerProjectMembershipRouter);
},
{ prefix: "/workspace" }
);

View File

@ -1,95 +0,0 @@
import { z } from "zod";
import { ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerProjectMembershipRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/:projectId/memberships",
schema: {
params: z.object({
projectId: z.string().describe("The ID of the project.")
}),
body: z.object({
emails: z.string().email().array().describe("Emails of the users to add to the project.")
}),
response: {
200: z.object({
memberships: ProjectMembershipsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const memberships = await server.services.projectMembership.addUsersToProjectNonE2EE({
projectId: req.params.projectId,
actorId: req.permission.id,
actor: req.permission.type,
emails: req.body.emails
});
await server.services.auditLog.createAuditLog({
projectId: req.params.projectId,
...req.auditLogInfo,
event: {
type: EventType.ADD_BATCH_WORKSPACE_MEMBER,
metadata: memberships.map(({ userId, id }) => ({
userId: userId || "",
membershipId: id,
email: ""
}))
}
});
return { memberships };
}
});
server.route({
method: "DELETE",
url: "/:projectId/memberships",
schema: {
params: z.object({
projectId: z.string().describe("The ID of the project.")
}),
body: z.object({
emails: z.string().email().array().describe("Emails of the users to remove from the project.")
}),
response: {
200: z.object({
memberships: ProjectMembershipsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const memberships = await server.services.projectMembership.deleteProjectMemberships({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
projectId: req.params.projectId,
emails: req.body.emails
});
for (const membership of memberships) {
// eslint-disable-next-line no-await-in-loop
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.params.projectId,
event: {
type: EventType.REMOVE_WORKSPACE_MEMBER,
metadata: {
userId: membership.userId,
email: ""
}
}
});
}
return { memberships };
}
});
};

View File

@ -1,23 +1,11 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectKeysSchema, ProjectsSchema } from "@app/db/schemas";
import { ProjectKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
const projectWithEnv = ProjectsSchema.merge(
z.object({
_id: z.string(),
environments: z.object({ name: z.string(), slug: z.string(), id: z.string() }).array()
})
);
export const registerProjectRouter = async (server: FastifyZodProvider) => {
/* Get project key */
server.route({
url: "/:workspaceId/encrypted-key",
method: "GET",
@ -46,8 +34,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
const key = await server.services.projectKey.getLatestProjectKey({
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
projectId: req.params.workspaceId
projectId: req.params.workspaceId,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
@ -64,107 +52,4 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
return key;
}
});
/* Start upgrade of a project */
server.route({
url: "/:projectId/upgrade",
method: "POST",
schema: {
params: z.object({
projectId: z.string().trim()
}),
body: z.object({
userPrivateKey: z.string().trim()
}),
response: {
200: z.void()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
handler: async (req) => {
await server.services.project.upgradeProject({
actorId: req.permission.id,
actor: req.permission.type,
projectId: req.params.projectId,
userPrivateKey: req.body.userPrivateKey
});
}
});
/* Get upgrade status of project */
server.route({
url: "/:projectId/upgrade/status",
method: "GET",
schema: {
params: z.object({
projectId: z.string().trim()
}),
response: {
200: z.object({
status: z.string().nullable()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
handler: async (req) => {
const status = await server.services.project.getProjectUpgradeStatus({
projectId: req.params.projectId,
actor: req.permission.type,
actorId: req.permission.id
});
return { status };
}
});
/* Create new project */
server.route({
method: "POST",
url: "/",
config: {
rateLimit: authRateLimit
},
schema: {
body: z.object({
projectName: z.string().trim(),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional(),
organizationId: z.string().trim()
}),
response: {
200: z.object({
project: projectWithEnv
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const project = await server.services.project.createProject({
actorId: req.permission.id,
actor: req.permission.type,
orgId: req.body.organizationId,
workspaceName: req.body.projectName,
slug: req.body.slug
});
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.ProjectCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
orgId: req.body.organizationId,
name: project.name,
...req.auditLogInfo
}
});
return { project };
}
});
};

View File

@ -197,7 +197,7 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const user = await server.services.user.getMe(req.permission.id);
return { user };

View File

@ -1,3 +1,4 @@
import { FastifyRequest } from "fastify";
import picomatch from "picomatch";
import { z } from "zod";
@ -12,7 +13,6 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CommitType } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { getUserAgentType } from "@app/server/plugins/audit-log";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
@ -20,6 +20,19 @@ import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
import { secretRawSchema } from "../sanitizedSchemas";
const getDistinctId = (req: FastifyRequest) => {
if (req.auth.actor === ActorType.USER) {
return req.auth.user.email;
}
if (req.auth.actor === ActorType.IDENTITY) {
return `identity-${req.auth.identityId}`;
}
if (req.auth.actor === ActorType.SERVICE) {
return `service-token-${req.auth.serviceToken.id}`;
}
return "unknown-auth-data";
};
export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/raw",
@ -97,7 +110,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: secrets.length,
workspaceId,
@ -187,7 +200,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId,
@ -263,7 +276,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretCreated,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId: req.body.workspaceId,
@ -338,7 +351,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretUpdated,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId: req.body.workspaceId,
@ -408,7 +421,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretDeleted,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId: req.body.workspaceId,
@ -514,7 +527,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
if (shouldCapture) {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: shouldRecordK8Event ? approximateNumberTotalSecrets : secrets.length,
workspaceId: req.query.workspaceId,
@ -591,7 +604,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretPulled,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId: req.query.workspaceId,
@ -754,7 +767,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretCreated,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId: req.body.workspaceId,
@ -936,7 +949,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretUpdated,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId: req.body.workspaceId,
@ -1054,7 +1067,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretDeleted,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: 1,
workspaceId: req.body.workspaceId,
@ -1079,6 +1092,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
secrets: z
.object({
secretName: z.string().trim(),
type: z.nativeEnum(SecretType).default(SecretType.Shared),
secretKeyCiphertext: z.string().trim(),
secretKeyIV: z.string().trim(),
secretKeyTag: z.string().trim(),
@ -1125,7 +1139,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
projectId,
policy,
data: {
[CommitType.Create]: inputSecrets
[CommitType.Create]: inputSecrets.filter(({ type }) => type === "shared")
}
});
@ -1174,7 +1188,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretCreated,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: secrets.length,
workspaceId: req.body.workspaceId,
@ -1294,7 +1308,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretUpdated,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: secrets.length,
workspaceId: req.body.workspaceId,
@ -1402,7 +1416,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretDeleted,
distinctId: getTelemetryDistinctId(req),
distinctId: getDistinctId(req),
properties: {
numberOfSecrets: secrets.length,
workspaceId: req.body.workspaceId,

View File

@ -2,9 +2,7 @@ import { z } from "zod";
import { UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSignupRouter = async (server: FastifyZodProvider) => {
@ -25,26 +23,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const { email } = req.body;
const serverCfg = await getServerCfg();
if (!serverCfg.allowSignUp) {
throw new BadRequestError({
message: "Sign up is disabled"
});
}
if (serverCfg?.allowedSignUpDomain) {
const domain = email.split("@")[1];
const allowedDomains = serverCfg.allowedSignUpDomain.split(",").map((e) => e.trim());
if (!allowedDomains.includes(domain)) {
throw new BadRequestError({
message: `Email with a domain (@${domain}) is not supported`
});
}
}
await server.services.signup.beginEmailSignupProcess(email);
return { message: `Sent an email verification code to ${email}` };
await server.services.signup.beginEmailSignupProcess(req.body.email);
return { message: `Sent an email verification code to ${req.body.email}` };
}
});
@ -68,15 +48,8 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const serverCfg = await getServerCfg();
if (!serverCfg.allowSignUp) {
throw new BadRequestError({
message: "Sign up is disabled"
});
}
const { token, user } = await server.services.signup.verifyEmailSignup(req.body.email, req.body.code);
return { message: "Successfuly verified email", token, user };
return { message: "Successfully verified email", token, user };
}
});
@ -117,13 +90,6 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
if (!userAgent) throw new Error("user agent header is required");
const appCfg = getConfig();
const serverCfg = await getServerCfg();
if (!serverCfg.allowSignUp) {
throw new BadRequestError({
message: "Sign up is disabled"
});
}
const { user, accessToken, refreshToken } = await server.services.signup.completeEmailAccountSignup({
...req.body,
ip: req.realIp,
@ -190,8 +156,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
const { user, accessToken, refreshToken } = await server.services.signup.completeAccountInvite({
...req.body,
ip: req.realIp,
userAgent,
authorization: req.headers.authorization as string
userAgent
});
void server.services.telemetry.sendLoopsEvent(user.email, user.firstName || "", user.lastName || "");
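The hunks above remove the server-side signup gating from this router: the allowSignUp flag check and the optional allowedSignUpDomain allowlist. For reference, the removed allowlist logic reduces to comparing the email's domain against a comma-separated list of allowed domains; a minimal sketch of that check (the function name is illustrative):

// Sketch of the domain-allowlist check performed by the removed hunks.
// `allowedSignUpDomain` is assumed to be a comma-separated list such as "acme.com, example.org".
const isSignupDomainAllowed = (email: string, allowedSignUpDomain?: string | null): boolean => {
  if (!allowedSignUpDomain) return true; // no allowlist configured => every domain is allowed

  const domain = email.split("@")[1];
  const allowedDomains = allowedSignUpDomain.split(",").map((d) => d.trim());
  return allowedDomains.includes(domain);
};

// e.g. isSignupDomainAllowed("dev@acme.com", "acme.com, example.org") === true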

View File

@ -4,7 +4,6 @@ import { TUsers, UserDeviceSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service";
import { TokenType } from "../auth-token/auth-token-types";
@ -262,27 +261,21 @@ export const authLoginServiceFactory = ({ userDAL, tokenService, smtpService }:
/*
* OAuth2 login for google,github, and other oauth2 provider
* */
const oauth2Login = async ({ email, firstName, lastName, authMethod, callbackPort }: TOauthLoginDTO) => {
const oauth2Login = async ({
email,
firstName,
lastName,
authMethod,
callbackPort,
isSignupAllowed
}: TOauthLoginDTO) => {
let user = await userDAL.findUserByEmail(email);
const serverCfg = await getServerCfg();
const appCfg = getConfig();
const isOauthSignUpDisabled = !isSignupAllowed && !user;
if (isOauthSignUpDisabled) throw new BadRequestError({ message: "User signup disabled", name: "Oauth 2 login" });
if (!user) {
// Create a new user based on oAuth
if (!serverCfg?.allowSignUp) throw new BadRequestError({ message: "Sign up disabled", name: "Oauth 2 login" });
if (serverCfg?.allowedSignUpDomain) {
const domain = email.split("@")[1];
const allowedDomains = serverCfg.allowedSignUpDomain.split(",").map((e) => e.trim());
if (!allowedDomains.includes(domain))
throw new BadRequestError({
message: `Email with a domain (@${domain}) is not supported`,
name: "Oauth 2 login"
});
}
user = await userDAL.create({ email, firstName, lastName, authMethods: [authMethod], isGhost: false });
user = await userDAL.create({ email, firstName, lastName, authMethods: [authMethod] });
}
const isLinkingRequired = !user?.authMethods?.includes(authMethod);
const isUserCompleted = user.isAccepted;
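As shown above, oauth2Login now receives isSignupAllowed from the caller (the SSO router derives it from getServerCfg()) and only rejects when no account exists yet, so existing users can still log in even when signups are disabled. A minimal sketch of that guard, with placeholder types and a plain Error standing in for BadRequestError:

type ExistingUser = { email: string; isAccepted: boolean } | undefined;

// Existing users always pass; only brand-new users are blocked when signups are disabled.
const ensureOauthSignupAllowed = (user: ExistingUser, isSignupAllowed: boolean): void => {
  const isOauthSignUpDisabled = !isSignupAllowed && !user;
  if (isOauthSignUpDisabled) {
    throw new Error("User signup disabled"); // the service throws BadRequestError here
  }
};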

View File

@ -28,4 +28,5 @@ export type TOauthLoginDTO = {
lastName?: string;
authMethod: AuthMethod;
callbackPort?: string;
isSignupAllowed?: boolean;
};

View File

@ -50,7 +50,7 @@ export const authSignupServiceFactory = ({
throw new Error("Failed to send verification code for complete account");
}
if (!user) {
user = await userDAL.create({ authMethods: [AuthMethod.EMAIL], email, isGhost: false });
user = await userDAL.create({ authMethods: [AuthMethod.EMAIL], email });
}
if (!user) throw new Error("Failed to create user");
@ -212,16 +212,13 @@ export const authSignupServiceFactory = ({
protectedKeyTag,
encryptedPrivateKey,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag,
authorization
encryptedPrivateKeyTag
}: TCompleteAccountInviteDTO) => {
const user = await userDAL.findUserByEmail(email);
if (!user || (user && user.isAccepted)) {
throw new Error("Failed to complete account for complete user");
}
validateSignUpAuthorization(authorization, user.id);
const [orgMembership] = await orgDAL.findMembership({
inviteEmail: email,
status: OrgMembershipStatus.Invited

View File

@ -34,5 +34,4 @@ export type TCompleteAccountInviteDTO = {
verifier: string;
ip: string;
userAgent: string;
authorization: string;
};

View File

@ -17,24 +17,21 @@ export enum AuthTokenType {
API_KEY = "apiKey",
SERVICE_ACCESS_TOKEN = "serviceAccessToken",
SERVICE_REFRESH_TOKEN = "serviceRefreshToken",
IDENTITY_ACCESS_TOKEN = "identityAccessToken",
SCIM_TOKEN = "scimToken"
IDENTITY_ACCESS_TOKEN = "identityAccessToken"
}
export enum AuthMode {
JWT = "jwt",
SERVICE_TOKEN = "serviceToken",
API_KEY = "apiKey",
IDENTITY_ACCESS_TOKEN = "identityAccessToken",
SCIM_TOKEN = "scimToken"
IDENTITY_ACCESS_TOKEN = "identityAccessToken"
}
export enum ActorType { // would extend to AWS, Azure, ...
USER = "user", // userIdentity
SERVICE = "service",
IDENTITY = "identity",
Machine = "machine",
SCIM_CLIENT = "scimClient"
Machine = "machine"
}
export type AuthModeJwtTokenPayload = {

View File

@ -35,12 +35,12 @@ export const identityAccessTokenServiceFactory = ({
}
// ttl check
if (Number(accessTokenTTL) > 0) {
if (accessTokenTTL > 0) {
const currentDate = new Date();
if (accessTokenLastRenewedAt) {
// access token has been renewed
const accessTokenRenewed = new Date(accessTokenLastRenewedAt);
const ttlInMilliseconds = Number(accessTokenTTL) * 1000;
const ttlInMilliseconds = accessTokenTTL * 1000;
const expirationDate = new Date(accessTokenRenewed.getTime() + ttlInMilliseconds);
if (currentDate > expirationDate)
@ -50,7 +50,7 @@ export const identityAccessTokenServiceFactory = ({
} else {
// access token has never been renewed
const accessTokenCreated = new Date(accessTokenCreatedAt);
const ttlInMilliseconds = Number(accessTokenTTL) * 1000;
const ttlInMilliseconds = accessTokenTTL * 1000;
const expirationDate = new Date(accessTokenCreated.getTime() + ttlInMilliseconds);
if (currentDate > expirationDate)
@ -61,9 +61,9 @@ export const identityAccessTokenServiceFactory = ({
}
// max ttl checks
if (Number(accessTokenMaxTTL) > 0) {
if (accessTokenMaxTTL > 0) {
const accessTokenCreated = new Date(accessTokenCreatedAt);
const ttlInMilliseconds = Number(accessTokenMaxTTL) * 1000;
const ttlInMilliseconds = accessTokenMaxTTL * 1000;
const currentDate = new Date();
const expirationDate = new Date(accessTokenCreated.getTime() + ttlInMilliseconds);
@ -72,7 +72,7 @@ export const identityAccessTokenServiceFactory = ({
message: "Failed to renew MI access token due to Max TTL expiration"
});
const extendToDate = new Date(currentDate.getTime() + Number(accessTokenTTL));
const extendToDate = new Date(currentDate.getTime() + accessTokenTTL);
if (extendToDate > expirationDate)
throw new UnauthorizedError({
message: "Failed to renew MI access token past its Max TTL expiration"

View File

@ -69,9 +69,9 @@ export const identityUaServiceFactory = ({
if (!validClientSecretInfo) throw new UnauthorizedError();
const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
if (Number(clientSecretTTL) > 0) {
if (clientSecretTTL > 0) {
const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
const ttlInMilliseconds = clientSecretTTL * 1000;
const currentDate = new Date();
const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);
@ -124,10 +124,7 @@ export const identityUaServiceFactory = ({
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
{
expiresIn:
Number(identityAccessToken.accessTokenMaxTTL) === 0
? undefined
: Number(identityAccessToken.accessTokenMaxTTL)
expiresIn: identityAccessToken.accessTokenMaxTTL === 0 ? undefined : identityAccessToken.accessTokenMaxTTL
}
);
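A similar simplification applies where the universal-auth access token is signed above: a max TTL of 0 is interpreted as "never expires", so expiresIn is omitted in that case. A minimal sketch using jsonwebtoken directly (the payload shape and parameter names are illustrative):

import jwt from "jsonwebtoken";

// expiresIn is given in seconds; leaving it undefined issues a non-expiring token.
const signIdentityAccessToken = (payload: object, authSecret: string, accessTokenMaxTTL: number): string =>
  jwt.sign(payload, authSecret, {
    expiresIn: accessTokenMaxTTL === 0 ? undefined : accessTokenMaxTTL
  });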

View File

@ -1,35 +1,10 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName, TIntegrationAuths, TIntegrationAuthsUpdate } from "@app/db/schemas";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TIntegrationAuthDALFactory = ReturnType<typeof integrationAuthDALFactory>;
export const integrationAuthDALFactory = (db: TDbClient) => {
const integrationAuthOrm = ormify(db, TableName.IntegrationAuth);
const bulkUpdate = async (
data: Array<{ filter: Partial<TIntegrationAuths>; data: TIntegrationAuthsUpdate }>,
tx?: Knex
) => {
try {
const integrationAuths = await Promise.all(
data.map(async ({ filter, data: updateData }) => {
const [doc] = await (tx || db)(TableName.IntegrationAuth).where(filter).update(updateData).returning("*");
if (!doc) throw new BadRequestError({ message: "Failed to update document" });
return doc;
})
);
return integrationAuths;
} catch (error) {
throw new DatabaseError({ error, name: "bulk update secret" });
}
};
return {
...integrationAuthOrm,
bulkUpdate
};
return integrationAuthOrm;
};

View File

@ -76,8 +76,7 @@ export const orgDALFactory = (db: TDbClient) => {
db.ref("lastName").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId"),
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false }); // MAKE SURE USER IS NOT A GHOST USER
);
return members.map(({ email, firstName, lastName, userId, publicKey, ...data }) => ({
...data,
user: { email, firstName, lastName, id: userId, publicKey }
@ -87,79 +86,6 @@ export const orgDALFactory = (db: TDbClient) => {
}
};
const findOrgMembersByEmail = async (orgId: string, emails: string[]) => {
try {
const members = await db(TableName.OrgMembership)
.where({ orgId })
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin<TUserEncryptionKeys>(
TableName.UserEncryptionKey,
`${TableName.UserEncryptionKey}.userId`,
`${TableName.Users}.id`
)
.select(
db.ref("id").withSchema(TableName.OrgMembership),
db.ref("inviteEmail").withSchema(TableName.OrgMembership),
db.ref("orgId").withSchema(TableName.OrgMembership),
db.ref("role").withSchema(TableName.OrgMembership),
db.ref("roleId").withSchema(TableName.OrgMembership),
db.ref("status").withSchema(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId"),
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false })
.whereIn("email", emails);
return members.map(({ email, firstName, lastName, userId, publicKey, ...data }) => ({
...data,
user: { email, firstName, lastName, id: userId, publicKey }
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find all org members" });
}
};
const findOrgGhostUser = async (orgId: string) => {
try {
const member = await db(TableName.OrgMembership)
.where({ orgId })
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
.select(
db.ref("id").withSchema(TableName.OrgMembership),
db.ref("orgId").withSchema(TableName.OrgMembership),
db.ref("role").withSchema(TableName.OrgMembership),
db.ref("roleId").withSchema(TableName.OrgMembership),
db.ref("status").withSchema(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId"),
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: true })
.first();
return member;
} catch (error) {
return null;
}
};
const ghostUserExists = async (orgId: string) => {
try {
const member = await db(TableName.OrgMembership)
.where({ orgId })
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`)
.select(db.ref("id").withSchema(TableName.Users).as("userId"))
.where({ isGhost: true })
.first();
return Boolean(member);
} catch (error) {
return false;
}
};
const create = async (dto: TOrganizationsInsert, tx?: Knex) => {
try {
const [organization] = await (tx || db)(TableName.Organization).insert(dto).returning("*");
@ -239,14 +165,7 @@ export const orgDALFactory = (db: TDbClient) => {
// eslint-disable-next-line
.where(buildFindFilter(filter))
.join(TableName.Users, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
.join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
.select(
selectAllTableCols(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("scimEnabled").withSchema(TableName.Organization)
);
.select(selectAllTableCols(TableName.OrgMembership), db.ref("email").withSchema(TableName.Users));
if (limit) void query.limit(limit);
if (offset) void query.offset(offset);
if (sort) {
@ -265,9 +184,6 @@ export const orgDALFactory = (db: TDbClient) => {
findAllOrgMembers,
findOrgById,
findAllOrgsByUserId,
ghostUserExists,
findOrgMembersByEmail,
findOrgGhostUser,
create,
updateById,
deleteById,

View File

@ -1,41 +0,0 @@
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
type TDeleteOrgMembership = {
orgMembershipId: string;
orgId: string;
orgDAL: Pick<TOrgDALFactory, "findMembership" | "deleteMembershipById" | "transaction">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete">;
};
export const deleteOrgMembership = async ({
orgMembershipId,
orgId,
orgDAL,
projectDAL,
projectMembershipDAL
}: TDeleteOrgMembership) => {
const membership = await orgDAL.transaction(async (tx) => {
// delete org membership
const orgMembership = await orgDAL.deleteMembershipById(orgMembershipId, orgId, tx);
const projects = await projectDAL.find({ orgId }, { tx });
// delete associated project memberships
await projectMembershipDAL.delete(
{
$in: {
projectId: projects.map((project) => project.id)
},
userId: orgMembership.userId as string
},
tx
);
return orgMembership;
});
return membership;
};

View File

@ -1,8 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import crypto from "crypto";
import jwt from "jsonwebtoken";
import { Knex } from "knex";
import { OrgMembershipRole, OrgMembershipStatus } from "@app/db/schemas";
import { TProjects } from "@app/db/schemas/projects";
@ -13,7 +11,6 @@ import { TSamlConfigDALFactory } from "@app/ee/services/saml-config/saml-config-
import { getConfig } from "@app/lib/config/env";
import { generateAsymmetricKeyPair } from "@app/lib/crypto";
import { generateSymmetricKey, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { BadRequestError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { isDisposableEmail } from "@app/lib/validator";
@ -31,7 +28,6 @@ import { TOrgRoleDALFactory } from "./org-role-dal";
import {
TDeleteOrgMembershipDTO,
TFindAllWorkspacesDTO,
TFindOrgMembersByEmailDTO,
TInviteUserToOrgDTO,
TUpdateOrgDTO,
TUpdateOrgMembershipDTO,
@ -97,15 +93,6 @@ export const orgServiceFactory = ({
return members;
};
const findOrgMembersByEmail = async ({ actor, actorId, orgId, emails }: TFindOrgMembersByEmailDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
const members = await orgDAL.findOrgMembersByEmail(orgId, emails);
return members;
};
const findAllWorkspaces = async ({ actor, actorId, actorOrgId, orgId }: TFindAllWorkspacesDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
@ -131,54 +118,6 @@ export const orgServiceFactory = ({
return workspaces.filter((workspace) => organizationWorkspaceIds.has(workspace.id));
};
const addGhostUser = async (orgId: string, tx?: Knex) => {
const email = `sudo-${alphaNumericNanoId(16)}-${orgId}@infisical.com`; // We add a nanoid because the email is unique. And we have to create a new ghost user each time, so we can have access to the private key.
const password = crypto.randomBytes(128).toString("hex");
const user = await userDAL.create(
{
isGhost: true,
authMethods: [AuthMethod.EMAIL],
email,
isAccepted: true
},
tx
);
const encKeys = await generateUserSrpKeys(email, password);
await userDAL.upsertUserEncryptionKey(
user.id,
{
encryptionVersion: 2,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
publicKey: encKeys.publicKey,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
salt: encKeys.salt,
verifier: encKeys.verifier
},
tx
);
const createMembershipData = {
orgId,
userId: user.id,
role: OrgMembershipRole.Admin,
status: OrgMembershipStatus.Accepted
};
await orgDAL.createMembership(createMembershipData, tx);
return {
user,
keys: encKeys
};
};
/*
* Update organization details
* */
@ -187,32 +126,16 @@ export const orgServiceFactory = ({
actorId,
actorOrgId,
orgId,
data: { name, slug, authEnforced, scimEnabled }
data: { name, slug, authEnforced }
}: TUpdateOrgDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const plan = await licenseService.getPlan(orgId);
if (authEnforced !== undefined) {
if (!plan?.samlSSO)
throw new BadRequestError({
message:
"Failed to enforce/un-enforce SAML SSO due to plan restriction. Upgrade plan to enforce/un-enforce SAML SSO."
});
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
}
if (scimEnabled !== undefined) {
if (!plan?.scim)
throw new BadRequestError({
message:
"Failed to enable/disable SCIM provisioning due to plan restriction. Upgrade plan to enable/disable SCIM provisioning."
});
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Scim);
}
if (authEnforced || scimEnabled) {
if (authEnforced) {
const samlCfg = await samlConfigDAL.findEnforceableSamlCfg(orgId);
if (!samlCfg)
throw new BadRequestError({
@ -224,8 +147,7 @@ export const orgServiceFactory = ({
const org = await orgDAL.updateById(orgId, {
name,
slug: slug ? slugify(slug) : undefined,
authEnforced,
scimEnabled
authEnforced
});
if (!org) throw new BadRequestError({ name: "Org not found", message: "Organization not found" });
return org;
@ -399,8 +321,7 @@ export const orgServiceFactory = ({
{
email: inviteeEmail,
isAccepted: false,
authMethods: [AuthMethod.EMAIL],
isGhost: false
authMethods: [AuthMethod.EMAIL]
},
tx
);
@ -549,12 +470,10 @@ export const orgServiceFactory = ({
inviteUserToOrganization,
verifyUserToOrg,
updateOrg,
findOrgMembersByEmail,
createOrganization,
deleteOrganizationById,
deleteOrgMembership,
findAllWorkspaces,
addGhostUser,
updateOrgMembership,
// incident contacts
findIncidentContacts,

View File

@ -30,13 +30,6 @@ export type TVerifyUserToOrgDTO = {
code: string;
};
export type TFindOrgMembersByEmailDTO = {
actor: ActorType;
actorId: string;
orgId: string;
emails: string[];
};
export type TFindAllWorkspacesDTO = {
actor: ActorType;
actorId: string;
@ -45,5 +38,5 @@ export type TFindAllWorkspacesDTO = {
};
export type TUpdateOrgDTO = {
data: Partial<{ name: string; slug: string; authEnforced: boolean; scimEnabled: boolean }>;
data: Partial<{ name: string; slug: string; authEnforced: boolean }>;
} & TOrgPermission;

View File

@ -27,19 +27,5 @@ export const projectBotDALFactory = (db: TDbClient) => {
}
};
const findProjectByBotId = async (botId: string) => {
try {
const project = await db(TableName.ProjectBot)
.where({ [`${TableName.ProjectBot}.id` as "id"]: botId })
.join(TableName.Project, `${TableName.ProjectBot}.projectId`, `${TableName.Project}.id`)
.select(selectAllTableCols(TableName.Project))
.first();
return project || null;
} catch (error) {
throw new DatabaseError({ error, name: "Find project by bot id" });
}
};
return { ...projectBotOrm, findOne, findProjectByBotId };
return { ...projectBotOrm, findOne };
};

View File

@ -1,80 +1,88 @@
import { ForbiddenError } from "@casl/ability";
import { ProjectVersion, SecretKeyEncoding } from "@app/db/schemas";
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { decryptAsymmetric, generateAsymmetricKeyPair } from "@app/lib/crypto";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { getConfig } from "@app/lib/config/env";
import {
decryptAsymmetric,
decryptSymmetric,
decryptSymmetric128BitHexKeyUTF8,
encryptSymmetric,
encryptSymmetric128BitHexKeyUTF8,
generateAsymmetricKeyPair
} from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectBotDALFactory } from "./project-bot-dal";
import { TFindBotByProjectIdDTO, TGetPrivateKeyDTO, TSetActiveStateDTO } from "./project-bot-types";
import { TSetActiveStateDTO } from "./project-bot-types";
type TProjectBotServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findById">;
projectBotDAL: TProjectBotDALFactory;
};
export type TProjectBotServiceFactory = ReturnType<typeof projectBotServiceFactory>;
export const projectBotServiceFactory = ({
projectBotDAL,
projectDAL,
permissionService
}: TProjectBotServiceFactoryDep) => {
const getBotPrivateKey = ({ bot }: TGetPrivateKeyDTO) =>
infisicalSymmetricDecrypt({
keyEncoding: bot.keyEncoding as SecretKeyEncoding,
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey
});
export const projectBotServiceFactory = ({ projectBotDAL, permissionService }: TProjectBotServiceFactoryDep) => {
const getBotKey = async (projectId: string) => {
const appCfg = getConfig();
const encryptionKey = appCfg.ENCRYPTION_KEY;
const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
const bot = await projectBotDAL.findOne({ projectId });
if (!bot) throw new BadRequestError({ message: "failed to find bot key" });
if (!bot.isActive) throw new BadRequestError({ message: "Bot is not active" });
if (!bot.encryptedProjectKeyNonce || !bot.encryptedProjectKey)
throw new BadRequestError({ message: "Encryption key missing" });
const botPrivateKey = getBotPrivateKey({ bot });
if (rootEncryptionKey && (bot.keyEncoding as SecretKeyEncoding) === SecretKeyEncoding.BASE64) {
const privateKeyBot = decryptSymmetric({
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey,
key: rootEncryptionKey
});
return decryptAsymmetric({
ciphertext: bot.encryptedProjectKey,
privateKey: botPrivateKey,
privateKey: privateKeyBot,
nonce: bot.encryptedProjectKeyNonce,
publicKey: bot.sender.publicKey
});
}
if (encryptionKey && (bot.keyEncoding as SecretKeyEncoding) === SecretKeyEncoding.UTF8) {
const privateKeyBot = decryptSymmetric128BitHexKeyUTF8({
iv: bot.iv,
tag: bot.tag,
ciphertext: bot.encryptedPrivateKey,
key: encryptionKey
});
return decryptAsymmetric({
ciphertext: bot.encryptedProjectKey,
privateKey: privateKeyBot,
nonce: bot.encryptedProjectKeyNonce,
publicKey: bot.sender.publicKey
});
}
throw new BadRequestError({
message: "Failed to obtain bot copy of workspace key needed for operation"
});
};
const findBotByProjectId = async ({
actorId,
actor,
projectId,
actorOrgId,
privateKey,
botKey,
publicKey
}: TFindBotByProjectIdDTO) => {
const findBotByProjectId = async ({ actorId, actor, actorOrgId, projectId }: TProjectPermission) => {
const { permission } = await permissionService.getProjectPermission(actor, actorId, projectId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const appCfg = getConfig();
const bot = await projectBotDAL.transaction(async (tx) => {
const doc = await projectBotDAL.findOne({ projectId }, tx);
if (doc) return doc;
const keys = privateKey && publicKey ? { privateKey, publicKey } : generateAsymmetricKeyPair();
const { iv, tag, ciphertext, encoding, algorithm } = infisicalSymmetricEncypt(keys.privateKey);
const project = await projectDAL.findById(projectId, tx);
if (project.version === ProjectVersion.V2) {
throw new BadRequestError({ message: "Failed to create bot, project is upgraded." });
}
const { publicKey, privateKey } = generateAsymmetricKeyPair();
if (appCfg.ROOT_ENCRYPTION_KEY) {
const { iv, tag, ciphertext } = encryptSymmetric(privateKey, appCfg.ROOT_ENCRYPTION_KEY);
return projectBotDAL.create(
{
name: "Infisical Bot",
@ -83,29 +91,35 @@ export const projectBotServiceFactory = ({
iv,
encryptedPrivateKey: ciphertext,
isActive: false,
publicKey: keys.publicKey,
algorithm,
keyEncoding: encoding,
...(botKey && {
encryptedProjectKey: botKey.encryptedKey,
encryptedProjectKeyNonce: botKey.nonce
})
publicKey,
algorithm: SecretEncryptionAlgo.AES_256_GCM,
keyEncoding: SecretKeyEncoding.BASE64
},
tx
);
}
if (appCfg.ENCRYPTION_KEY) {
const { iv, tag, ciphertext } = encryptSymmetric128BitHexKeyUTF8(privateKey, appCfg.ENCRYPTION_KEY);
return projectBotDAL.create(
{
name: "Infisical Bot",
projectId,
tag,
iv,
encryptedPrivateKey: ciphertext,
isActive: false,
publicKey,
algorithm: SecretEncryptionAlgo.AES_256_GCM,
keyEncoding: SecretKeyEncoding.UTF8
},
tx
);
}
throw new BadRequestError({ message: "Failed to create bot due to missing encryption key" });
});
return bot;
};
const findProjectByBotId = async (botId: string) => {
try {
const bot = await projectBotDAL.findProjectByBotId(botId);
return bot;
} catch (e) {
throw new BadRequestError({ message: "Failed to find bot by ID" });
}
};
const setBotActiveState = async ({ actor, botId, botKey, actorId, actorOrgId, isActive }: TSetActiveStateDTO) => {
const bot = await projectBotDAL.findById(botId);
if (!bot) throw new BadRequestError({ message: "Bot not found" });
@ -113,16 +127,6 @@ export const projectBotServiceFactory = ({
const { permission } = await permissionService.getProjectPermission(actor, actorId, bot.projectId, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations);
const project = await projectBotDAL.findProjectByBotId(botId);
if (!project) {
throw new BadRequestError({ message: "Failed to find project by bot ID" });
}
if (project.version === ProjectVersion.V2) {
throw new BadRequestError({ message: "Failed to set bot active for upgraded project. Bot is already active" });
}
if (isActive) {
if (!botKey?.nonce || !botKey?.encryptedKey) {
throw new BadRequestError({ message: "Failed to set bot active - missing bot key" });
@ -149,8 +153,6 @@ export const projectBotServiceFactory = ({
return {
findBotByProjectId,
setBotActiveState,
getBotPrivateKey,
findProjectByBotId,
getBotKey
};
};
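In the reworked getBotKey above, the bot's private key is decrypted symmetrically with whichever server secret matches the stored key encoding (ROOT_ENCRYPTION_KEY for base64-encoded keys, ENCRYPTION_KEY for utf8), and that private key is then used to asymmetrically decrypt the bot's copy of the project key. A minimal sketch of that two-step flow; the decrypt helpers are injected so no particular crypto library is assumed:

// Sketch of the key-selection logic in getBotKey: choose the symmetric server key
// matching the bot key's encoding, decrypt the bot's private key, then open the
// bot's copy of the project key with it. In the real service the helpers come from @app/lib/crypto.
type SymmetricDecrypt = (args: { ciphertext: string; iv: string; tag: string; key: string }) => string;
type AsymmetricDecrypt = (args: { ciphertext: string; nonce: string; privateKey: string; publicKey: string }) => string;

type BotRecord = {
  keyEncoding: "base64" | "utf8";
  encryptedPrivateKey: string;
  iv: string;
  tag: string;
  encryptedProjectKey: string;
  encryptedProjectKeyNonce: string;
  senderPublicKey: string;
};

const getBotProjectKey = (
  bot: BotRecord,
  cfg: { ENCRYPTION_KEY?: string; ROOT_ENCRYPTION_KEY?: string },
  decryptSymmetricBase64: SymmetricDecrypt,
  decryptSymmetricUtf8: SymmetricDecrypt,
  decryptAsymmetric: AsymmetricDecrypt
): string => {
  let botPrivateKey: string;
  if (cfg.ROOT_ENCRYPTION_KEY && bot.keyEncoding === "base64") {
    botPrivateKey = decryptSymmetricBase64({
      ciphertext: bot.encryptedPrivateKey,
      iv: bot.iv,
      tag: bot.tag,
      key: cfg.ROOT_ENCRYPTION_KEY
    });
  } else if (cfg.ENCRYPTION_KEY && bot.keyEncoding === "utf8") {
    botPrivateKey = decryptSymmetricUtf8({
      ciphertext: bot.encryptedPrivateKey,
      iv: bot.iv,
      tag: bot.tag,
      key: cfg.ENCRYPTION_KEY
    });
  } else {
    throw new Error("Failed to obtain bot copy of workspace key needed for operation");
  }

  return decryptAsymmetric({
    ciphertext: bot.encryptedProjectKey,
    nonce: bot.encryptedProjectKeyNonce,
    privateKey: botPrivateKey,
    publicKey: bot.senderPublicKey
  });
};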

View File

@ -1,4 +1,3 @@
import { TProjectBots } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";
export type TSetActiveStateDTO = {
@ -9,16 +8,3 @@ export type TSetActiveStateDTO = {
};
botId: string;
} & Omit<TProjectPermission, "projectId">;
export type TFindBotByProjectIdDTO = {
privateKey?: string;
publicKey?: string;
botKey?: {
nonce: string;
encryptedKey: string;
};
} & TProjectPermission;
export type TGetPrivateKeyDTO = {
bot: TProjectBots;
};

Some files were not shown because too many files have changed in this diff.