Compare commits

..

3 Commits

SHA1        Message                                            Date
7e4ccc35fc  Merge pull request #1255 from Infisical/patch-6    2023-12-18 19:20:37 -05:00
            add owner reference to InfisicalSecret
95f6f2dcea  increase chart version                             2023-12-18 19:20:17 -05:00
4ac8c004d7  add owner reference                                2023-12-18 19:17:25 -05:00
1314 changed files with 79696 additions and 116066 deletions

View File

@@ -3,7 +3,6 @@ on:
push:
tags:
- "infisical/v*.*.*"
- "!infisical/v*.*.*-postgres"
jobs:
backend-image:

View File

@@ -1,57 +0,0 @@
name: Release standalone docker image
on:
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

View File

@@ -3,7 +3,6 @@ on:
push:
tags:
- "infisical/v*.*.*"
- "!infisical/v*.*.*-postgres"
jobs:
infisical-standalone:

.gitignore (vendored, 3 changed lines)
View File

@@ -1,7 +1,6 @@
# backend
node_modules
.env
.env.test
.env.dev
.env.gamma
.env.prod
@@ -62,4 +61,4 @@ yarn-error.log*
# Editor specific
.vscode/*
frontend-build
frontend-build

View File

@@ -108,7 +108,7 @@ brews:
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
- name: "infisical@{{.Version}}"
- name: 'infisical@{{.Version}}'
tap:
owner: Infisical
name: homebrew-get-cli
@@ -186,14 +186,12 @@ aurs:
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Version }}"
- "infisical/cli:{{ .Major }}.{{ .Minor }}"
- "infisical/cli:{{ .Major }}"
- "infisical/cli:latest"
# dockers:
# - dockerfile: cli/docker/Dockerfile
# goos: linux
# goarch: amd64
# ids:
# - infisical
# image_templates:
# - "infisical/cli:{{ .Version }}"
# - "infisical/cli:latest"

View File

@@ -2,7 +2,7 @@ ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
FROM node:20-alpine AS base
FROM node:16-alpine AS base
FROM base AS frontend-dependencies
@@ -73,7 +73,6 @@ RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build
# Production stage
@@ -104,17 +103,14 @@ ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=backend-runner /app/dist/services/smtp/templates /backend/dist/templates
COPY --from=frontend-runner /app ./backend/frontend-build
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
WORKDIR /backend
ENV TELEMETRY_ENABLED true
@@ -123,8 +119,10 @@ HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
EXPOSE 8080
EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]

View File

@@ -7,9 +7,6 @@ push:
up-dev:
docker-compose -f docker-compose.dev.yml up --build
up-pg-dev:
docker compose -f docker-compose.pg.yml up --build
i-dev:
infisical run -- docker-compose -f docker-compose.dev.yml up --build

View File

@@ -129,7 +129,7 @@ Note that this security address should be used only for undisclosed vulnerabilities
## Contributing
Whether it's big or small, we love contributions. Check out our guide to see how to [get started](https://infisical.com/docs/contributing/getting-started).
Whether it's big or small, we love contributions. Check out our guide to see how to [get started](https://infisical.com/docs/contributing/overview).
Not sure where to get started? You can:

View File

@@ -1,11 +0,0 @@
node_modules
.env
.env.*
.git
.gitignore
Dockerfile
.dockerignore
docker-compose.*
.DS_Store
*.swp
*~

View File

@@ -1,2 +0,0 @@
node_modules
built

View File

@@ -1,33 +0,0 @@
# Build stage
FROM node:16-alpine AS build
WORKDIR /app
COPY package*.json ./
RUN npm ci --only-production
COPY . .
RUN npm run build
# Production stage
FROM node:16-alpine
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
EXPOSE 4000
CMD ["node", "build/index.js"]

View File

@@ -1,6 +0,0 @@
{
"watch": ["src"],
"ext": ".ts,.js",
"ignore": [],
"exec": "ts-node ./src/index.ts"
}

File diff suppressed because it is too large.

View File

@@ -1,148 +0,0 @@
{
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.319.0",
"@casl/ability": "^6.5.0",
"@casl/mongoose": "^7.2.1",
"@godaddy/terminus": "^4.12.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^19.0.5",
"@sentry/node": "^7.77.0",
"@sentry/tracing": "^7.48.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@types/crypto-js": "^4.1.1",
"@types/libsodium-wrappers": "^0.7.10",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.30.3",
"aws-sdk": "^2.1364.0",
"axios": "^1.6.0",
"axios-retry": "^3.4.0",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.4.0",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
"crypto-js": "^4.2.0",
"dotenv": "^16.0.1",
"express": "^4.18.1",
"express-async-errors": "^3.1.1",
"express-rate-limit": "^6.7.0",
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"infisical-node": "^1.2.1",
"ioredis": "^5.3.2",
"jmespath": "^0.16.0",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
"libsodium-wrappers": "^0.7.10",
"lodash": "^4.17.21",
"mongoose": "^7.4.1",
"mysql2": "^3.6.2",
"nanoid": "^3.3.6",
"node-cache": "^5.1.2",
"nodemailer": "^6.8.0",
"ora": "^5.4.1",
"passport": "^0.6.0",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"pg": "^8.11.3",
"pino": "^8.16.1",
"pino-http": "^8.5.1",
"posthog-node": "^2.6.0",
"probot": "^12.3.3",
"query-string": "^7.1.3",
"rate-limit-mongo": "^2.3.2",
"rimraf": "^3.0.2",
"swagger-ui-express": "^4.6.2",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"typescript": "^4.9.3",
"utility-types": "^3.10.0",
"zod": "^3.22.3"
},
"overrides": {
"rate-limit-mongo": {
"mongodb": "5.8.0"
}
},
"name": "infisical-api",
"version": "1.0.0",
"main": "src/index.js",
"scripts": {
"start": "node build/index.js",
"dev": "nodemon index.js",
"swagger-autogen": "node ./swagger/index.ts",
"build": "rimraf ./build && tsc && cp -R ./src/templates ./build && cp -R ./src/data ./build",
"lint": "eslint . --ext .ts",
"lint-and-fix": "eslint . --ext .ts --fix",
"lint-staged": "lint-staged",
"pretest": "docker compose -f test-resources/docker-compose.test.yml up -d",
"test": "cross-env NODE_ENV=test jest --verbose --testTimeout=10000 --detectOpenHandles; npm run posttest",
"test:ci": "npm test -- --watchAll=false --ci --reporters=default --reporters=jest-junit --reporters=github-actions --coverage --testLocationInResults --json --outputFile=coverage/report.json",
"posttest": "docker compose -f test-resources/docker-compose.test.yml down"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Infisical/infisical-api.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/Infisical/infisical-api/issues"
},
"homepage": "https://github.com/Infisical/infisical-api#readme",
"description": "",
"devDependencies": {
"@jest/globals": "^29.3.1",
"@posthog/plugin-scaffold": "^1.3.4",
"@swc/core": "^1.3.99",
"@swc/helpers": "^0.5.3",
"@types/bcrypt": "^5.0.0",
"@types/bcryptjs": "^2.4.2",
"@types/bull": "^4.10.0",
"@types/cookie-parser": "^1.4.3",
"@types/cors": "^2.8.12",
"@types/express": "^4.17.14",
"@types/jest": "^29.5.0",
"@types/jmespath": "^0.15.1",
"@types/jsonwebtoken": "^8.5.9",
"@types/lodash": "^4.14.191",
"@types/node": "^18.11.3",
"@types/nodemailer": "^6.4.6",
"@types/passport": "^1.0.12",
"@types/pg": "^8.10.7",
"@types/picomatch": "^2.3.0",
"@types/pino": "^7.0.5",
"@types/supertest": "^2.0.12",
"@types/swagger-jsdoc": "^6.0.1",
"@types/swagger-ui-express": "^4.1.3",
"@typescript-eslint/eslint-plugin": "^5.54.0",
"@typescript-eslint/parser": "^5.40.1",
"cross-env": "^7.0.3",
"eslint": "^8.26.0",
"eslint-plugin-unused-imports": "^2.0.0",
"install": "^0.13.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"nodemon": "^2.0.19",
"npm": "^8.19.3",
"pino-pretty": "^10.2.3",
"regenerator-runtime": "^0.14.0",
"smee-client": "^1.2.3",
"supertest": "^6.3.3",
"swagger-autogen": "^2.23.5",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1"
},
"jest-junit": {
"outputDirectory": "reports",
"outputName": "jest-junit.xml",
"ancestorSeparator": " ",
"uniqueOutputName": "false",
"suiteNameTemplate": "{filepath}",
"classNameTemplate": "{classname}",
"titleTemplate": "{title}"
}
}

View File

@@ -1,69 +0,0 @@
export enum ActorType { // would extend to AWS, Azure, ...
USER = "user", // userIdentity
SERVICE = "service",
IDENTITY = "identity"
}
export enum UserAgentType {
WEB = "web",
CLI = "cli",
K8_OPERATOR = "k8-operator",
TERRAFORM = "terraform",
OTHER = "other",
PYTHON_SDK = "InfisicalPythonSDK",
NODE_SDK = "InfisicalNodeSDK"
}
export enum EventType {
GET_SECRETS = "get-secrets",
GET_SECRET = "get-secret",
REVEAL_SECRET = "reveal-secret",
CREATE_SECRET = "create-secret",
CREATE_SECRETS = "create-secrets",
UPDATE_SECRET = "update-secret",
UPDATE_SECRETS = "update-secrets",
DELETE_SECRET = "delete-secret",
DELETE_SECRETS = "delete-secrets",
GET_WORKSPACE_KEY = "get-workspace-key",
AUTHORIZE_INTEGRATION = "authorize-integration",
UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
CREATE_INTEGRATION = "create-integration",
DELETE_INTEGRATION = "delete-integration",
ADD_TRUSTED_IP = "add-trusted-ip",
UPDATE_TRUSTED_IP = "update-trusted-ip",
DELETE_TRUSTED_IP = "delete-trusted-ip",
CREATE_SERVICE_TOKEN = "create-service-token", // v2
DELETE_SERVICE_TOKEN = "delete-service-token", // v2
CREATE_IDENTITY = "create-identity",
UPDATE_IDENTITY = "update-identity",
DELETE_IDENTITY = "delete-identity",
LOGIN_IDENTITY_UNIVERSAL_AUTH = "login-identity-universal-auth",
ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
CREATE_ENVIRONMENT = "create-environment",
UPDATE_ENVIRONMENT = "update-environment",
DELETE_ENVIRONMENT = "delete-environment",
ADD_WORKSPACE_MEMBER = "add-workspace-member",
ADD_BATCH_WORKSPACE_MEMBER = "add-workspace-members",
REMOVE_WORKSPACE_MEMBER = "remove-workspace-member",
CREATE_FOLDER = "create-folder",
UPDATE_FOLDER = "update-folder",
DELETE_FOLDER = "delete-folder",
CREATE_WEBHOOK = "create-webhook",
UPDATE_WEBHOOK_STATUS = "update-webhook-status",
DELETE_WEBHOOK = "delete-webhook",
GET_SECRET_IMPORTS = "get-secret-imports",
CREATE_SECRET_IMPORT = "create-secret-import",
UPDATE_SECRET_IMPORT = "update-secret-import",
DELETE_SECRET_IMPORT = "delete-secret-import",
UPDATE_USER_WORKSPACE_ROLE = "update-user-workspace-role",
UPDATE_USER_WORKSPACE_DENIED_PERMISSIONS = "update-user-workspace-denied-permissions",
SECRET_APPROVAL_MERGED = "secret-approval-merged",
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened"
}

View File

@@ -1,31 +0,0 @@
import identities from "./identities";
import secret from "./secret";
import secretSnapshot from "./secretSnapshot";
import organizations from "./organizations";
import sso from "./sso";
import users from "./users";
import workspace from "./workspace";
import cloudProducts from "./cloudProducts";
import secretScanning from "./secretScanning";
import roles from "./role";
import secretApprovalPolicy from "./secretApprovalPolicy";
import secretApprovalRequest from "./secretApprovalRequest";
import secretRotationProvider from "./secretRotationProvider";
import secretRotation from "./secretRotation";
export {
identities,
secret,
secretSnapshot,
organizations,
sso,
users,
workspace,
cloudProducts,
secretScanning,
roles,
secretApprovalPolicy,
secretApprovalRequest,
secretRotationProvider,
secretRotation
};

View File

@@ -1,194 +0,0 @@
import { AuthData } from "../../../interfaces/middleware";
import jwt from "jsonwebtoken";
import { getAuthSecret } from "../../../config";
import { ActorType } from "../../../ee/models";
import { AuthMode, AuthTokenType } from "../../../variables";
import { UnauthorizedRequestError } from "../../errors";
import {
validateAPIKey,
validateAPIKeyV2,
validateIdentity,
validateJWT,
validateServiceTokenV2
} from "../authModeValidators";
import { getUserAgentType } from "../../posthog";
export * from "./authDataExtractors";
interface ExtractAuthModeParams {
headers: { [key: string]: string | string[] | undefined };
}
interface ExtractAuthModeReturn {
authMode: AuthMode;
authTokenValue: string;
}
interface GetAuthDataParams {
authMode: AuthMode;
authTokenValue: string;
ipAddress: string;
userAgent: string;
}
/**
* Returns the recognized authentication mode based on token in [headers]; accepted token types include:
* - SERVICE_TOKEN
* - API_KEY
* - JWT
* - IDENTITY_ACCESS_TOKEN (from identity)
* - API_KEY_V2
* @param {Object} params
* @param {Object.<string, (string|string[]|undefined)>} params.headers - The HTTP request headers, usually from Express's `req.headers`.
* @returns {Promise<AuthMode>} The derived authentication mode based on the headers.
* @throws {UnauthorizedError} Throws an error if no applicable authMode is found.
*/
export const extractAuthMode = async ({
headers
}: ExtractAuthModeParams): Promise<ExtractAuthModeReturn> => {
const apiKey = headers["x-api-key"] as string;
const authHeader = headers["authorization"] as string;
if (apiKey) {
return { authMode: AuthMode.API_KEY, authTokenValue: apiKey };
}
if (!authHeader)
throw UnauthorizedRequestError({
message: "Failed to authenticate unknown authentication method"
});
if (!authHeader.startsWith("Bearer "))
throw UnauthorizedRequestError({
message: "Failed to authenticate unknown authentication method"
});
const authTokenValue = authHeader.slice(7);
if (authTokenValue.startsWith("st.")) {
return { authMode: AuthMode.SERVICE_TOKEN, authTokenValue };
}
const decodedToken = <jwt.AuthnJwtPayload>jwt.verify(authTokenValue, await getAuthSecret());
switch (decodedToken.authTokenType) {
case AuthTokenType.ACCESS_TOKEN:
return { authMode: AuthMode.JWT, authTokenValue };
case AuthTokenType.API_KEY:
return { authMode: AuthMode.API_KEY_V2, authTokenValue };
case AuthTokenType.IDENTITY_ACCESS_TOKEN:
return { authMode: AuthMode.IDENTITY_ACCESS_TOKEN, authTokenValue };
default:
throw UnauthorizedRequestError({
message: "Failed to authenticate unknown authentication method"
});
}
};
export const getAuthData = async ({
authMode,
authTokenValue,
ipAddress,
userAgent
}: GetAuthDataParams): Promise<AuthData> => {
const userAgentType = getUserAgentType(userAgent);
switch (authMode) {
case AuthMode.SERVICE_TOKEN: {
const serviceTokenData = await validateServiceTokenV2({
authTokenValue
});
return {
actor: {
type: ActorType.SERVICE,
metadata: {
serviceId: serviceTokenData._id.toString(),
name: serviceTokenData.name
}
},
authPayload: serviceTokenData,
ipAddress,
userAgent,
userAgentType
};
}
case AuthMode.IDENTITY_ACCESS_TOKEN: {
const identity = await validateIdentity({
authTokenValue,
ipAddress
});
return {
actor: {
type: ActorType.IDENTITY,
metadata: {
identityId: identity._id.toString(),
name: identity.name
}
},
authPayload: identity,
ipAddress,
userAgent,
userAgentType
};
}
case AuthMode.API_KEY: {
const user = await validateAPIKey({
authTokenValue
});
return {
actor: {
type: ActorType.USER,
metadata: {
userId: user._id.toString(),
email: user.email
}
},
authPayload: user,
ipAddress,
userAgent,
userAgentType
};
}
case AuthMode.API_KEY_V2: {
const user = await validateAPIKeyV2({
authTokenValue
});
return {
actor: {
type: ActorType.USER,
metadata: {
userId: user._id.toString(),
email: user.email
}
},
authPayload: user,
ipAddress,
userAgent,
userAgentType
};
}
case AuthMode.JWT: {
const user = await validateJWT({
authTokenValue
});
return {
actor: {
type: ActorType.USER,
metadata: {
userId: user._id.toString(),
email: user.email
}
},
authPayload: user,
ipAddress,
userAgent,
userAgentType
};
}
}
};
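For orientation, a minimal illustrative sketch (not part of this changeset) of how the removed extractAuthMode and getAuthData helpers above could be wired into an Express middleware. The "./auth" import path, the injectAuthData name, and attaching the result to the request are assumptions for illustration only.

// Hypothetical usage sketch -- not present in the diff above.
import { NextFunction, Request, Response } from "express";

import { extractAuthMode, getAuthData } from "./auth"; // assumed export path

export const injectAuthData = async (req: Request, _res: Response, next: NextFunction) => {
  try {
    // 1. Classify the presented credential (x-api-key header vs. the Bearer token variants).
    const { authMode, authTokenValue } = await extractAuthMode({ headers: req.headers });
    // 2. Validate the credential and normalize the caller into a single AuthData shape.
    const authData = await getAuthData({
      authMode,
      authTokenValue,
      ipAddress: req.ip ?? "",
      userAgent: req.headers["user-agent"] ?? ""
    });
    // Attach for downstream handlers; the Request type is not augmented here, hence the cast.
    (req as Request & { authData?: typeof authData }).authData = authData;
    return next();
  } catch (err) {
    return next(err);
  }
};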

View File

@@ -1,23 +0,0 @@
{
"compilerOptions": {
"target": "es2016",
"lib": ["es6", "es2021"],
"module": "commonjs",
"rootDir": "src",
"resolveJsonModule": true,
"allowJs": true,
"outDir": "build",
"esModuleInterop": true,
"moduleResolution": "node",
"forceConsistentCasingInFileNames": true,
"strict": true,
"noImplicitAny": true,
"skipLibCheck": true,
"typeRoots": ["./src/types", "./node_modules/@types"]
},
"ts-node": {
"swc": true
},
"include": ["src/**/*"],
"exclude": ["node_modules"]
}

View File

@@ -1,2 +1,2 @@
vitest-environment-infisical.ts
vitest.config.ts
node_modules
built

View File

@@ -1,56 +0,0 @@
module.exports = {
root: true,
env: {
browser: true,
es2021: true
},
extends: ["airbnb-base", "airbnb-typescript/base", "prettier"],
plugins: ["prettier", "simple-import-sort", "import"],
parserOptions: {
ecmaVersion: "latest",
sourceType: "module",
project: "./tsconfig.json",
tsconfigRootDir: __dirname
},
rules: {
"@typescript-eslint/no-empty-function": "off",
"consistent-return": "off", // my style
"import/order": "off", // for simple-import-order
"import/prefer-default-export": "off", // why
"no-restricted-syntax": "off",
"import/first": "error",
"import/newline-after-import": "error",
"import/no-duplicates": "error",
"simple-import-sort/exports": "error",
"simple-import-sort/imports": [
"warn",
{
groups: [
// Side effect imports.
["^\\u0000"],
// Node.js builtins prefixed with `node:`.
["^node:"],
// Packages.
// Things that start with a letter (or digit or underscore), or `@` followed by a letter.
["^@?\\w"],
["^@app"],
["@lib"],
["@server"],
// Absolute imports and other imports such as Vue-style `@/foo`.
// Anything not matched in another group.
["^"],
// Relative imports.
// Anything that starts with a dot.
["^\\."]
]
}
]
},
settings: {
"import/resolver": {
typescript: {
project: ["./tsconfig.json"]
}
}
}
};

backend/.gitignore (vendored, 1 changed line)
View File

@@ -1 +0,0 @@
dist

View File

@@ -1,7 +0,0 @@
{
"singleQuote": false,
"printWidth": 100,
"trailingComma": "none",
"tabWidth": 2,
"semi": true
}

View File

@@ -1,5 +1,5 @@
# Build stage
FROM node:20-alpine AS build
FROM node:16-alpine AS build
WORKDIR /app
@@ -10,7 +10,7 @@ COPY . .
RUN npm run build
# Production stage
FROM node:20-alpine
FROM node:16-alpine
WORKDIR /app
@@ -28,8 +28,6 @@ RUN apk add --no-cache bash curl && curl -1sLf \
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
ENV HOST=0.0.0.0
EXPOSE 4000
CMD ["npm", "start"]
CMD ["node", "build/index.js"]

View File

@@ -1,18 +0,0 @@
FROM node:20-alpine
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm install
COPY . .
ENV HOST=0.0.0.0
CMD ["npm", "run", "dev:docker"]

View File

@@ -1,24 +0,0 @@
import { TQueueServiceFactory } from "@app/queue";
export const mockQueue = (): TQueueServiceFactory => {
const queues: Record<string, unknown> = {};
const workers: Record<string, unknown> = {};
const job: Record<string, unknown> = {};
const events: Record<string, unknown> = {};
return {
queue: async (name, jobData) => {
job[name] = jobData;
},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
listen: async (name, event) => {
events[name] = event;
},
stopRepeatableJobByJobId: async () => true
};
};

View File

@@ -1,10 +0,0 @@
import { TSmtpSendMail, TSmtpService } from "@app/services/smtp/smtp-service";
export const mockSmtpServer = (): TSmtpService => {
const storage: TSmtpSendMail[] = [];
return {
sendMail: async (data) => {
storage.push(data);
}
};
};

View File

@@ -1,45 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import jsrp from "jsrp";
describe("Login V1 Router", async () => {
// eslint-disable-next-line
const client = new jsrp.client();
await new Promise((resolve) => {
client.init({ username: seedData1.email, password: seedData1.password }, () => resolve(null));
});
let clientProof: string;
test("Login first phase", async () => {
const res = await testServer.inject({
method: "POST",
url: "/api/v3/auth/login1",
body: {
email: "test@localhost.local",
clientPublicKey: client.getPublicKey()
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("serverPublicKey");
expect(payload).toHaveProperty("salt");
client.setSalt(payload.salt);
client.setServerPublicKey(payload.serverPublicKey);
clientProof = client.getProof(); // called M1
});
test("Login second phase", async () => {
const res = await testServer.inject({
method: "POST",
url: "/api/v3/auth/login2",
body: {
email: seedData1.email,
clientProof
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("mfaEnabled");
expect(payload).toHaveProperty("token");
expect(payload.mfaEnabled).toBeFalsy();
});
});

View File

@@ -1,19 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
describe("Org V1 Router", async () => {
test("GET Org list", async () => {
const res = await testServer.inject({
method: "GET",
url: "/api/v1/organization",
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("organizations");
expect(payload).toEqual({
organizations: [expect.objectContaining({ name: seedData1.organization.name })]
});
});
});

View File

@@ -1,151 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
import { DEFAULT_PROJECT_ENVS } from "@app/db/seeds/3-project";
describe("Project Environment Router", async () => {
test("Get default environments", async () => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("workspace");
// check for default environments
expect(payload).toEqual({
workspace: expect.objectContaining({
name: seedData1.project.name,
id: seedData1.project.id,
slug: seedData1.project.slug,
environments: expect.arrayContaining([
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
])
})
});
// ensure only two default environments exist
expect(payload.workspace.environments.length).toBe(3);
});
const mockProjectEnv = { name: "temp", slug: "temp", id: "" }; // id will be filled in create op
test("Create environment", async () => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/workspace/${seedData1.project.id}/environments`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: mockProjectEnv.name,
slug: mockProjectEnv.slug
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: expect.any(String),
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
projectId: seedData1.project.id,
position: DEFAULT_PROJECT_ENVS.length + 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
mockProjectEnv.id = payload.environment.id;
});
test("Update environment", async () => {
const updatedName = { name: "temp#2", slug: "temp2" };
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
name: updatedName.name,
slug: updatedName.slug,
position: 1
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: expect.any(String),
name: updatedName.name,
slug: updatedName.slug,
projectId: seedData1.project.id,
position: 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
mockProjectEnv.name = updatedName.name;
mockProjectEnv.slug = updatedName.slug;
});
test("Delete environment", async () => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/workspace/${seedData1.project.id}/environments/${mockProjectEnv.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("environment");
expect(payload.environment).toEqual(
expect.objectContaining({
id: expect.any(String),
name: mockProjectEnv.name,
slug: mockProjectEnv.slug,
position: 1,
createdAt: expect.any(String),
updatedAt: expect.any(String)
})
);
});
// after all these operations the list of environments should still be the same
test("Default list of environment", async () => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/workspace/${seedData1.project.id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("workspace");
// check for default environments
expect(payload).toEqual({
workspace: expect.objectContaining({
name: seedData1.project.name,
id: seedData1.project.id,
slug: seedData1.project.slug,
environments: expect.arrayContaining([
expect.objectContaining(DEFAULT_PROJECT_ENVS[0]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[1]),
expect.objectContaining(DEFAULT_PROJECT_ENVS[2])
])
})
});
// ensure only two default environments exist
expect(payload.workspace.environments.length).toBe(3);
});
});

View File

@@ -1,155 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
describe("Secret Folder Router", async () => {
test.each([
{ name: "folder1", path: "/" }, // one in root
{ name: "folder1", path: "/level1/level2" }, // then create a deep one creating intermediate ones
{ name: "folder2", path: "/" },
{ name: "folder1", path: "/level1/level2" } // this should not create folder return same thing
])("Create folder $name in $path", async ({ name, path }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name,
path
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
// check for default environments
expect(payload).toEqual({
folder: expect.objectContaining({
name,
id: expect.any(String)
})
});
});
test.each([
{
path: "/",
expected: {
folders: [{ name: "folder1" }, { name: "level1" }, { name: "folder2" }],
length: 3
}
},
{ path: "/level1/level2", expected: { folders: [{ name: "folder1" }], length: 1 } }
])("Get folders $path", async ({ path, expected }) => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folders");
expect(payload.folders.length).toBe(expected.length);
expect(payload).toEqual({ folders: expected.folders.map((el) => expect.objectContaining(el)) });
});
let toBeDeleteFolderId = "";
test("Update a deep folder", async () => {
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/folders/folder1`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
name: "folder-updated",
path: "/level1/level2"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
expect(payload.folder).toEqual(
expect.objectContaining({
id: expect.any(String),
name: "folder-updated"
})
);
toBeDeleteFolderId = payload.folder.id;
const resUpdatedFolders = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/level1/level2"
}
});
expect(resUpdatedFolders.statusCode).toBe(200);
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
expect(updatedFolderList).toHaveProperty("folders");
expect(updatedFolderList.folders.length).toEqual(1);
expect(updatedFolderList.folders[0].name).toEqual("folder-updated");
});
test("Delete a deep folder", async () => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/folders/${toBeDeleteFolderId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/level1/level2"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("folder");
expect(payload.folder).toEqual(
expect.objectContaining({
id: expect.any(String),
name: "folder-updated"
})
);
const resUpdatedFolders = await testServer.inject({
method: "GET",
url: `/api/v1/folders`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/level1/level2"
}
});
expect(resUpdatedFolders.statusCode).toBe(200);
const updatedFolderList = JSON.parse(resUpdatedFolders.payload);
expect(updatedFolderList).toHaveProperty("folders");
expect(updatedFolderList.folders.length).toEqual(0);
});
});

View File

@@ -1,179 +0,0 @@
import { seedData1 } from "@app/db/seed-data";
describe("Secret Folder Router", async () => {
test.each([
{ importEnv: "dev", importPath: "/" }, // one in root
{ importEnv: "staging", importPath: "/" } // then create a deep one creating intermediate ones
])("Create secret import $importEnv with path $importPath", async ({ importPath, importEnv }) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
environment: importEnv,
path: importPath
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check for default environments
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.any(String),
id: expect.any(String)
})
})
);
});
let testSecretImportId = "";
test("Get secret imports", async () => {
const res = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImports");
expect(payload.secretImports.length).toBe(2);
testSecretImportId = payload.secretImports[0].id;
expect(payload.secretImports).toEqual(
expect.arrayContaining([
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.any(String),
id: expect.any(String)
})
})
])
);
});
test("Update secret import position", async () => {
const res = await testServer.inject({
method: "PATCH",
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/",
import: {
position: 2
}
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check for default environments
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
position: 2,
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.any(String),
id: expect.any(String)
})
})
);
const secretImportsListRes = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(secretImportsListRes.statusCode).toBe(200);
const secretImportList = JSON.parse(secretImportsListRes.payload);
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports[1].id).toEqual(testSecretImportId);
});
test("Delete secret import position", async () => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/secret-imports/${testSecretImportId}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("secretImport");
// check for default environments
expect(payload.secretImport).toEqual(
expect.objectContaining({
id: expect.any(String),
importPath: expect.any(String),
importEnv: expect.objectContaining({
name: expect.any(String),
slug: expect.any(String),
id: expect.any(String)
})
})
);
const secretImportsListRes = await testServer.inject({
method: "GET",
url: `/api/v1/secret-imports`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
query: {
workspaceId: seedData1.project.id,
environment: seedData1.environment.slug,
path: "/"
}
});
expect(secretImportsListRes.statusCode).toBe(200);
const secretImportList = JSON.parse(secretImportsListRes.payload);
expect(secretImportList).toHaveProperty("secretImports");
expect(secretImportList.secretImports.length).toEqual(1);
expect(secretImportList.secretImports[0].position).toEqual(1);
});
});

View File

@@ -1,9 +0,0 @@
describe("Status V1 Router", async () => {
test("Simple check", async () => {
const res = await testServer.inject({
method: "GET",
url: "/api/status"
});
expect(res.statusCode).toBe(200);
});
});

View File

@@ -1,75 +0,0 @@
// import { main } from "@app/server/app";
import { initEnvConfig } from "@app/lib/config/env";
import dotenv from "dotenv";
import knex from "knex";
import path from "path";
import { mockSmtpServer } from "./mocks/smtp";
import { initLogger } from "@app/lib/logger";
import jwt from "jsonwebtoken";
import "ts-node/register";
import { main } from "@app/server/app";
import { mockQueue } from "./mocks/queue";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { seedData1 } from "@app/db/seed-data";
dotenv.config({ path: path.join(__dirname, "../.env.test") });
export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const db = knex({
client: "pg",
connection: process.env.DB_CONNECTION_URI,
migrations: {
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
seeds: {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
}
});
try {
await db.migrate.latest();
await db.seed.run();
const smtp = mockSmtpServer();
const queue = mockQueue();
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const server = await main({ db, smtp, logger, queue });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
globalThis.jwtAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.ACCESS_TOKEN,
userId: seedData1.id,
tokenVersionId: seedData1.token.id,
accessVersion: 1
},
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
await db.destroy();
throw error;
}
// custom setup
return {
async teardown() {
// @ts-expect-error type
await globalThis.testServer.close();
// @ts-expect-error type
delete globalThis.testServer;
// @ts-expect-error type
delete globalThis.jwtToken;
// called after all tests with this env have been run
await db.migrate.rollback({}, true);
await db.destroy();
}
};
}
};

View File

Binary image file changed (before: 493 KiB, after: 493 KiB; width/height not captured).

View File

@@ -1,6 +1,6 @@
{
"watch": ["src"],
"ext": ".ts,.js",
"ignore": [],
"exec": "tsx ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine"
}
"watch": ["src"],
"ext": ".ts,.js",
"ignore": [],
"exec": "ts-node ./src/index.ts"
}

backend/package-lock.json (generated, 33435 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,126 +1,148 @@
{
"name": "backend",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"dev": "tsx watch --clear-screen=false ./src/main.ts | pino-pretty --colorize --colorizeObjects --singleLine",
"dev:docker": "nodemon",
"build": "rimraf dist && tsup && cp -R ./src/lib/validator/disposable_emails.txt ./dist && cp -R ./src/services/smtp/templates ./dist",
"start": "node dist/main.mjs",
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:e2e": "vitest run -c vitest.e2e.config.ts",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed:run": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest && npm run seed:run"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@types/bcrypt": "^5.0.2",
"@types/jmespath": "^0.15.2",
"@types/jsonwebtoken": "^9.0.5",
"@types/jsrp": "^0.2.6",
"@types/libsodium-wrappers": "^0.7.13",
"@types/lodash.isequal": "^4.5.8",
"@types/node": "^20.9.5",
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/prompt-sync": "^4.2.3",
"@types/uuid": "^9.0.7",
"@typescript-eslint/eslint-plugin": "^6.13.2",
"@typescript-eslint/parser": "^6.13.2",
"eslint": "^8.56.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-prettier": "^9.1.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-simple-import-sort": "^10.0.0",
"nodemon": "^3.0.2",
"pino-pretty": "^10.2.3",
"prompt-sync": "^4.2.0",
"rimraf": "^5.0.5",
"ts-node": "^10.9.1",
"tsconfig-paths": "^4.2.0",
"tsup": "^8.0.1",
"tsx": "^4.4.0",
"typescript": "^5.3.2",
"vite-tsconfig-paths": "^4.2.2",
"vitest": "^1.0.4"
},
"dependencies": {
"@aws-sdk/client-secrets-manager": "^3.485.0",
"@aws-sdk/client-secrets-manager": "^3.319.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.2.0",
"@fastify/cors": "^8.4.1",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.12.0",
"@fastify/swagger-ui": "^1.10.1",
"@casl/mongoose": "^7.2.1",
"@godaddy/terminus": "^4.12.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octokit/rest": "^19.0.5",
"@sentry/node": "^7.77.0",
"@sentry/tracing": "^7.48.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@types/crypto-js": "^4.1.1",
"@types/libsodium-wrappers": "^0.7.10",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1532.0",
"axios": "^1.6.2",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.1.1",
"dotenv": "^16.3.1",
"eslint-config-airbnb-typescript": "^17.1.0",
"fastify": "^4.24.3",
"fastify-plugin": "^4.5.1",
"handlebars": "^4.7.8",
"argon2": "^0.30.3",
"aws-sdk": "^2.1364.0",
"axios": "^1.6.0",
"axios-retry": "^3.4.0",
"bcrypt": "^5.1.0",
"bigint-conversion": "^2.4.0",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
"crypto-js": "^4.2.0",
"dotenv": "^16.0.1",
"express": "^4.18.1",
"express-async-errors": "^3.1.1",
"express-rate-limit": "^6.7.0",
"express-validator": "^6.14.2",
"handlebars": "^4.7.7",
"helmet": "^5.1.1",
"infisical-node": "^1.2.1",
"ioredis": "^5.3.2",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"js-yaml": "^4.1.0",
"jsonwebtoken": "^9.0.0",
"jsrp": "^0.2.4",
"knex": "^3.0.1",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"mysql2": "^3.6.5",
"nanoid": "^5.0.4",
"libsodium-wrappers": "^0.7.10",
"lodash": "^4.17.21",
"mongoose": "^7.4.1",
"mysql2": "^3.6.2",
"nanoid": "^3.3.6",
"node-cache": "^5.1.2",
"nodemailer": "^6.9.7",
"ora": "^7.0.1",
"nodemailer": "^6.8.0",
"ora": "^5.4.1",
"passport": "^0.6.0",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"pg": "^8.11.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"posthog-node": "^3.6.0",
"probot": "^12.3.3",
"smee-client": "^2.0.0",
"pino": "^8.16.1",
"pino-http": "^8.5.1",
"posthog-node": "^2.6.0",
"probot": "^12.3.1",
"query-string": "^7.1.3",
"rate-limit-mongo": "^2.3.2",
"rimraf": "^3.0.2",
"swagger-ui-express": "^4.6.2",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.22.0"
"typescript": "^4.9.3",
"utility-types": "^3.10.0",
"zod": "^3.22.3"
},
"overrides": {
"rate-limit-mongo": {
"mongodb": "5.8.0"
}
},
"name": "infisical-api",
"version": "1.0.0",
"main": "src/index.js",
"scripts": {
"start": "node build/index.js",
"dev": "nodemon index.js",
"swagger-autogen": "node ./swagger/index.ts",
"build": "rimraf ./build && tsc && cp -R ./src/templates ./build && cp -R ./src/data ./build",
"lint": "eslint . --ext .ts",
"lint-and-fix": "eslint . --ext .ts --fix",
"lint-staged": "lint-staged",
"pretest": "docker compose -f test-resources/docker-compose.test.yml up -d",
"test": "cross-env NODE_ENV=test jest --verbose --testTimeout=10000 --detectOpenHandles; npm run posttest",
"test:ci": "npm test -- --watchAll=false --ci --reporters=default --reporters=jest-junit --reporters=github-actions --coverage --testLocationInResults --json --outputFile=coverage/report.json",
"posttest": "docker compose -f test-resources/docker-compose.test.yml down"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Infisical/infisical-api.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/Infisical/infisical-api/issues"
},
"homepage": "https://github.com/Infisical/infisical-api#readme",
"description": "",
"devDependencies": {
"@jest/globals": "^29.3.1",
"@posthog/plugin-scaffold": "^1.3.4",
"@swc/core": "^1.3.99",
"@swc/helpers": "^0.5.3",
"@types/bcrypt": "^5.0.0",
"@types/bcryptjs": "^2.4.2",
"@types/bull": "^4.10.0",
"@types/cookie-parser": "^1.4.3",
"@types/cors": "^2.8.12",
"@types/express": "^4.17.14",
"@types/jest": "^29.5.0",
"@types/jmespath": "^0.15.1",
"@types/jsonwebtoken": "^8.5.9",
"@types/lodash": "^4.14.191",
"@types/node": "^18.11.3",
"@types/nodemailer": "^6.4.6",
"@types/passport": "^1.0.12",
"@types/pg": "^8.10.7",
"@types/picomatch": "^2.3.0",
"@types/pino": "^7.0.5",
"@types/supertest": "^2.0.12",
"@types/swagger-jsdoc": "^6.0.1",
"@types/swagger-ui-express": "^4.1.3",
"@typescript-eslint/eslint-plugin": "^5.54.0",
"@typescript-eslint/parser": "^5.40.1",
"cross-env": "^7.0.3",
"eslint": "^8.26.0",
"eslint-plugin-unused-imports": "^2.0.0",
"install": "^0.13.0",
"jest": "^29.3.1",
"jest-junit": "^15.0.0",
"nodemon": "^2.0.19",
"npm": "^8.19.3",
"pino-pretty": "^10.2.3",
"regenerator-runtime": "^0.14.0",
"smee-client": "^1.2.3",
"supertest": "^6.3.3",
"swagger-autogen": "^2.23.5",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1"
},
"jest-junit": {
"outputDirectory": "reports",
"outputName": "jest-junit.xml",
"ancestorSeparator": " ",
"uniqueOutputName": "false",
"suiteNameTemplate": "{filepath}",
"classNameTemplate": "{classname}",
"titleTemplate": "{title}"
}
}

View File

@@ -1,123 +0,0 @@
/* eslint-disable */
import { mkdirSync, writeFileSync } from "fs";
import path from "path";
import promptSync from "prompt-sync";
const prompt = promptSync({
sigint: true
});
console.log(`
Component List
--------------
1. Service component
2. DAL component
3. Router component
`);
const componentType = parseInt(prompt("Select a component: "), 10);
if (componentType === 1) {
const componentName = prompt("Enter service name: ");
const dir = path.join(__dirname, `../src/services/${componentName}`);
const pascalCase = componentName
.split("-")
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
.join("");
const camelCase = componentName
.split("-")
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
.join("");
const dalTypeName = `T${pascalCase}DALFactory`;
const dalName = `${camelCase}DALFactory`;
const serviceTypeName = `T${pascalCase}ServiceFactory`;
const serviceName = `${camelCase}ServiceFactory`;
mkdirSync(dir);
writeFileSync(
path.join(dir, `${componentName}-dal.ts`),
`import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
export const ${dalName} = (db: TDbClient) => {
return { };
};
`
);
writeFileSync(
path.join(dir, `${componentName}-service.ts`),
`import { ${dalTypeName} } from "./${componentName}-dal";
type ${serviceTypeName}Dep = {
${camelCase}DAL: ${dalTypeName};
};
export type ${serviceTypeName} = ReturnType<typeof ${serviceName}>;
export const ${serviceName} = ({ ${camelCase}DAL }: ${serviceTypeName}Dep) => {
return {};
};
`
);
writeFileSync(path.join(dir, `${componentName}-types.ts`), "");
} else if (componentType === 2) {
const componentName = prompt("Enter service name: ");
const componentPath = prompt("Path wrt service folder: ");
const pascalCase = componentName
.split("-")
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
.join("");
const camelCase = componentName
.split("-")
.map((el, index) => (index === 0 ? el : `${el[0].toUpperCase()}${el.slice(1)}`))
.join("");
const dalTypeName = `T${pascalCase}DALFactory`;
const dalName = `${camelCase}DALFactory`;
writeFileSync(
path.join(__dirname, "../src/services", componentPath, `${componentName}-dal.ts`),
`import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
export type ${dalTypeName} = ReturnType<typeof ${dalName}>;
export const ${dalName} = (db: TDbClient) => {
return { };
};
`
);
} else if (componentType === 3) {
const name = prompt("Enter router name: ");
const version = prompt("Version number: ");
const pascalCase = name
.split("-")
.map((el) => `${el[0].toUpperCase()}${el.slice(1)}`)
.join("");
writeFileSync(
path.join(__dirname, `../src/server/routes/v${Number(version)}/${name}-router.ts`),
`import { z } from "zod";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const register${pascalCase}Router = async (server: FastifyZodProvider) => {
server.route({
url: "/",
method: "GET",
schema: {
params: z.object({}),
response: {
200: z.object({})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {}
});
};
`
);
}

View File

@@ -1,16 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for migration: ");
execSync(
`npx knex migrate:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${migrationName}`,
{ stdio: "inherit" }
);

View File

@@ -1,17 +0,0 @@
/* eslint-disable */
import { execSync } from "child_process";
import { readdirSync } from "fs";
import path from "path";
import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
const migrationName = prompt("Enter name for seedfile: ");
const fileCounter = readdirSync(path.join(__dirname, "../src/db/seed")).length || 1;
execSync(
`npx knex seed:make --knexfile ${path.join(
__dirname,
"../src/db/knexfile.ts"
)} -x ts ${fileCounter}-${migrationName}`,
{ stdio: "inherit" }
);

View File

@@ -1,169 +0,0 @@
/* eslint-disable */
import dotenv from "dotenv";
import path from "path";
import knex from "knex";
import { writeFileSync } from "fs";
import promptSync from "prompt-sync";
const prompt = promptSync({ sigint: true });
dotenv.config({
path: path.join(__dirname, "../.env"),
debug: true
});
const db = knex({
client: "pg",
connection: process.env.DB_CONNECTION_URI
});
const getZodPrimitiveType = (type: string) => {
switch (type) {
case "uuid":
return "z.string().uuid()";
case "character varying":
return "z.string()";
case "ARRAY":
return "z.string().array()";
case "boolean":
return "z.boolean()";
case "jsonb":
return "z.unknown()";
case "json":
return "z.unknown()";
case "timestamp with time zone":
return "z.date()";
case "integer":
return "z.number()";
case "bigint":
return "z.coerce.number()";
case "text":
return "z.string()";
default:
throw new Error(`Invalid type: ${type}`);
}
};
const getZodDefaultValue = (type: unknown, value: string | number | boolean | Object) => {
if (!value || value === "null") return;
switch (type) {
case "uuid":
return;
case "character varying": {
if (value === "gen_random_uuid()") return;
if (typeof value === "string" && value.includes("::")) {
return `.default(${value.split("::")[0]})`;
}
return `.default(${value})`;
}
case "ARRAY":
return `.default(${value})`;
case "boolean":
return `.default(${value})`;
case "jsonb":
return "z.string()";
case "json":
return "z.string()";
case "timestamp with time zone": {
if (value === "CURRENT_TIMESTAMP") return;
return "z.string().datetime()";
}
case "integer": {
if ((value as string).includes("nextval")) return;
return `.default(${value})`;
}
case "bigint": {
if ((value as string).includes("nextval")) return;
return `.default(${parseInt((value as string).split("::")[0].slice(1, -1), 10)})`;
}
case "text":
if (typeof value === "string" && value.includes("::")) {
return `.default(${value.split("::")[0]})`;
}
return `.default(${value})`;
default:
throw new Error(`Invalid type: ${type}`);
}
};
const main = async () => {
const tables = (
await db("information_schema.tables")
.whereRaw("table_schema = current_schema()")
.select<{ tableName: string }[]>("table_name as tableName")
.orderBy("table_name")
).filter((el) => !el.tableName.includes("_migrations"));
console.log("Select a table to generate schema");
console.table(tables);
console.log("all: all tables");
const selectedTables = prompt("Type table numbers comma separated: ");
const tableNumbers =
selectedTables !== "all" ? selectedTables.split(",").map((el) => Number(el)) : [];
for (let i = 0; i < tables.length; i += 1) {
// skip if not desired table
if (selectedTables !== "all" && !tableNumbers.includes(i)) continue;
const { tableName } = tables[i];
const columns = await db(tableName).columnInfo();
const columnNames = Object.keys(columns);
let schema = "";
for (let colNum = 0; colNum < columnNames.length; colNum++) {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (colInfo.defaultValue) {
const { defaultValue } = colInfo;
const zSchema = getZodDefaultValue(colInfo.type, defaultValue);
if (zSchema) {
ztype = ztype.concat(zSchema);
}
}
if (colInfo.nullable) {
ztype = ztype.concat(".nullable().optional()");
}
schema = schema.concat(`${!schema ? "\n" : ""} ${columnName}: ${ztype},\n`);
}
const dashcase = tableName.split("_").join("-");
const pascalCase = tableName
.split("_")
.reduce(
(prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`,
""
);
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
`// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});
export type T${pascalCase} = z.infer<typeof ${pascalCase}Schema>;
export type T${pascalCase}Insert = Omit<T${pascalCase}, TImmutableDBKeys>;
export type T${pascalCase}Update = Partial<Omit<T${pascalCase}, TImmutableDBKeys>>;
`
);
// const file = readFileSync(path.join(__dirname, "../src/db/schemas/index.ts"), "utf8");
// if (!file.includes(`export * from "./${dashcase};"`)) {
// appendFileSync(
// path.join(__dirname, "../src/db/schemas/index.ts"),
// `\nexport * from "./${dashcase}";`,
// "utf8"
// );
// }
}
process.exit(0);
};
main();

View File

@@ -4962,8 +4962,7 @@
},
"security": [
{
"apiKeyAuth": [],
"bearerAuth": []
"apiKeyAuth": []
}
]
}

View File

@@ -1,23 +0,0 @@
import {
FastifyInstance,
RawReplyDefaultExpression,
RawRequestDefaultExpression,
RawServerDefault
} from "fastify";
import { Logger } from "pino";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
declare global {
type FastifyZodProvider = FastifyInstance<
RawServerDefault,
RawRequestDefaultExpression<RawServerDefault>,
RawReplyDefaultExpression<RawServerDefault>,
Readonly<Logger>,
ZodTypeProvider
>;
// used only for testing
const testServer: FastifyZodProvider;
const jwtAuthToken: string;
}

View File

@@ -1,119 +0,0 @@
import "fastify";
import { TUsers } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
import { TProjectKeyServiceFactory } from "@app/services/project-key/project-key-service";
import { TProjectMembershipServiceFactory } from "@app/services/project-membership/project-membership-service";
import { TProjectRoleServiceFactory } from "@app/services/project-role/project-role-service";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
declare module "fastify" {
interface FastifyRequest {
realIp: string;
// used for mfa session authentication
mfa: {
userId: string;
user: TUsers;
};
    // identity injection. depending on which kind of token is used, the corresponding information is filled into auth
auth: TAuthMode;
permission: {
type: ActorType;
id: string;
};
// passport data
passportUser: {
isUserCompleted: string;
providerAuthToken: string;
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
}
interface FastifyInstance {
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;
signup: TAuthSignupFactory;
authToken: TAuthTokenServiceFactory;
permission: TPermissionServiceFactory;
org: TOrgServiceFactory;
orgRole: TOrgRoleServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
apiKey: TApiKeyServiceFactory;
project: TProjectServiceFactory;
projectMembership: TProjectMembershipServiceFactory;
projectEnv: TProjectEnvServiceFactory;
projectKey: TProjectKeyServiceFactory;
projectRole: TProjectRoleServiceFactory;
secret: TSecretServiceFactory;
secretTag: TSecretTagServiceFactory;
secretImport: TSecretImportServiceFactory;
projectBot: TProjectBotServiceFactory;
folder: TSecretFolderServiceFactory;
integration: TIntegrationServiceFactory;
integrationAuth: TIntegrationAuthServiceFactory;
webhook: TWebhookServiceFactory;
serviceToken: TServiceTokenServiceFactory;
identity: TIdentityServiceFactory;
identityAccessToken: TIdentityAccessTokenServiceFactory;
identityProject: TIdentityProjectServiceFactory;
identityUa: TIdentityUaServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
secretApprovalRequest: TSecretApprovalRequestServiceFactory;
secretRotation: TSecretRotationServiceFactory;
snapshot: TSecretSnapshotServiceFactory;
saml: TSamlConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
secretBlindIndex: TSecretBlindIndexServiceFactory;
telemetry: TTelemetryServiceFactory;
};
    // for exclusive use by middlewares that need to inject data;
    // everywhere else, access data through the service layer
store: {
user: Pick<TUserDALFactory, "findById">;
};
}
}
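
As a hedged sketch of what this augmentation buys downstream code, a route handler can rely on the typed services container and the injected permission data (the route path and omitted schema are illustrative assumptions, not part of the diff):

// illustrative route plugin — not code from the diff above
export const registerWhoAmIRouter = async (server: FastifyZodProvider) => {
  server.route({
    method: "GET",
    url: "/whoami",
    handler: async (req) => {
      // req.permission is typed via the FastifyRequest augmentation above
      const { type, id } = req.permission;
      // server.services.* (e.g. server.services.org) is likewise fully typed
      return { actorType: type, actorId: id };
    }
  });
};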

View File

@ -1,415 +0,0 @@
import { Knex } from "knex";
import {
TableName,
TApiKeys,
TApiKeysInsert,
TApiKeysUpdate,
TAuditLogs,
TAuditLogsInsert,
TAuditLogsUpdate,
TAuthTokens,
TAuthTokenSessions,
TAuthTokenSessionsInsert,
TAuthTokenSessionsUpdate,
TAuthTokensInsert,
TAuthTokensUpdate,
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
TGitAppOrg,
TGitAppOrgInsert,
TGitAppOrgUpdate,
TIdentities,
TIdentitiesInsert,
TIdentitiesUpdate,
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate,
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate,
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate,
TIdentityUniversalAuths,
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate,
TIncidentContacts,
TIncidentContactsInsert,
TIncidentContactsUpdate,
TIntegrationAuths,
TIntegrationAuthsInsert,
TIntegrationAuthsUpdate,
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
TOrgBots,
TOrgBotsInsert,
TOrgBotsUpdate,
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate,
TOrgRoles,
TOrgRolesInsert,
TOrgRolesUpdate,
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate,
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate,
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate,
TProjectMemberships,
TProjectMembershipsInsert,
TProjectMembershipsUpdate,
TProjectRoles,
TProjectRolesInsert,
TProjectRolesUpdate,
TProjects,
TProjectsInsert,
TProjectsUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
TSecretApprovalPolicies,
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate,
TSecretApprovalRequests,
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
TSecretApprovalRequestsReviewersUpdate,
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate,
TSecretApprovalRequestsUpdate,
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
TSecretBlindIndexesUpdate,
TSecretFolders,
TSecretFoldersInsert,
TSecretFoldersUpdate,
TSecretFolderVersions,
TSecretFolderVersionsInsert,
TSecretFolderVersionsUpdate,
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate,
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate,
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate,
TSecrets,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate,
TSecretsInsert,
TSecretSnapshotFolders,
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate,
TSecretSnapshots,
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate,
TSecretsUpdate,
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate,
TSecretTags,
TSecretTagsInsert,
TSecretTagsUpdate,
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate,
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate,
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
TTrustedIps,
TTrustedIpsInsert,
TTrustedIpsUpdate,
TUserActions,
TUserActionsInsert,
TUserActionsUpdate,
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate,
TUsers,
TUsersInsert,
TUsersUpdate,
TWebhooks,
TWebhooksInsert,
TWebhooksUpdate
} from "@app/db/schemas";
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate
>;
[TableName.AuthTokens]: Knex.CompositeTableType<
TAuthTokens,
TAuthTokensInsert,
TAuthTokensUpdate
>;
[TableName.AuthTokenSession]: Knex.CompositeTableType<
TAuthTokenSessions,
TAuthTokenSessionsInsert,
TAuthTokenSessionsUpdate
>;
[TableName.BackupPrivateKey]: Knex.CompositeTableType<
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate
>;
[TableName.Organization]: Knex.CompositeTableType<
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate
>;
[TableName.OrgMembership]: Knex.CompositeTableType<
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate
>;
[TableName.OrgRoles]: Knex.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: Knex.CompositeTableType<
TIncidentContacts,
TIncidentContactsInsert,
TIncidentContactsUpdate
>;
[TableName.UserAction]: Knex.CompositeTableType<
TUserActions,
TUserActionsInsert,
TUserActionsUpdate
>;
[TableName.SuperAdmin]: Knex.CompositeTableType<
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate
>;
[TableName.ApiKey]: Knex.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: Knex.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: Knex.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,
TProjectMembershipsUpdate
>;
[TableName.Environment]: Knex.CompositeTableType<
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate
>;
[TableName.ProjectBot]: Knex.CompositeTableType<
TProjectBots,
TProjectBotsInsert,
TProjectBotsUpdate
>;
[TableName.ProjectRoles]: Knex.CompositeTableType<
TProjectRoles,
TProjectRolesInsert,
TProjectRolesUpdate
>;
[TableName.ProjectKeys]: Knex.CompositeTableType<
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate
>;
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
TSecretBlindIndexesUpdate
>;
[TableName.SecretVersion]: Knex.CompositeTableType<
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate
>;
[TableName.SecretFolder]: Knex.CompositeTableType<
TSecretFolders,
TSecretFoldersInsert,
TSecretFoldersUpdate
>;
[TableName.SecretFolderVersion]: Knex.CompositeTableType<
TSecretFolderVersions,
TSecretFolderVersionsInsert,
TSecretFolderVersionsUpdate
>;
[TableName.SecretTag]: Knex.CompositeTableType<
TSecretTags,
TSecretTagsInsert,
TSecretTagsUpdate
>;
[TableName.SecretImport]: Knex.CompositeTableType<
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate
>;
[TableName.Integration]: Knex.CompositeTableType<
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate
>;
[TableName.Webhook]: Knex.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: Knex.CompositeTableType<
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate
>;
[TableName.IntegrationAuth]: Knex.CompositeTableType<
TIntegrationAuths,
TIntegrationAuthsInsert,
TIntegrationAuthsUpdate
>;
[TableName.Identity]: Knex.CompositeTableType<
TIdentities,
TIdentitiesInsert,
TIdentitiesUpdate
>;
[TableName.IdentityUniversalAuth]: Knex.CompositeTableType<
TIdentityUniversalAuths,
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate
>;
[TableName.IdentityAccessToken]: Knex.CompositeTableType<
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate
>;
[TableName.IdentityOrgMembership]: Knex.CompositeTableType<
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate
>;
[TableName.IdentityProjectMembership]: Knex.CompositeTableType<
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
TSecretApprovalPolicies,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate
>;
[TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType<
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate
>;
[TableName.SecretApprovalRequest]: Knex.CompositeTableType<
TSecretApprovalRequests,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsUpdate
>;
[TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType<
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
TSecretApprovalRequestsReviewersUpdate
>;
[TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType<
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate
>;
[TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType<
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretRotation]: Knex.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate
>;
[TableName.SecretRotationOutput]: Knex.CompositeTableType<
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate
>;
[TableName.Snapshot]: Knex.CompositeTableType<
TSecretSnapshots,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate
>;
[TableName.SnapshotSecret]: Knex.CompositeTableType<
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate
>;
[TableName.SnapshotFolder]: Knex.CompositeTableType<
TSecretSnapshotFolders,
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate
>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate
>;
[TableName.GitAppOrg]: Knex.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
[TableName.SecretScanningGitRisk]: Knex.CompositeTableType<
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate
>;
[TableName.TrustedIps]: Knex.CompositeTableType<
TTrustedIps,
TTrustedIpsInsert,
TTrustedIpsUpdate
>;
// Junction tables
[TableName.JnSecretTag]: Knex.CompositeTableType<
TSecretTagJunction,
TSecretTagJunctionInsert,
TSecretTagJunctionUpdate
>;
[TableName.SecretVersionTag]: Knex.CompositeTableType<
TSecretVersionTagJunction,
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
}
}
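
With these CompositeTableType entries in place, queries through the knex client are checked against the declared row types; a hedged sketch follows (the DAL function itself is an illustration, not code from the diff):

// illustrative only — shows the type-safety the Tables augmentation provides
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export const findUserById = async (db: Knex, userId: string) => {
  // the resolved row type is TUsers, so column names are compile-time checked
  return db(TableName.Users).where({ id: userId }).first();
};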

View File

@ -1 +0,0 @@
declare module "passport-gitlab2";

View File

@ -1,6 +0,0 @@
import Redis from "ioredis";
export const initRedisConnection = (redisUrl: string) => {
const redis = new Redis(redisUrl);
return redis;
};
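
A usage sketch; the environment variable name and fallback URL are assumptions:

// during server bootstrap — illustrative
const redis = initRedisConnection(process.env.REDIS_URL ?? "redis://localhost:6379");
// ioredis connects lazily, so an explicit ping surfaces a bad URL early
await redis.ping();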

View File

@ -6,9 +6,6 @@ export const client = new InfisicalClient({
token: process.env.INFISICAL_TOKEN!
});
export const getIsMigrationMode = async () =>
(await client.getSecret("MIGRATION_MODE")).secretValue === "true";
export const getPort = async () => (await client.getSecret("PORT")).secretValue || 4000;
export const getEncryptionKey = async () => {
const secretValue = (await client.getSecret("ENCRYPTION_KEY")).secretValue;

View File

@ -3,11 +3,11 @@ import { IServerConfig, ServerConfig } from "../models/serverConfig";
let serverConfig: IServerConfig;
export const serverConfigInit = async () => {
const cfg = await ServerConfig.findOne({}).lean();
const cfg = await ServerConfig.findOne({});
if (!cfg) {
const cfg = new ServerConfig();
await cfg.save();
serverConfig = cfg.toObject();
serverConfig = cfg;
} else {
serverConfig = cfg;
}
@ -19,6 +19,6 @@ export const getServerConfig = () => serverConfig;
export const updateServerConfig = async (data: Partial<IServerConfig>) => {
const cfg = await ServerConfig.findByIdAndUpdate(serverConfig._id, data, { new: true });
if (!cfg) throw new Error("Failed to update server config");
serverConfig = cfg.toObject();
serverConfig = cfg;
return serverConfig;
};

View File

@ -1,5 +1,5 @@
import { Request, Response } from "express";
import { getHttpsEnabled, getIsMigrationMode } from "../../config";
import { getHttpsEnabled } from "../../config";
import { getServerConfig, updateServerConfig as setServerConfig } from "../../config/serverConfig";
import { initializeDefaultOrg, issueAuthTokens } from "../../helpers";
import { validateRequest } from "../../helpers/validation";
@ -8,10 +8,9 @@ import { TelemetryService } from "../../services";
import { BadRequestError, UnauthorizedRequestError } from "../../utils/errors";
import * as reqValidator from "../../validation/admin";
export const getServerConfigInfo = async (_req: Request, res: Response) => {
export const getServerConfigInfo = (_req: Request, res: Response) => {
const config = getServerConfig();
const isMigrationModeOn = await getIsMigrationMode();
return res.send({ config: { ...config, isMigrationModeOn } });
return res.send({ config });
};
export const updateServerConfig = async (req: Request, res: Response) => {

View File

@ -2,7 +2,7 @@ import { Request, Response } from "express";
import { Types } from "mongoose";
import { standardRequest } from "../../config/request";
import { getApps, getTeams, revokeAccess } from "../../integrations";
import { Bot, IIntegrationAuth, Integration, IntegrationAuth, Workspace } from "../../models";
import { Bot, IntegrationAuth, Workspace } from "../../models";
import { EventType } from "../../ee/models";
import { IntegrationService } from "../../services";
import { EEAuditLogService } from "../../ee/services";
@ -130,6 +130,7 @@ export const oAuthExchange = async (req: Request, res: Response) => {
export const saveIntegrationToken = async (req: Request, res: Response) => {
// TODO: refactor
// TODO: check if access token is valid for each integration
let integrationAuth;
const {
body: { workspaceId, integration, url, accessId, namespace, accessToken, refreshToken }
} = await validateRequest(reqValidator.SaveIntegrationAccessTokenV1, req);
@ -151,21 +152,31 @@ export const saveIntegrationToken = async (req: Request, res: Response) => {
if (!bot) throw new Error("Bot must be enabled to save integration access token");
let integrationAuth = await new IntegrationAuth({
workspace: new Types.ObjectId(workspaceId),
integration,
url,
namespace,
algorithm: ALGORITHM_AES_256_GCM,
keyEncoding: ENCODING_SCHEME_UTF8,
...(integration === INTEGRATION_GCP_SECRET_MANAGER
? {
metadata: {
authMethod: "serviceAccount"
integrationAuth = await IntegrationAuth.findOneAndUpdate(
{
workspace: new Types.ObjectId(workspaceId),
integration
},
{
workspace: new Types.ObjectId(workspaceId),
integration,
url,
namespace,
algorithm: ALGORITHM_AES_256_GCM,
keyEncoding: ENCODING_SCHEME_UTF8,
...(integration === INTEGRATION_GCP_SECRET_MANAGER
? {
metadata: {
authMethod: "serviceAccount"
}
}
}
: {})
}).save();
: {})
},
{
new: true,
upsert: true
}
);
// encrypt and save integration access details
if (refreshToken) {
@ -177,12 +188,12 @@ export const saveIntegrationToken = async (req: Request, res: Response) => {
// encrypt and save integration access details
if (accessId || accessToken) {
integrationAuth = (await IntegrationService.setIntegrationAuthAccess({
integrationAuth = await IntegrationService.setIntegrationAuthAccess({
integrationAuthId: integrationAuth._id.toString(),
accessId,
accessToken,
accessExpiresAt: undefined
})) as IIntegrationAuth;
});
}
if (!integrationAuth) throw new Error("Failed to save integration access token");
@ -1197,64 +1208,13 @@ export const getIntegrationAuthTeamCityBuildConfigs = async (req: Request, res:
});
};
/**
* Delete all integration authorizations and integrations for workspace with id [workspaceId]
* with integration name [integration]
* @param req
* @param res
* @returns
*/
export const deleteIntegrationAuths = async (req: Request, res: Response) => {
const {
query: { integration, workspaceId }
} = await validateRequest(reqValidator.DeleteIntegrationAuthsV1, req);
const { permission } = await getAuthDataProjectPermissions({
authData: req.authData,
workspaceId: new Types.ObjectId(workspaceId)
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.Integrations
);
const integrationAuths = await IntegrationAuth.deleteMany({
integration,
workspace: new Types.ObjectId(workspaceId)
});
const integrations = await Integration.deleteMany({
integration,
workspace: new Types.ObjectId(workspaceId)
});
await EEAuditLogService.createAuditLog(
req.authData,
{
type: EventType.UNAUTHORIZE_INTEGRATION,
metadata: {
integration
}
},
{
workspaceId: new Types.ObjectId(workspaceId)
}
);
return res.status(200).send({
integrationAuths,
integrations
});
}
/**
* Delete integration authorization with id [integrationAuthId]
* @param req
* @param res
* @returns
*/
export const deleteIntegrationAuthById = async (req: Request, res: Response) => {
export const deleteIntegrationAuth = async (req: Request, res: Response) => {
const {
params: { integrationAuthId }
} = await validateRequest(reqValidator.DeleteIntegrationAuthV1, req);

View File

@ -251,21 +251,6 @@ export const deleteIntegration = async (req: Request, res: Response) => {
});
if (!deletedIntegration) throw new Error("Failed to find integration");
const numOtherIntegrationsUsingSameAuth = await Integration.countDocuments({
integrationAuth: deletedIntegration.integrationAuth,
_id: {
$nin: [deletedIntegration._id]
}
});
if (numOtherIntegrationsUsingSameAuth === 0) {
// no other integrations are using the same integration auth
// -> delete integration auth associated with the integration being deleted
await IntegrationAuth.deleteOne({
_id: deletedIntegration.integrationAuth
});
}
await EEAuditLogService.createAuditLog(
req.authData,

View File

@ -111,17 +111,11 @@ export const createSecretImp = async (req: Request, res: Response) => {
authData: req.authData,
workspaceId: new Types.ObjectId(workspaceId)
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: directory })
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment: secretImport.environment, secretPath: secretImport.secretPath })
);
}
const folders = await Folder.findOne({
@ -329,7 +323,7 @@ export const updateSecretImport = async (req: Request, res: Response) => {
authData: req.authData,
workspaceId: importSecDoc.workspace
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, {
@ -337,13 +331,6 @@ export const updateSecretImport = async (req: Request, res: Response) => {
secretPath
})
);
secretImports.forEach(({ environment, secretPath }) => {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);
})
}
const orderBefore = importSecDoc.imports;
@ -466,7 +453,7 @@ export const deleteSecretImport = async (req: Request, res: Response) => {
authData: req.authData,
workspaceId: importSecDoc.workspace
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
subject(ProjectPermissionSub.Secrets, {
@ -633,7 +620,7 @@ export const getAllSecretsFromImport = async (req: Request, res: Response) => {
authData: req.authData,
workspaceId: new Types.ObjectId(workspaceId)
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, {
@ -690,7 +677,7 @@ export const getAllSecretsFromImport = async (req: Request, res: Response) => {
authData: req.authData,
workspaceId: importSecDoc.workspace
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, {

View File

@ -550,7 +550,7 @@ export const attachIdentityUniversalAuth = async (req: Request, res: Response) =
// validate trusted ips
const reformattedClientSecretTrustedIps = clientSecretTrustedIps.map((clientSecretTrustedIp) => {
if (!plan.ipAllowlisting && (clientSecretTrustedIp.ipAddress !== "0.0.0.0/0" && clientSecretTrustedIp.ipAddress !== "::/0")) return res.status(400).send({
if (!plan.ipAllowlisting && clientSecretTrustedIp.ipAddress !== "0.0.0.0/0") return res.status(400).send({
message: "Failed to add IP access range to service token due to plan restriction. Upgrade plan to add IP access range."
});
@ -564,7 +564,7 @@ export const attachIdentityUniversalAuth = async (req: Request, res: Response) =
});
const reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
if (!plan.ipAllowlisting && (accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" && accessTokenTrustedIp.ipAddress !== "::/0")) return res.status(400).send({
if (!plan.ipAllowlisting && accessTokenTrustedIp.ipAddress !== "0.0.0.0/0") return res.status(400).send({
message: "Failed to add IP access range to service token due to plan restriction. Upgrade plan to add IP access range."
});
@ -750,7 +750,7 @@ export const updateIdentityUniversalAuth = async (req: Request, res: Response) =
let reformattedClientSecretTrustedIps;
if (clientSecretTrustedIps) {
reformattedClientSecretTrustedIps = clientSecretTrustedIps.map((clientSecretTrustedIp) => {
if (!plan.ipAllowlisting && (clientSecretTrustedIp.ipAddress !== "0.0.0.0/0" && clientSecretTrustedIp.ipAddress !== "::/0")) return res.status(400).send({
if (!plan.ipAllowlisting && clientSecretTrustedIp.ipAddress !== "0.0.0.0/0") return res.status(400).send({
message: "Failed to add IP access range to service token due to plan restriction. Upgrade plan to add IP access range."
});
@ -767,7 +767,7 @@ export const updateIdentityUniversalAuth = async (req: Request, res: Response) =
let reformattedAccessTokenTrustedIps;
if (accessTokenTrustedIps) {
reformattedAccessTokenTrustedIps = accessTokenTrustedIps.map((accessTokenTrustedIp) => {
if (!plan.ipAllowlisting && (accessTokenTrustedIp.ipAddress !== "0.0.0.0/0" && accessTokenTrustedIp.ipAddress !== "::/0")) return res.status(400).send({
if (!plan.ipAllowlisting && accessTokenTrustedIp.ipAddress !== "0.0.0.0/0") return res.status(400).send({
message: "Failed to add IP access range to service token due to plan restriction. Upgrade plan to add IP access range."
});
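
The repeated check above accepts only the unrestricted ranges on plans without IP allowlisting, now covering both IPv4 (0.0.0.0/0) and IPv6 (::/0). A hedged helper expressing the same condition might look like this; the helper name is an assumption and the real code keeps the check inline:

// hypothetical helper — the handlers above inline this condition
const UNRESTRICTED_IP_RANGES = ["0.0.0.0/0", "::/0"];

export const isUnrestrictedIpRange = (ipAddress: string) =>
  UNRESTRICTED_IP_RANGES.includes(ipAddress);

// usage sketch:
// if (!plan.ipAllowlisting && !isUnrestrictedIpRange(trustedIp.ipAddress)) {
//   return res.status(400).send({ message: "Upgrade plan to add IP access range." });
// }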

View File

@ -1,13 +1,9 @@
import { Request, Response } from "express";
import { Types } from "mongoose";
import {
IWorkspace,
Identity,
IdentityMembership,
IdentityMembershipOrg,
Membership,
IdentityMembershipOrg,
Membership,
MembershipOrg,
User,
Workspace
} from "../../models";
import { Role } from "../../ee/models";
@ -302,8 +298,7 @@ export const getOrganizationWorkspaces = async (req: Request, res: Response) =>
#swagger.description = 'Return projects in organization that user is part of'
#swagger.security = [{
"apiKeyAuth": [],
"bearerAuth": []
"apiKeyAuth": []
}]
#swagger.parameters['organizationId'] = {
@ -331,7 +326,6 @@ export const getOrganizationWorkspaces = async (req: Request, res: Response) =>
}
}
*/
const {
params: { organizationId }
} = await validateRequest(reqValidator.GetOrgWorkspacesv2, req);
@ -357,27 +351,13 @@ export const getOrganizationWorkspaces = async (req: Request, res: Response) =>
).map((w) => w._id.toString())
);
let workspaces: IWorkspace[] = [];
if (req.authData.authPayload instanceof Identity) {
workspaces = (
await IdentityMembership.find({
identity: req.authData.authPayload._id
}).populate<{ workspace: IWorkspace }>("workspace")
)
.filter((m) => workspacesSet.has(m.workspace._id.toString()))
.map((m) => m.workspace);
}
if (req.authData.authPayload instanceof User) {
workspaces = (
await Membership.find({
user: req.authData.authPayload._id
}).populate<{ workspace: IWorkspace }>("workspace")
)
.filter((m) => workspacesSet.has(m.workspace._id.toString()))
.map((m) => m.workspace);
}
const workspaces = (
await Membership.find({
user: req.user._id
}).populate("workspace")
)
.filter((m) => workspacesSet.has(m.workspace._id.toString()))
.map((m) => m.workspace);
return res.status(200).send({
workspaces

View File

@ -13,7 +13,7 @@ import {
ProjectPermissionSub,
getAuthDataProjectPermissions
} from "../../ee/services/ProjectRoleService";
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import { Types } from "mongoose";
/**
@ -86,14 +86,6 @@ export const createServiceTokenData = async (req: Request, res: Response) => {
ProjectPermissionSub.ServiceTokens
);
scopes.forEach(({ environment, secretPath }) => {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: secretPath })
);
})
const secret = crypto.randomBytes(16).toString("hex");
const secretHash = await bcrypt.hash(secret, await getSaltRounds());

View File

@ -348,7 +348,7 @@ export const getSecretByNameRaw = async (req: Request, res: Response) => {
}
*/
const {
query: { secretPath, environment, workspaceId, type, include_imports, version },
query: { secretPath, environment, workspaceId, type, include_imports },
params: { secretName }
} = await validateRequest(reqValidator.GetSecretByNameRawV3, req);
@ -371,8 +371,7 @@ export const getSecretByNameRaw = async (req: Request, res: Response) => {
type,
secretPath,
authData: req.authData,
include_imports,
version
include_imports
});
const key = await BotService.getWorkspaceKeyWithBot({
@ -866,7 +865,7 @@ export const getSecrets = async (req: Request, res: Response) => {
*/
export const getSecretByName = async (req: Request, res: Response) => {
const {
query: { secretPath, environment, workspaceId, type, include_imports, version },
query: { secretPath, environment, workspaceId, type, include_imports },
params: { secretName }
} = await validateRequest(reqValidator.GetSecretByNameV3, req);
@ -889,8 +888,7 @@ export const getSecretByName = async (req: Request, res: Response) => {
type,
secretPath,
authData: req.authData,
include_imports,
version
include_imports
});
return res.status(200).send({

View File

@ -1,2 +0,0 @@
export type { TDbClient } from "./instance";
export { initDbConnection } from "./instance";

Some files were not shown because too many files have changed in this diff.