mirror of https://github.com/Infisical/infisical.git
synced 2025-03-25 14:05:03 +00:00
Merge pull request #2837 from Infisical/misc/use-pg-queue-for-audit-logs-with-flag
misc: pg-queue for audit logs
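
The change set below puts audit-log jobs on a Postgres-backed pg-boss queue when a new USE_PG_QUEUE flag is enabled, instead of the existing Redis/BullMQ queue: queueServiceFactory now also takes a database connection string and gains initialize, startPg and queuePg, and the audit-log service registers a pg-boss worker and routes writes through it when the flag is on. A condensed sketch of the new surface, using only names that appear in this diff; handler and payload are illustrative placeholders and error handling is omitted:

  // Condensed sketch of the API introduced in this PR (handler/payload are placeholders).
  const queue = queueServiceFactory(appCfg.REDIS_URL, appCfg.DB_CONNECTION_URI);
  await queue.initialize(); // starts pg-boss against the Postgres connection
  await queue.startPg<QueueName.AuditLog>(QueueJobs.AuditLog, handler, {
    batchSize: 1,
    workerCount: 30,
    pollingIntervalSeconds: 0.5
  });
  await queue.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, payload, { retryLimit: 10, retryBackoff: true }); // path taken when USE_PG_QUEUE=true
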
@@ -10,12 +10,15 @@ export const mockQueue = (): TQueueServiceFactory => {
    queue: async (name, jobData) => {
      job[name] = jobData;
    },
+   queuePg: async () => {},
+   initialize: async () => {},
    shutdown: async () => undefined,
    stopRepeatableJob: async () => true,
    start: (name, jobFn) => {
      queues[name] = jobFn;
      workers[name] = jobFn;
    },
+   startPg: async () => {},
    listen: (name, event) => {
      events[name] = event;
    },
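
The test-side mock gains no-op counterparts (queuePg, initialize, startPg) so code that exercises the new Postgres path runs unchanged in tests. A hypothetical sketch of a test driving it; the payload value and surrounding imports are assumed, not part of this diff:

  // Hypothetical test usage; the new members resolve immediately without touching Postgres.
  const queue = mockQueue();
  await queue.initialize();
  await queue.startPg<QueueName.AuditLog>(QueueJobs.AuditLog, async () => {}, {
    batchSize: 1,
    workerCount: 1,
    pollingIntervalSeconds: 1
  });
  await queue.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, somePayload); // somePayload: a TCreateAuditLogDTO-shaped object
  await queue.shutdown();
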
@@ -53,7 +53,7 @@ export default {
      extension: "ts"
    });
    const smtp = mockSmtpServer();
-   const queue = queueServiceFactory(cfg.REDIS_URL);
+   const queue = queueServiceFactory(cfg.REDIS_URL, cfg.DB_CONNECTION_URI);
    const keyStore = keyStoreFactory(cfg.REDIS_URL);

    const hsmModule = initializeHsmModule();
74 backend/package-lock.json generated
@@ -93,6 +93,7 @@
        "passport-google-oauth20": "^2.0.0",
        "passport-ldapauth": "^3.0.1",
        "pg": "^8.11.3",
+       "pg-boss": "^10.1.5",
        "pg-query-stream": "^4.5.3",
        "picomatch": "^3.0.1",
        "pino": "^8.16.2",
@@ -12272,14 +12273,6 @@
      "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
      "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
    },
-   "node_modules/buffer-writer": {
-     "version": "2.0.0",
-     "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
-     "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==",
-     "engines": {
-       "node": ">=4"
-     }
-   },
    "node_modules/bullmq": {
      "version": "5.4.2",
      "resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.4.2.tgz",
@@ -18236,11 +18229,6 @@
      "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
      "license": "BlueOak-1.0.0"
    },
-   "node_modules/packet-reader": {
-     "version": "1.0.0",
-     "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz",
-     "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ=="
-   },
    "node_modules/parent-module": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -18459,15 +18447,13 @@
      "integrity": "sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg=="
    },
    "node_modules/pg": {
-     "version": "8.11.3",
-     "resolved": "https://registry.npmjs.org/pg/-/pg-8.11.3.tgz",
-     "integrity": "sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==",
+     "version": "8.13.1",
+     "resolved": "https://registry.npmjs.org/pg/-/pg-8.13.1.tgz",
+     "integrity": "sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==",
      "dependencies": {
-       "buffer-writer": "2.0.0",
-       "packet-reader": "1.0.0",
-       "pg-connection-string": "^2.6.2",
-       "pg-pool": "^3.6.1",
-       "pg-protocol": "^1.6.0",
+       "pg-connection-string": "^2.7.0",
+       "pg-pool": "^3.7.0",
+       "pg-protocol": "^1.7.0",
        "pg-types": "^2.1.0",
        "pgpass": "1.x"
      },
@@ -18486,6 +18472,19 @@
        }
      }
    },
+   "node_modules/pg-boss": {
+     "version": "10.1.5",
+     "resolved": "https://registry.npmjs.org/pg-boss/-/pg-boss-10.1.5.tgz",
+     "integrity": "sha512-H87NL6c7N6nTCSCePh16EaSQVSFevNXWdJuzY6PZz4rw+W/nuMKPfI/vYyXS0AdT1g1Q3S3EgeOYOHcB7ZVToQ==",
+     "dependencies": {
+       "cron-parser": "^4.9.0",
+       "pg": "^8.13.0",
+       "serialize-error": "^8.1.0"
+     },
+     "engines": {
+       "node": ">=20"
+     }
+   },
    "node_modules/pg-cloudflare": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz",
@@ -18522,17 +18521,17 @@
      }
    },
    "node_modules/pg-pool": {
-     "version": "3.6.1",
-     "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.1.tgz",
-     "integrity": "sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==",
+     "version": "3.7.0",
+     "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.7.0.tgz",
+     "integrity": "sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==",
      "peerDependencies": {
        "pg": ">=8.0"
      }
    },
    "node_modules/pg-protocol": {
-     "version": "1.6.0",
-     "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz",
-     "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q=="
+     "version": "1.7.0",
+     "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.7.0.tgz",
+     "integrity": "sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ=="
    },
    "node_modules/pg-query-stream": {
      "version": "4.5.3",
@@ -18561,9 +18560,9 @@
      }
    },
    "node_modules/pg/node_modules/pg-connection-string": {
-     "version": "2.6.2",
-     "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.2.tgz",
-     "integrity": "sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA=="
+     "version": "2.7.0",
+     "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.7.0.tgz",
+     "integrity": "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA=="
    },
    "node_modules/pgpass": {
      "version": "1.0.5",
@@ -20174,6 +20173,20 @@
      "resolved": "https://registry.npmjs.org/seq-queue/-/seq-queue-0.0.5.tgz",
      "integrity": "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q=="
    },
+   "node_modules/serialize-error": {
+     "version": "8.1.0",
+     "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-8.1.0.tgz",
+     "integrity": "sha512-3NnuWfM6vBYoy5gZFvHiYsVbafvI9vZv/+jlIigFn4oP4zjNPK3LhcY0xSCgeb1a5L8jO71Mit9LlNoi2UfDDQ==",
+     "dependencies": {
+       "type-fest": "^0.20.2"
+     },
+     "engines": {
+       "node": ">=10"
+     },
+     "funding": {
+       "url": "https://github.com/sponsors/sindresorhus"
+     }
+   },
    "node_modules/serve-static": {
      "version": "1.16.2",
      "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
@@ -22193,7 +22206,6 @@
      "version": "0.20.2",
      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
      "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
-     "dev": true,
      "engines": {
        "node": ">=10"
      },
@@ -201,6 +201,7 @@
    "passport-google-oauth20": "^2.0.0",
    "passport-ldapauth": "^3.0.1",
    "pg": "^8.11.3",
+   "pg-boss": "^10.1.5",
    "pg-query-stream": "^4.5.3",
    "picomatch": "^3.0.1",
    "pino": "^8.16.2",
@@ -1,6 +1,7 @@
import { RawAxiosRequestHeaders } from "axios";

import { SecretKeyEncoding } from "@app/db/schemas";
+ import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@@ -20,27 +21,128 @@ type TAuditLogQueueServiceFactoryDep = {
  licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

- export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
+ export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;

// keep this timeout 5s it must be fast because else the queue will take time to finish
// audit log is a crowded queue thus needs to be fast
export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
- export const auditLogQueueServiceFactory = ({

+ export const auditLogQueueServiceFactory = async ({
  auditLogDAL,
  queueService,
  projectDAL,
  licenseService,
  auditLogStreamDAL
}: TAuditLogQueueServiceFactoryDep) => {
+ const appCfg = getConfig();

  const pushToLog = async (data: TCreateAuditLogDTO) => {
-   await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
-     removeOnFail: {
-       count: 3
-     },
-     removeOnComplete: true
-   });
+   if (appCfg.USE_PG_QUEUE) {
+     await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
+       retryLimit: 10,
+       retryBackoff: true
+     });
+   } else {
+     await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
+       removeOnFail: {
+         count: 3
+       },
+       removeOnComplete: true
+     });
+   }
  };

+ await queueService.startPg<QueueName.AuditLog>(
+   QueueJobs.AuditLog,
+   async ([job]) => {
+     const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
+     let { orgId } = job.data;
+     const MS_IN_DAY = 24 * 60 * 60 * 1000;
+     let project;
+
+     if (!orgId) {
+       // it will never be undefined for both org and project id
+       // TODO(akhilmhdh): use caching here in dal to avoid db calls
+       project = await projectDAL.findById(projectId as string);
+       orgId = project.orgId;
+     }
+
+     const plan = await licenseService.getPlan(orgId);
+     if (plan.auditLogsRetentionDays === 0) {
+       // skip inserting if audit log retention is 0 meaning its not supported
+       return;
+     }
+
+     // For project actions, set TTL to project-level audit log retention config
+     // This condition ensures that the plan's audit log retention days cannot be bypassed
+     const ttlInDays =
+       project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
+         ? project.auditLogsRetentionDays
+         : plan.auditLogsRetentionDays;
+
+     const ttl = ttlInDays * MS_IN_DAY;
+
+     const auditLog = await auditLogDAL.create({
+       actor: actor.type,
+       actorMetadata: actor.metadata,
+       userAgent,
+       projectId,
+       projectName: project?.name,
+       ipAddress,
+       orgId,
+       eventType: event.type,
+       expiresAt: new Date(Date.now() + ttl),
+       eventMetadata: event.metadata,
+       userAgentType
+     });
+
+     const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
+     await Promise.allSettled(
+       logStreams.map(
+         async ({
+           url,
+           encryptedHeadersTag,
+           encryptedHeadersIV,
+           encryptedHeadersKeyEncoding,
+           encryptedHeadersCiphertext
+         }) => {
+           const streamHeaders =
+             encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
+               ? (JSON.parse(
+                   infisicalSymmetricDecrypt({
+                     keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
+                     iv: encryptedHeadersIV,
+                     tag: encryptedHeadersTag,
+                     ciphertext: encryptedHeadersCiphertext
+                   })
+                 ) as LogStreamHeaders[])
+               : [];
+
+           const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
+
+           if (streamHeaders.length)
+             streamHeaders.forEach(({ key, value }) => {
+               headers[key] = value;
+             });
+
+           return request.post(url, auditLog, {
+             headers,
+             // request timeout
+             timeout: AUDIT_LOG_STREAM_TIMEOUT,
+             // connection timeout
+             signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
+           });
+         }
+       )
+     );
+   },
+   {
+     batchSize: 1,
+     workerCount: 30,
+     pollingIntervalSeconds: 0.5
+   }
+ );

  queueService.start(QueueName.AuditLog, async (job) => {
    const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
    let { orgId } = job.data;
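
The retention logic in the worker above caps any project-level retention at the plan's retention before computing the row's expiry. A small worked restatement of that rule; the day counts are illustrative, not values from the code:

  // Worked example of the TTL rule above with illustrative numbers.
  const MS_IN_DAY = 24 * 60 * 60 * 1000;
  const planRetentionDays = 30;   // from licenseService.getPlan(orgId)
  const projectRetentionDays = 7; // optional per-project override
  const ttlInDays =
    projectRetentionDays && projectRetentionDays < planRetentionDays
      ? projectRetentionDays      // the override wins only when it is stricter than the plan
      : planRetentionDays;
  const expiresAt = new Date(Date.now() + ttlInDays * MS_IN_DAY); // here: 7 days out
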
@@ -178,7 +178,9 @@ const envSchema = z
    HSM_LIB_PATH: zpStr(z.string().optional()),
    HSM_PIN: zpStr(z.string().optional()),
    HSM_KEY_LABEL: zpStr(z.string().optional()),
-   HSM_SLOT: z.coerce.number().optional().default(0)
+   HSM_SLOT: z.coerce.number().optional().default(0),
+
+   USE_PG_QUEUE: zodStrBool.default("false")
  })
  // To ensure that basic encryption is always possible.
  .refine(
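
USE_PG_QUEUE arrives as a string from the environment; zodStrBool is an existing helper in this codebase that is not shown in the diff and presumably coerces "true"/"false" into a boolean. A rough standalone equivalent, under that assumption:

  import { z } from "zod";

  // Assumed behavior of zodStrBool: accept the strings "true"/"false" and yield a boolean.
  const zodStrBool = z.enum(["true", "false"]).transform((val) => val === "true");

  // Mirrors USE_PG_QUEUE: zodStrBool.default("false") from the schema above.
  const USE_PG_QUEUE = zodStrBool.default("false").parse(process.env.USE_PG_QUEUE);
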
@@ -55,7 +55,10 @@ const run = async () => {
  }

  const smtp = smtpServiceFactory(formatSmtpConfig());
- const queue = queueServiceFactory(appCfg.REDIS_URL);

+ const queue = queueServiceFactory(appCfg.REDIS_URL, appCfg.DB_CONNECTION_URI);
+ await queue.initialize();

  const keyStore = keyStoreFactory(appCfg.REDIS_URL);

  const hsmModule = initializeHsmModule();
@@ -1,5 +1,6 @@
import { Job, JobsOptions, Queue, QueueOptions, RepeatOptions, Worker, WorkerListener } from "bullmq";
import Redis from "ioredis";
+ import PgBoss, { WorkOptions } from "pg-boss";

import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
@@ -7,6 +8,7 @@ import {
  TScanFullRepoEventPayload,
  TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
+ import { logger } from "@app/lib/logger";
import {
  TFailedIntegrationSyncEmailsPayload,
  TIntegrationSyncPayload,
@@ -184,17 +186,35 @@ export type TQueueJobTypes = {
};

export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
- export const queueServiceFactory = (redisUrl: string) => {
+ export const queueServiceFactory = (redisUrl: string, dbConnectionUrl: string) => {
  const connection = new Redis(redisUrl, { maxRetriesPerRequest: null });
  const queueContainer = {} as Record<
    QueueName,
    Queue<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
  >;

+ const pgBoss = new PgBoss({
+   connectionString: dbConnectionUrl,
+   archiveCompletedAfterSeconds: 60,
+   archiveFailedAfterSeconds: 1000, // we want to keep failed jobs for a longer time so that it can be retried
+   deleteAfterSeconds: 30
+ });
+
+ const queueContainerPg = {} as Record<QueueJobs, boolean>;

  const workerContainer = {} as Record<
    QueueName,
    Worker<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
  >;

+ const initialize = async () => {
+   await pgBoss.start();
+
+   pgBoss.on("error", (error) => {
+     logger.error(error, "pg-queue error");
+   });
+ };

  const start = <T extends QueueName>(
    name: T,
    jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>,
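
For orientation, the pg-boss calls used in this factory map onto a small lifecycle. A standalone sketch against pg-boss v10 semantics (batch handlers, queues created explicitly); the queue name and payload are illustrative:

  import PgBoss from "pg-boss";

  const boss = new PgBoss({ connectionString: process.env.DB_CONNECTION_URI as string });
  boss.on("error", (err) => console.error("pg-boss error", err));

  await boss.start();                    // what initialize() does
  await boss.createQueue("example-job"); // what startPg() does once per job name
  await boss.work("example-job", { batchSize: 1 }, async (jobs) => {
    // pg-boss v10 hands workers an array of jobs
    for (const job of jobs) {
      console.log(job.data);
    }
  });
  await boss.send({ name: "example-job", data: { hello: "world" }, options: { retryLimit: 10, retryBackoff: true } }); // what queuePg() does
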
@@ -215,6 +235,27 @@ export const queueServiceFactory = (redisUrl: string) => {
    });
  };

+ const startPg = async <T extends QueueName>(
+   jobName: QueueJobs,
+   jobsFn: (jobs: PgBoss.Job<TQueueJobTypes[T]["payload"]>[]) => Promise<void>,
+   options: WorkOptions & {
+     workerCount: number;
+   }
+ ) => {
+   if (queueContainerPg[jobName]) {
+     throw new Error(`${jobName} queue is already initialized`);
+   }
+
+   await pgBoss.createQueue(jobName);
+   queueContainerPg[jobName] = true;
+
+   await Promise.all(
+     Array.from({ length: options.workerCount }).map(() =>
+       pgBoss.work<TQueueJobTypes[T]["payload"]>(jobName, options, jobsFn)
+     )
+   );
+ };

  const listen = <
    T extends QueueName,
    U extends keyof WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>
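
Note that options.workerCount fans out into that many independent pgBoss.work() registrations for the same queue. A hypothetical registration mirroring the audit-log usage earlier in this diff, assuming queue is the object returned by queueServiceFactory:

  // Hypothetical consumer of startPg; with batchSize: 1 the handler receives a single-job batch.
  await queue.startPg<QueueName.AuditLog>(
    QueueJobs.AuditLog,
    async ([job]) => {
      // persist or forward job.data here
    },
    { batchSize: 1, workerCount: 30, pollingIntervalSeconds: 0.5 }
  );
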
@@ -238,6 +279,18 @@ export const queueServiceFactory = (redisUrl: string) => {
    await q.add(job, data, opts);
  };

+ const queuePg = async <T extends QueueName>(
+   job: TQueueJobTypes[T]["name"],
+   data: TQueueJobTypes[T]["payload"],
+   opts?: PgBoss.SendOptions & { jobId?: string }
+ ) => {
+   await pgBoss.send({
+     name: job,
+     data,
+     options: opts
+   });
+ };

  const stopRepeatableJob = async <T extends QueueName>(
    name: T,
    job: TQueueJobTypes[T]["name"],
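
queuePg is the pg-boss counterpart of queue(): it wraps pgBoss.send() with the job name and payload and passes any pg-boss send options through. A hypothetical producer call mirroring the audit-log usage earlier in this diff, again assuming queue is the factory's return value:

  // Hypothetical producer; payload must match TQueueJobTypes[QueueName.AuditLog]["payload"].
  await queue.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, payload, {
    retryLimit: 10,
    retryBackoff: true
  });
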
@@ -274,5 +327,17 @@ export const queueServiceFactory = (redisUrl: string) => {
    await Promise.all(Object.values(workerContainer).map((worker) => worker.close()));
  };

- return { start, listen, queue, shutdown, stopRepeatableJob, stopRepeatableJobByJobId, clearQueue, stopJobById };
+ return {
+   initialize,
+   start,
+   listen,
+   queue,
+   shutdown,
+   stopRepeatableJob,
+   stopRepeatableJobByJobId,
+   clearQueue,
+   stopJobById,
+   startPg,
+   queuePg
+ };
};
|
@ -394,13 +394,14 @@ export const registerRoutes = async (
|
||||
permissionService
|
||||
});
|
||||
|
||||
const auditLogQueue = auditLogQueueServiceFactory({
|
||||
const auditLogQueue = await auditLogQueueServiceFactory({
|
||||
auditLogDAL,
|
||||
queueService,
|
||||
projectDAL,
|
||||
licenseService,
|
||||
auditLogStreamDAL
|
||||
});
|
||||
|
||||
const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
|
||||
const auditLogStreamService = auditLogStreamServiceFactory({
|
||||
licenseService,
|
||||
|