🐂 Migration to moleculer-bullmq

This commit is contained in:
2023-06-29 14:16:58 -04:00
parent 795ac561c7
commit cb84e4893f
5 changed files with 1572 additions and 1053 deletions

2059
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -23,12 +23,14 @@
"@types/lodash": "^4.14.168",
"@typescript-eslint/eslint-plugin": "^5.56.0",
"@typescript-eslint/parser": "^5.56.0",
"bull": "^4.10.4",
"eslint": "^8.36.0",
"eslint-plugin-import": "^2.20.2",
"eslint-plugin-prefer-arrow": "^1.2.2",
"install": "^0.13.0",
"jest": "^29.5.0",
"jest-cli": "^29.5.0",
"moleculer-bullmq": "^3.0.0",
"moleculer-repl": "^0.7.0",
"node-calibre": "^2.1.1",
"npm": "^8.4.1",
@@ -37,8 +39,8 @@
"typescript": "^5.0.2"
},
"dependencies": {
"@elastic/elasticsearch": "^8.6.0",
"@bluelovers/fast-glob": "https://github.com/rishighan/fast-glob-v2-api.git",
"@elastic/elasticsearch": "^8.6.0",
"@jorgeferrero/stream-to-buffer": "^2.0.6",
"@npcz/magic": "^1.3.14",
"@root/walk": "^1.1.0",
@@ -64,7 +66,6 @@
"leven": "^3.1.0",
"lodash": "^4.17.21",
"mkdirp": "^0.5.5",
"moleculer": "^0.14.29",
"moleculer-bull": "github:rishighan/moleculer-bull#1.0.0",
"moleculer-db": "^0.8.23",
"moleculer-db-adapter-mongoose": "^0.9.2",

View File

@@ -1,291 +0,0 @@
/*
* MIT License
*
* Copyright (c) 2022 Rishi Ghan
*
The MIT License (MIT)
Copyright (c) 2015 Rishi Ghan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/*
* Revision History:
* Initial: 2022/01/28 Rishi Ghan
*/
"use strict";
import { refineQuery } from "filename-parser";
import { isNil, isUndefined } from "lodash";
import { Context, Service, ServiceBroker, ServiceSchema } from "moleculer";
import BullMQMixin, { SandboxedJob } from "moleculer-bull";
import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model";
import {
extractFromArchive,
uncompressEntireArchive,
} from "../utils/uncompression.utils";
// Redis connection string for the Bull queues; falls back to a local instance.
const REDIS_URI = process.env.REDIS_URI || `redis://localhost:6379`;
const EventEmitter = require("events");
// Raise Node's default max-listener cap: the queue wiring below attaches
// several listeners per queue and would otherwise trigger leak warnings.
EventEmitter.defaultMaxListeners = 20;
console.log(`REDIS -> ${REDIS_URI}`);
/**
 * QueueService — moleculer service "importqueue".
 *
 * Registers two Redis-backed queues ("process.import" for per-archive comic
 * imports, "process.uncompressAndResize" for full-archive extraction) plus
 * REST actions that enqueue jobs and pause/resume the import queue, and
 * attaches queue lifecycle listeners in `started()`.
 *
 * NOTE(review): the mixin is imported from "moleculer-bull" but bound to the
 * name BullMQMixin and configured under a `bullmq` settings key — confirm
 * which queue backend is actually in use.
 */
export default class QueueService extends Service {
  public constructor(
    public broker: ServiceBroker,
    // The schema default only supplies the service name; the full schema is
    // built by parseServiceSchema below.
    schema: ServiceSchema<{}> = { name: "importqueue" }
  ) {
    super(broker);
    this.parseServiceSchema({
      name: "importqueue",
      // Queue mixin (Redis-backed) + DB mixin bound to the "comics" collection.
      mixins: [BullMQMixin(REDIS_URI), DbMixin("comics", Comic)],
      settings: {
        bullmq: {
          // Do not auto-retry stalled jobs.
          maxStalledCount: 0,
        },
      },
      hooks: {},
      queues: {
        // Imports one comic archive: extract cover/metadata, infer issue
        // details from the filename, then persist via "library.rawImportToDB".
        "process.import": {
          concurrency: 10,
          async process(job: SandboxedJob) {
            console.info("New job received!", job.data);
            console.info(`Processing queue...`);
            // extract the cover
            const result = await extractFromArchive(
              job.data.fileObject.filePath
            );
            const {
              name,
              filePath,
              fileSize,
              extension,
              mimeType,
              cover,
              containedIn,
              comicInfoJSON,
            } = result;
            // Infer any issue-related metadata from the filename
            const { inferredIssueDetails } = refineQuery(
              result.name
            );
            console.log(
              "Issue metadata inferred: ",
              JSON.stringify(inferredIssueDetails, null, 2)
            );
            // Add the bundleId, if present to the payload
            let bundleId = null;
            if (!isNil(job.data.bundleId)) {
              bundleId = job.data.bundleId;
            }
            // Orchestrate the payload
            const payload = {
              importStatus: {
                isImported: true,
                tagged: false,
                matchedResult: {
                  score: "0",
                },
              },
              rawFileDetails: {
                name,
                filePath,
                fileSize,
                extension,
                mimeType,
                containedIn,
                cover,
              },
              inferredMetadata: {
                issue: inferredIssueDetails,
              },
              sourcedMetadata: {
                // except for ComicInfo.xml, everything else should be copied over from the
                // parent comic
                comicInfo: comicInfoJSON,
              },
              // since we already have at least 1 copy
              // mark it as not wanted by default
              "acquisition.source.wanted": false,
              // clear out the downloads array
              // "acquisition.directconnect.downloads": [],
              // mark the metadata source
              "acquisition.source.name": job.data.sourcedFrom,
            };
            // Add the sourcedMetadata, if present
            if (!isNil(job.data.sourcedMetadata) && !isUndefined(job.data.sourcedMetadata.comicvine)) {
              Object.assign(
                payload.sourcedMetadata,
                job.data.sourcedMetadata
              );
            }
            // write to mongo
            const importResult = await this.broker.call(
              "library.rawImportToDB",
              {
                importType: job.data.importType,
                bundleId,
                payload,
              }
            );
            // The returned object is delivered to the queue's "completed"
            // listeners (see started()).
            return {
              data: {
                importResult,
              },
              id: job.id,
              worker: process.pid,
            };
          },
        },
        // Uncompresses an entire archive; lower concurrency since extraction
        // is I/O- and CPU-heavy.
        "process.uncompressAndResize": {
          concurrency: 2,
          async process(job: SandboxedJob) {
            console.log(`Initiating uncompression job...`);
            return await uncompressEntireArchive(
              job.data.filePath,
              job.data.options
            );
          },
        },
      },
      actions: {
        // REST wrapper: enqueue a full-archive uncompression job.
        uncompressResize: {
          rest: "POST /uncompressResize",
          params: {},
          async handler(
            ctx: Context<{
              data: { filePath: string; options: any };
            }>
          ) {
            return await this.createJob(
              "process.uncompressAndResize",
              ctx.params
            );
          },
        },
        // REST wrapper: enqueue one import job for a single comic archive.
        processImport: {
          rest: "POST /processImport",
          params: {},
          async handler(
            ctx: Context<{
              fileObject: object;
              importType: string;
              bundleId: number;
              sourcedFrom?: string;
              sourcedMetadata: object;
            }>
          ) {
            return await this.createJob("process.import", {
              fileObject: ctx.params.fileObject,
              importType: ctx.params.importType,
              bundleId: ctx.params.bundleId,
              sourcedFrom: ctx.params.sourcedFrom,
              sourcedMetadata: ctx.params.sourcedMetadata,
            });
          },
        },
        // Pauses or resumes the "process.import" queue.
        // NOTE(review): the REST path says "pauseImportQueue" but the action
        // also resumes; unknown actions fall through returning undefined.
        toggleImportQueue: {
          rest: "POST /pauseImportQueue",
          params: {},
          handler: async (ctx: Context<{ action: string }>) => {
            switch (ctx.params.action) {
              case "pause":
                const foo = await this.getQueue(
                  "process.import"
                ).pause();
                console.log("paused", foo);
                return foo;
              case "resume":
                const soo = await this.getQueue(
                  "process.import"
                ).resume();
                console.log("resumed", soo);
                return soo;
              default:
                console.log("Unrecognized queue action.");
            }
          },
        },
      },
      methods: {},
      // Service lifecycle hook: attach queue event listeners once the broker
      // has started this service.
      async started(): Promise<any> {
        // Log failures together with the job payload for post-mortem debugging.
        await this.getQueue("process.import").on(
          "failed",
          async (job, error) => {
            console.error(
              `An error occured in 'process.import' queue on job id '${job.id}': ${error.message}`
            );
            console.error(job.data);
          }
        );
        // Broadcast each completed import over the socket gateway so the UI
        // can react (cover extracted).
        await this.getQueue("process.import").on(
          "completed",
          async (job, res) => {
            await this.broker.call("socket.broadcast", {
              namespace: "/", //optional
              event: "action",
              args: [{ type: "LS_COVER_EXTRACTED", result: res }], //optional
            });
            console.info(
              `Import Job with the id '${job.id}' completed.`
            );
          }
        );
        // Stalled jobs are only logged (maxStalledCount above is 0).
        await this.getQueue("process.import").on(
          "stalled",
          async (job) => {
            console.warn(`Import job '${job.id} stalled!`);
            console.log(`${JSON.stringify(job, null, 2)}`);
            console.log(`is stalled.`);
          }
        );
        // Notify socket clients when a full-archive extraction finishes;
        // `purpose` tells the client why the extraction ran.
        await this.getQueue("process.uncompressAndResize").on(
          "completed",
          async (job, res) => {
            await this.broker.call("socket.broadcast", {
              namespace: "/",
              event: "action",
              args: [
                {
                  type: "COMICBOOK_EXTRACTION_SUCCESS",
                  result: {
                    files: res,
                    purpose: job.data.options.purpose,
                  },
                },
              ],
            });
            console.info(`Uncompression Job ${job.id} completed.`);
          }
        );
      },
    });
  }
}

View File

@@ -0,0 +1,152 @@
import {
Context,
Service,
ServiceBroker,
ServiceSchema,
Errors,
} from "moleculer";
// import { BullMQAdapter, JobStatus, BullMqMixin } from 'moleculer-bullmq';
import { refineQuery } from "filename-parser";
import BullMqMixin from 'moleculer-bullmq';
import { extractFromArchive } from "../utils/uncompression.utils";
import { isNil, isUndefined } from "lodash";
/**
 * JobQueueService — moleculer service "jobqueue".
 *
 * BullMQ-backed replacement for the old "importqueue" service. The public
 * `enqueue` action pushes a job onto the local queue; the `enqueue.async`
 * action is the queue worker: it extracts archive metadata, infers issue
 * details from the filename, assembles the import payload, and persists it
 * via "library.rawImportToDB". Queue lifecycle is observed through the
 * `${queueName}.${event}` moleculer events at the bottom.
 */
export default class JobQueueService extends Service {
	public constructor(public broker: ServiceBroker) {
		super(broker);
		this.parseServiceSchema({
			name: "jobqueue",
			hooks: {},
			mixins: [BullMqMixin],
			settings: {
				bullmq: {
					// Redis connection for BullMQ.
					client: process.env.REDIS_URI,
				}
			},
			actions: {
				enqueue: {
					queue: true,
					// NOTE(review): moleculer REST strings are normally of the form
					// "GET /enqueue"; "/GET enqueue" looks transposed — confirm the
					// intended route before relying on it.
					rest: "/GET enqueue",
					/**
					 * Enqueue an import job carrying the caller's params.
					 * @returns the BullMQ job id.
					 */
					handler: async (ctx: Context<{}>) => {
						// Enqueue the job
						const job = await this.localQueue(ctx, 'enqueue.async', ctx.params, { priority: 10 });
						console.log(`Job ${job.id} enqueued`);
						return job.id;
					}
				},
				"enqueue.async": {
					/**
					 * Queue worker: processes one comic-archive import job.
					 * Job params: fileObject { filePath, fileSize }, importType,
					 * optional bundleId, sourcedFrom, sourcedMetadata.
					 */
					handler: async (ctx: Context<{}>) => {
						console.log(`Recieved Job ID ${ctx.locals.job.id}, processing...`);
						// 1. De-structure the job params once; all later reads go
						// through this alias instead of repeating the long path.
						const params = ctx.locals.job.data.params;
						const { fileObject } = params;
						// 2. Extract metadata from the archive
						const result = await extractFromArchive(fileObject.filePath);
						const {
							name,
							filePath,
							fileSize,
							extension,
							mimeType,
							cover,
							containedIn,
							comicInfoJSON,
						} = result;
						// 3a. Infer any issue-related metadata from the filename
						const { inferredIssueDetails } = refineQuery(
							result.name
						);
						console.log(
							"Issue metadata inferred: ",
							JSON.stringify(inferredIssueDetails, null, 2)
						);
						// 3c. Orchestrate the payload
						const payload = {
							importStatus: {
								isImported: true,
								tagged: false,
								matchedResult: {
									score: "0",
								},
							},
							rawFileDetails: {
								name,
								filePath,
								fileSize,
								extension,
								mimeType,
								containedIn,
								cover,
							},
							inferredMetadata: {
								issue: inferredIssueDetails,
							},
							sourcedMetadata: {
								// except for ComicInfo.xml, everything else should be copied over from the
								// parent comic
								comicInfo: comicInfoJSON,
							},
							// since we already have at least 1 copy
							// mark it as not wanted by default
							"acquisition.source.wanted": false,
							// clear out the downloads array
							// "acquisition.directconnect.downloads": [],
							// mark the metadata source
							"acquisition.source.name": params.sourcedFrom,
						}
						// Add the bundleId, if present to the payload
						let bundleId = null;
						if (!isNil(params.bundleId)) {
							bundleId = params.bundleId;
						}
						// Add the sourcedMetadata, if present.
						// BUG FIX: previous code merged `ctx.locals.job.data.paramssourcedMetadata`
						// (missing the ".params." segment), which is always undefined —
						// so sourced metadata was silently never copied into the payload.
						if (
							!isNil(params.sourcedMetadata) &&
							!isUndefined(params.sourcedMetadata.comicvine)
						) {
							Object.assign(
								payload.sourcedMetadata,
								params.sourcedMetadata
							);
						}
						// write to mongo
						const importResult = await this.broker.call(
							"library.rawImportToDB",
							{
								importType: params.importType,
								bundleId,
								payload,
							}
						);
						// Returned object is surfaced via the queue's completion event.
						return {
							data: {
								importResult,
							},
							id: ctx.locals.job.id,
						};
					}
				},
			},
			events: {
				// use the `${QUEUE_NAME}.QUEUE_EVENT` scheme
				async "enqueue.async.active"(ctx) {
					console.log(`Job ID ${ctx.params.id} is set to active.`);
				},
				async "enqueue.async.completed" (ctx) {
					console.log(`Job ID ${ctx.params.id} completed.`);
				}
			}
		});
	}
}

View File

@@ -66,6 +66,17 @@ export default class ImportService extends Service {
mixins: [DbMixin("comics", Comic)],
hooks: {},
actions: {
getHealthInformation: {
rest: "GET /getHealthInformation",
params: {},
handler: async (ctx: Context<{}>) => {
try {
return await ctx.broker.call("$node.services");
} catch (error) {
return new Error("Service is down.");
}
},
},
walkFolders: {
rest: "POST /walkFolders",
params: {
@@ -139,61 +150,72 @@ export default class ImportService extends Service {
},
newImport: {
rest: "POST /newImport",
params: {},
// params: {},
async handler(
ctx: Context<{
extractionOptions?: any;
}>
) {
// 1. Walk the Source folder
klaw(path.resolve(COMICS_DIRECTORY))
// 1.1 Filter on .cb* extensions
.pipe(
through2.obj(function (item, enc, next) {
let fileExtension = path.extname(item.path);
if (
[".cbz", ".cbr", ".cb7"].includes(
fileExtension
)
) {
this.push(item);
}
next();
})
)
// 1.2 Pipe filtered results to the next step
.on("data", async (item) => {
console.info(
"Found a file at path: %s",
item.path
);
let comicExists = await Comic.exists({
"rawFileDetails.name": `${path.basename(
item.path,
path.extname(item.path)
)}`,
});
if (!comicExists) {
// 2. Send the extraction job to the queue
await broker.call(
"importqueue.processImport",
{
try {
// 1. Walk the Source folder
klaw(path.resolve(COMICS_DIRECTORY))
// 1.1 Filter on .cb* extensions
.pipe(
through2.obj(function(item, enc, next) {
let fileExtension = path.extname(item.path);
if (
[".cbz", ".cbr", ".cb7"].includes(
fileExtension
)
) {
this.push(item);
}
next();
})
)
// 1.2 Pipe filtered results to the next step
.on("data", async (item) => {
console.info(
"Found a file at path: %s",
item.path
);
let comicExists = await Comic.exists({
"rawFileDetails.name": `${path.basename(
item.path,
path.extname(item.path)
)}`,
});
if (!comicExists) {
// 2. Send the extraction job to the queue
// await broker.call(
// "importqueue.processImport",
// {
// fileObject: {
// filePath: item.path,
// fileSize: item.stats.size,
// },
// importType: "new",
// }
// );
this.broker.call('jobqueue.enqueue', {
fileObject: {
filePath: item.path,
fileSize: item.stats.size,
},
importType: "new",
}
);
} else {
console.log(
"Comic already exists in the library."
);
}
})
.on("end", () => {
console.log("All files traversed.");
});
});
} else {
console.log(
"Comic already exists in the library."
);
}
})
.on("end", () => {
console.log("All files traversed.");
});
} catch (error) {
console.log(error);
}
},
},
@@ -564,9 +586,9 @@ export default class ImportService extends Service {
{
$match: {
"sourcedMetadata.comicvine.volumeInformation":
{
$gt: {},
},
{
$gt: {},
},
},
},
{