🏗 Refactoring the import process (WIP)

This commit is contained in:
2021-11-30 22:44:06 -08:00
parent e8d21d4292
commit 1fd5f5b6cf
12 changed files with 380 additions and 1046 deletions

View File

@@ -7,7 +7,11 @@ import {
Errors,
} from "moleculer";
import BullMQMixin from "moleculer-bull";
const REDIS_URI = process.env.REDIS_URI || `redis://0.0.0.0:6379`;
import { SandboxedJob } from "moleculer-bull";
import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model";
import { extractCoverFromFile2 } from "../utils/uncompression.utils";
const REDIS_URI = process.env.REDIS_URI || `redis://0.0.0.0:6379`;
export default class LibraryQueueService extends Service {
public constructor(
@@ -15,22 +19,45 @@ export default class LibraryQueueService extends Service {
schema: ServiceSchema<{}> = { name: "libraryqueue" }
) {
super(broker);
console.log(this.io);
this.parseServiceSchema(
Service.mergeSchemas(
{
name: "libraryqueue",
mixins: [BullMQMixin(REDIS_URI)],
mixins: [BullMQMixin(REDIS_URI), DbMixin("comics", Comic)],
settings: {},
hooks: {},
queues: {
"process.import": {
async process(job) {
this.logger.info("New job received!", job.data);
this.logger.info(`Processing queue...`);
const result = await this.broker.call('import.processAndImportToDB', job.data);
async process(job: SandboxedJob) {
console.info("New job received!", job.data);
console.info(`Processing queue...`);
// extract the cover
const result = await extractCoverFromFile2(
job.data.fileObject
);
// write to mongo
const dbImportResult = await this.broker.call(
"import.rawImportToDB",
{
importStatus: {
isImported: true,
tagged: false,
matchedResult: {
score: "0",
},
},
rawFileDetails: result,
sourcedMetadata: {
comicvine: {},
},
},
{}
);
return Promise.resolve({
result,
dbImportResult,
id: job.id,
worker: process.pid,
});
@@ -41,40 +68,42 @@ export default class LibraryQueueService extends Service {
enqueue: {
rest: "POST /enqueue",
params: {},
async handler(ctx: Context<{ extractionOptions: object, walkedFolders: object}>) {
async handler(
ctx: Context<{
fileObject: object;
}>
) {
return await this.createJob("process.import", {
extractionOptions: ctx.params.extractionOptions,
walkedFolders: ctx.params.walkedFolders,
fileObject: ctx.params.fileObject,
});
},
},
},
methods: {},
async started(): Promise<any> {
const failed = await this.getQueue(
"process.import"
).on("failed", async (job, error) => {
this.logger.error(
`An error occured in 'mail.send' queue on job id '${job.id}': ${error.message}`
);
});
const completed = await this.getQueue(
"process.import"
).on("completed", async (job, res) => {
this.logger.info(
`Job with the id '${job.id}' completed.`
);
});
const stalled = await this.getQueue(
"process.import"
).on("stalled", async (job) => {
this.logger.warn(
`The job with the id '${job} got stalled!`
);
});
},
const failed = await this.getQueue("process.import").on(
"failed",
async (job, error) => {
console.error(
`An error occured in 'process.import' queue on job id '${job.id}': ${error.message}`
);
}
);
const completed = await this.getQueue(
"process.import"
).on("completed", async (job, res) => {
console.info(
`Job with the id '${job.id}' completed.`
);
});
const stalled = await this.getQueue(
"process.import"
).on("stalled", async (job) => {
console.warn(
`The job with the id '${job} got stalled!`
);
});
},
},
schema
)