🔢 Persisting the sessionId in the JobResult
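Summary of the change: the socket-scoped socketSessionId is renamed to a plain sessionId, threaded from the import action's params through jobqueue.enqueue and into the enqueue.async handler's return value, and persisted on the JobResult record for both completed and failed jobs (read back via job.returnvalue.sessionId). The now-unused LS_IMPORT socket branch and the socketSessionId local are removed, and several long statements are reflowed.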
@@ -3,6 +3,7 @@ const mongoose = require("mongoose");
 const JobResultScehma = mongoose.Schema({
   id: Number,
   status: String,
+  sessionId: String,
   failedReason: Object,
   timestamp: Date,
 });
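For context, a JobResult document persisted with this schema now carries the originating session. An illustrative document (all values hypothetical):

    {
      id: 42,                  // queue job id
      status: "completed",
      sessionId: "abc123",     // hypothetical socket session id
      failedReason: {},
      timestamp: new Date(),
    }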
@@ -61,12 +61,12 @@ export default class JobQueueService extends Service {
     "enqueue.async": {
       handler: async (
         ctx: Context<{
-          socketSessionId: String;
+          sessionId: String;
         }>
       ) => {
         try {
           console.log(`Recieved Job ID ${ctx.locals.job.id}, processing...`);
+          console.log(ctx.params);
           // 1. De-structure the job params
           const { fileObject } = ctx.locals.job.data.params;
 
@@ -155,14 +155,14 @@ export default class JobQueueService extends Service {
             importResult,
           },
           id: ctx.locals.job.id,
-          socketSessionId: ctx.params.socketSessionId,
+          sessionId: ctx.params.sessionId,
         };
       } catch (error) {
         console.error(
           `An error occurred processing Job ID ${ctx.locals.job.id}`
         );
         throw new MoleculerError(error, 500, "IMPORT_JOB_ERROR", {
-          data: ctx.params.socketSessionId,
+          data: ctx.params.sessionId,
         });
       }
     },
@@ -227,6 +227,7 @@ export default class JobQueueService extends Service {
         id: ctx.params.id,
         status: "completed",
         timestamp: job.timestamp,
+        sessionId: job.returnvalue.sessionId,
         failedReason: {},
       });
 
@@ -242,6 +243,7 @@ export default class JobQueueService extends Service {
         id: ctx.params.id,
         status: "failed",
         failedReason: job.failedReason,
+        sessionId: job.returnvalue.sessionId,
         timestamp: job.timestamp,
       });
 
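Why the handler must return the sessionId: in BullMQ, whatever a processor returns is stored on the job and exposed as job.returnvalue, which is exactly what the completed/failed persistence above reads. A minimal sketch of that mechanism (queue name and job id are assumptions, not from this commit):

    import { Queue } from "bullmq";

    const queue = new Queue("jobqueue");   // hypothetical queue name
    const job = await queue.getJob("42");  // hypothetical job id
    // `returnvalue` is the object returned by the "enqueue.async" handler,
    // so after this commit it includes the sessionId persisted above.
    console.log(job?.returnvalue.sessionId);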
@@ -33,13 +33,7 @@ SOFTWARE.
 
 "use strict";
 import { isNil } from "lodash";
-import {
-  Context,
-  Service,
-  ServiceBroker,
-  ServiceSchema,
-  Errors,
-} from "moleculer";
+import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
 import { DbMixin } from "../mixins/db.mixin";
 import Comic from "../models/comic.model";
 import { walkFolder, getSizeOfDirectory } from "../utils/file.utils";
@@ -101,9 +95,7 @@ export default class ImportService extends Service {
     uncompressFullArchive: {
       rest: "POST /uncompressFullArchive",
       params: {},
-      handler: async (
-        ctx: Context<{ filePath: string; options: any }>
-      ) => {
+      handler: async (ctx: Context<{ filePath: string; options: any }>) => {
         await broker.call("importqueue.uncompressResize", {
           filePath: ctx.params.filePath,
           options: ctx.params.options,
@@ -121,8 +113,7 @@ export default class ImportService extends Service {
         });
         // Determine source where the comic was added from
         // and gather identifying information about it
-        const sourceName =
-          referenceComicObject[0].acquisition.source.name;
+        const sourceName = referenceComicObject[0].acquisition.source.name;
         const { sourcedMetadata } = referenceComicObject[0];
 
         const filePath = `${COMICS_DIRECTORY}/${ctx.params.bundle.data.name}`;
@@ -155,32 +146,28 @@ export default class ImportService extends Service {
       async handler(
         ctx: Context<{
           extractionOptions?: any;
+          sessionId: string;
         }>
       ) {
         try {
+          // Get params to be passed to the import jobs
+          const { sessionId } = ctx.params;
           // 1. Walk the Source folder
           klaw(path.resolve(COMICS_DIRECTORY))
             // 1.1 Filter on .cb* extensions
             .pipe(
-              through2.obj(function(item, enc, next) {
+              through2.obj(function (item, enc, next) {
                 let fileExtension = path.extname(item.path);
-                if (
-                  [".cbz", ".cbr", ".cb7"].includes(
-                    fileExtension
-                  )
-                ) {
+                if ([".cbz", ".cbr", ".cb7"].includes(fileExtension)) {
                   this.push(item);
                 }
                 next();
               })
             )
             // 1.2 Pipe filtered results to the next step
             // Enqueue the job in the queue
             .on("data", async (item) => {
-              console.info(
-                "Found a file at path: %s",
-                item.path
-              );
+              console.info("Found a file at path: %s", item.path);
               let comicExists = await Comic.exists({
                 "rawFileDetails.name": `${path.basename(
                   item.path,
@@ -192,17 +179,16 @@ export default class ImportService extends Service {
                 await pubClient.set("completedJobCount", 0);
                 await pubClient.set("failedJobCount", 0);
                 // 2.2 Send the extraction job to the queue
-                this.broker.call('jobqueue.enqueue', {
+                this.broker.call("jobqueue.enqueue", {
                   fileObject: {
                     filePath: item.path,
                     fileSize: item.stats.size,
                   },
+                  sessionId,
                   importType: "new",
                 });
               } else {
-                console.log(
-                  "Comic already exists in the library."
-                );
+                console.log("Comic already exists in the library.");
               }
             })
             .on("end", () => {
@@ -254,28 +240,19 @@ export default class ImportService extends Service {
           // we solicit volume information and add that to mongo
           if (
             comicMetadata.sourcedMetadata.comicvine &&
-            !isNil(
-              comicMetadata.sourcedMetadata.comicvine
-                .volume
-            )
+            !isNil(comicMetadata.sourcedMetadata.comicvine.volume)
           ) {
-            volumeDetails = await this.broker.call(
-              "comicvine.getVolumes",
-              {
-                volumeURI:
-                  comicMetadata.sourcedMetadata
-                    .comicvine.volume
-                    .api_detail_url,
-              }
-            );
+            volumeDetails = await this.broker.call("comicvine.getVolumes", {
+              volumeURI:
+                comicMetadata.sourcedMetadata.comicvine.volume
+                  .api_detail_url,
+            });
             comicMetadata.sourcedMetadata.comicvine.volumeInformation =
               volumeDetails.results;
           }
 
           console.log("Saving to Mongo...");
-          console.log(
-            `Import type: [${ctx.params.importType}]`
-          );
+          console.log(`Import type: [${ctx.params.importType}]`);
           switch (ctx.params.importType) {
             case "new":
               return await Comic.create(comicMetadata);
@@ -296,10 +273,7 @@ export default class ImportService extends Service {
           }
         } catch (error) {
           console.log(error);
-          throw new Errors.MoleculerError(
-            "Import failed.",
-            500
-          );
+          throw new Errors.MoleculerError("Import failed.", 500);
         }
       },
     },
@@ -317,9 +291,7 @@ export default class ImportService extends Service {
       ) {
         // 1. Find mongo object by id
         // 2. Import payload into sourcedMetadata.comicvine
-        const comicObjectId = new ObjectId(
-          ctx.params.comicObjectId
-        );
+        const comicObjectId = new ObjectId(ctx.params.comicObjectId);
 
         return new Promise(async (resolve, reject) => {
           let volumeDetails = {};
@@ -328,18 +300,15 @@ export default class ImportService extends Service {
             const volumeDetails = await this.broker.call(
               "comicvine.getVolumes",
               {
-                volumeURI:
-                  matchedResult.volume.api_detail_url,
+                volumeURI: matchedResult.volume.api_detail_url,
               }
             );
-            matchedResult.volumeInformation =
-              volumeDetails.results;
+            matchedResult.volumeInformation = volumeDetails.results;
             Comic.findByIdAndUpdate(
               comicObjectId,
               {
                 $set: {
-                  "sourcedMetadata.comicvine":
-                    matchedResult,
+                  "sourcedMetadata.comicvine": matchedResult,
                 },
               },
               { new: true },
@@ -370,9 +339,7 @@ export default class ImportService extends Service {
       }>
     ) {
       console.log(JSON.stringify(ctx.params, null, 2));
-      const comicObjectId = new ObjectId(
-        ctx.params.comicObjectId
-      );
+      const comicObjectId = new ObjectId(ctx.params.comicObjectId);
 
       return new Promise((resolve, reject) => {
         Comic.findByIdAndUpdate(
@@ -426,9 +393,7 @@ export default class ImportService extends Service {
       params: { ids: "array" },
       handler: async (ctx: Context<{ ids: [string] }>) => {
         console.log(ctx.params.ids);
-        const queryIds = ctx.params.ids.map(
-          (id) => new ObjectId(id)
-        );
+        const queryIds = ctx.params.ids.map((id) => new ObjectId(id));
         return await Comic.find({
           _id: {
             $in: queryIds,
@@ -444,8 +409,7 @@ export default class ImportService extends Service {
         const volumes = await Comic.aggregate([
           {
             $project: {
-              volumeInfo:
-                "$sourcedMetadata.comicvine.volumeInformation",
+              volumeInfo: "$sourcedMetadata.comicvine.volumeInformation",
             },
           },
           {
@@ -491,52 +455,46 @@ export default class ImportService extends Service {
         const { queryObjects } = ctx.params;
         // construct the query for ElasticSearch
         let elasticSearchQuery = {};
-        const elasticSearchQueries = queryObjects.map(
-          (queryObject) => {
-            console.log("Volume: ", queryObject.volumeName);
-            console.log("Issue: ", queryObject.issueName);
-            if (queryObject.issueName === null) {
-              queryObject.issueName = "";
-            }
-            if (queryObject.volumeName === null) {
-              queryObject.volumeName = "";
-            }
-            elasticSearchQuery = {
-              bool: {
-                must: [
-                  {
-                    match_phrase: {
-                      "rawFileDetails.name":
-                        queryObject.volumeName,
-                    },
-                  },
-                  {
-                    term: {
-                      "inferredMetadata.issue.number":
-                        parseInt(
-                          queryObject.issueNumber,
-                          10
-                        ),
-                    },
-                  },
-                ],
-              },
-            };
-
-            return [
-              {
-                index: "comics",
-                search_type: "dfs_query_then_fetch",
-              },
-              {
-                query: elasticSearchQuery,
-              },
-            ];
-          }
-        );
-        console.log(
-          JSON.stringify(elasticSearchQueries, null, 2)
-        );
+        const elasticSearchQueries = queryObjects.map((queryObject) => {
+          console.log("Volume: ", queryObject.volumeName);
+          console.log("Issue: ", queryObject.issueName);
+          if (queryObject.issueName === null) {
+            queryObject.issueName = "";
+          }
+          if (queryObject.volumeName === null) {
+            queryObject.volumeName = "";
+          }
+          elasticSearchQuery = {
+            bool: {
+              must: [
+                {
+                  match_phrase: {
+                    "rawFileDetails.name": queryObject.volumeName,
+                  },
+                },
+                {
+                  term: {
+                    "inferredMetadata.issue.number": parseInt(
+                      queryObject.issueNumber,
+                      10
+                    ),
+                  },
+                },
+              ],
+            },
+          };
+
+          return [
+            {
+              index: "comics",
+              search_type: "dfs_query_then_fetch",
+            },
+            {
+              query: elasticSearchQuery,
+            },
+          ];
+        });
+        console.log(JSON.stringify(elasticSearchQueries, null, 2));
 
         return await ctx.broker.call("search.searchComic", {
           elasticSearchQueries,
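Each element the map above produces is a header/body pair, presumably in the shape ElasticSearch's multi-search (msearch) API expects. For a hypothetical queryObject of { volumeName: "Saga", issueName: null, issueNumber: "11" }, the emitted pair would be:

    [
      { index: "comics", search_type: "dfs_query_then_fetch" },
      {
        query: {
          bool: {
            must: [
              { match_phrase: { "rawFileDetails.name": "Saga" } },
              { term: { "inferredMetadata.issue.number": 11 } },
            ],
          },
        },
      },
    ]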
@@ -549,10 +507,11 @@ export default class ImportService extends Service {
       rest: "GET /libraryStatistics",
       params: {},
       handler: async (ctx: Context<{}>) => {
-        const comicDirectorySize = await getSizeOfDirectory(
-          COMICS_DIRECTORY,
-          [".cbz", ".cbr", ".cb7"]
-        );
+        const comicDirectorySize = await getSizeOfDirectory(COMICS_DIRECTORY, [
+          ".cbz",
+          ".cbr",
+          ".cb7",
+        ]);
         const totalCount = await Comic.countDocuments({});
         const statistics = await Comic.aggregate([
           {
@@ -561,11 +520,7 @@ export default class ImportService extends Service {
             {
               $match: {
                 "rawFileDetails.extension": {
-                  $in: [
-                    ".cbr",
-                    ".cbz",
-                    ".cb7",
-                  ],
+                  $in: [".cbr", ".cbz", ".cb7"],
                 },
               },
             },
@@ -579,8 +534,7 @@ export default class ImportService extends Service {
             issues: [
               {
                 $match: {
-                  "sourcedMetadata.comicvine.volumeInformation":
-                    {
+                  "sourcedMetadata.comicvine.volumeInformation": {
                     $gt: {},
                   },
                 },
@@ -645,23 +599,16 @@ export default class ImportService extends Service {
           .drop()
           .then(async (data) => {
             console.info(data);
-            const coversFolderDeleteResult =
-              fsExtra.emptyDirSync(
-                path.resolve(
-                  `${USERDATA_DIRECTORY}/covers`
-                )
-              );
-            const expandedFolderDeleteResult =
-              fsExtra.emptyDirSync(
-                path.resolve(
-                  `${USERDATA_DIRECTORY}/expanded`
-                )
-              );
-            const eSIndicesDeleteResult =
-              await ctx.broker.call(
-                "search.deleteElasticSearchIndices",
-                {}
-              );
+            const coversFolderDeleteResult = fsExtra.emptyDirSync(
+              path.resolve(`${USERDATA_DIRECTORY}/covers`)
+            );
+            const expandedFolderDeleteResult = fsExtra.emptyDirSync(
+              path.resolve(`${USERDATA_DIRECTORY}/expanded`)
+            );
+            const eSIndicesDeleteResult = await ctx.broker.call(
+              "search.deleteElasticSearchIndices",
+              {}
+            );
             return {
               data,
               coversFolderDeleteResult,
@@ -1,6 +1,6 @@
 "use strict";
 import { Service, ServiceBroker, ServiceSchema, Context } from "moleculer";
-import {JobType} from "moleculer-bullmq";
+import { JobType } from "moleculer-bullmq";
 import { createClient } from "redis";
 import { createAdapter } from "@socket.io/redis-adapter";
 import Session from "../models/session.model";
@@ -16,7 +16,6 @@ export default class SocketService extends Service {
     schema: ServiceSchema<{}> = { name: "socket" }
   ) {
     super(broker);
-    let socketSessionId = null;
     this.parseServiceSchema({
       name: "socket",
       mixins: [SocketIOService],
@@ -41,17 +40,25 @@ export default class SocketService extends Service {
             if (
               sessionRecord.length !== 0 &&
               sessionRecord[0].sessionId ===
                 data.session.sessionId
             ) {
               // 2. Find if the queue has active jobs
-              const jobs: JobType = await this.broker.call("jobqueue.getJobCountsByType", {})
+              const jobs: JobType = await this.broker.call(
+                "jobqueue.getJobCountsByType",
+                {}
+              );
               const { active, prioritized } = jobs;
 
               if (active > 0 && prioritized > 0) {
                 // 3. Get job counts
-                const completedJobCount = await pubClient.get("completedJobCount");
-                const failedJobCount = await pubClient.get("failedJobCount");
+                const completedJobCount =
+                  await pubClient.get(
+                    "completedJobCount"
+                  );
+                const failedJobCount = await pubClient.get(
+                  "failedJobCount"
+                );
 
                 // 4. Send the counts to the active socket.io session
                 await this.broker.call("socket.broadcast", {
                   namespace: "/",
@@ -66,9 +73,6 @@ export default class SocketService extends Service {
               ],
             });
           }
-
-
-
         }
       } catch (err) {
         throw new MoleculerError(
@@ -83,19 +87,6 @@ export default class SocketService extends Service {
 
             break;
 
-          case "LS_IMPORT":
-            console.log(`Recieved ${data.type} event.`);
-            // 1. Send task to queue
-            await this.broker.call(
-              "library.newImport",
-              {
-                data: data.data,
-                socketSessionId,
-              },
-              {}
-            );
-            break;
-
           case "LS_SET_QUEUE_STATUS":
             console.log(data);
             await this.broker.call(
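The payoff of persisting the sessionId: all job results belonging to one import session can later be recovered with a single query. A minimal sketch (the model registration is assumed; this commit only shows the schema, whose identifier is spelled JobResultScehma in the source):

    const mongoose = require("mongoose");

    const JobResult = mongoose.model("JobResult", JobResultScehma);
    // Every result produced during one session (the id is hypothetical):
    const results = await JobResult.find({ sessionId: "abc123" });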