🏗️ Refactor import of downloaded comics to use socket.io
package-lock.json (generated): 18 lines changed
@@ -45,7 +45,6 @@
         "mongoose-paginate-v2": "^1.3.18",
         "nats": "^1.3.2",
         "node-calibre": "^2.1.1",
-        "node-unrar-js": "^1.0.5",
         "opds-extra": "^3.0.9",
         "p7zip-threetwo": "^1.0.4",
         "sanitize-filename-ts": "^1.0.2",
@@ -73,7 +72,7 @@
         "typescript": "^4.6.4"
       },
       "engines": {
-        "node": ">= 10.x.x"
+        "node": ">= 18.x.x"
       }
     },
     "node_modules/@ampproject/remapping": {
@@ -8513,14 +8512,6 @@
         "url": "https://github.com/sponsors/antelle"
       }
     },
-    "node_modules/node-unrar-js": {
-      "version": "1.0.6",
-      "resolved": "https://registry.npmjs.org/node-unrar-js/-/node-unrar-js-1.0.6.tgz",
-      "integrity": "sha512-jObs9hXUmXldlQI04/oYb+97RolAx4aJO08Rz5mRE/wFdwzcftuffEPZLZao+c0nKiwXCT77ZUN+sp/5lnpA2w==",
-      "engines": {
-        "node": ">=10.0.0"
-      }
-    },
     "node_modules/normalize-path": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
@@ -20336,7 +20327,7 @@
     },
     "moleculer-bull": {
       "version": "git+ssh://git@github.com/rishighan/moleculer-bull.git#487020c3f3b4879bbf1bb40c75f19a259d17dd59",
-      "from": "moleculer-bull@rishighan/moleculer-bull#1.0.0",
+      "from": "moleculer-bull@github:rishighan/moleculer-bull#1.0.0",
       "requires": {
         "bull": "^4.10.2",
         "lodash": "^4.17.21"
@@ -20686,11 +20677,6 @@
     "resolved": "https://registry.npmjs.org/node-stream-zip/-/node-stream-zip-1.15.0.tgz",
     "integrity": "sha512-LN4fydt9TqhZhThkZIVQnF9cwjU3qmUH9h78Mx/K7d3VvfRqqwthLwJEUOEL0QPZ0XQmNN7be5Ggit5+4dq3Bw=="
   },
-  "node-unrar-js": {
-    "version": "1.0.6",
-    "resolved": "https://registry.npmjs.org/node-unrar-js/-/node-unrar-js-1.0.6.tgz",
-    "integrity": "sha512-jObs9hXUmXldlQI04/oYb+97RolAx4aJO08Rz5mRE/wFdwzcftuffEPZLZao+c0nKiwXCT77ZUN+sp/5lnpA2w=="
-  },
   "normalize-path": {
     "version": "3.0.0",
     "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",

@@ -73,7 +73,6 @@
     "mongoose-paginate-v2": "^1.3.18",
     "nats": "^1.3.2",
     "node-calibre": "^2.1.1",
-    "node-unrar-js": "^1.0.5",
     "opds-extra": "^3.0.9",
     "p7zip-threetwo": "^1.0.4",
     "sanitize-filename-ts": "^1.0.2",
@@ -85,7 +84,7 @@
     "xml2js": "^0.4.23"
   },
   "engines": {
-    "node": ">= 10.x.x"
+    "node": ">= 18.x.x"
   },
   "jest": {
     "coverageDirectory": "<rootDir>/coverage",

@@ -34,6 +34,7 @@ SOFTWARE.
 "use strict";

 import { refineQuery } from "filename-parser";
+import { isNil, isUndefined } from "lodash";
 import { Context, Service, ServiceBroker, ServiceSchema } from "moleculer";
 import BullMQMixin, { SandboxedJob } from "moleculer-bull";
 import { DbMixin } from "../mixins/db.mixin";
@@ -92,10 +93,14 @@ export default class QueueService extends Service {
           "Issue metadata inferred: ",
           JSON.stringify(inferredIssueDetails, null, 2)
         );
-
-        // write to mongo
-        console.log("Writing to mongo...");
-        await this.broker.call("library.rawImportToDB", {
+        // Add the bundleId, if present to the payload
+        let bundleId = null;
+        if (!isNil(job.data.bundleId)) {
+          bundleId = job.data.bundleId;
+        }
+
+        // Orchestrate the payload
+        const payload = {
           importStatus: {
             isImported: true,
             tagged: false,
@@ -115,30 +120,41 @@ export default class QueueService extends Service {
             issue: inferredIssueDetails,
           },
           sourcedMetadata: {
             // except for ComicInfo.xml, everything else should be copied over from the
             // parent comic
             comicInfo: comicInfoJSON,
             comicvine: {},
           },
           // since we already have at least 1 copy
           // mark it as not wanted by default
-          acquisition: {
-            source: {
-              wanted: false,
-            },
-            directconnect: {
-              downloads: [],
-            },
-          },
-        });
+          "acquisition.source.wanted": false,
+
+          // clear out the downloads array
+          // "acquisition.directconnect.downloads": [],
+
+          // mark the metadata source
+          "acquisition.source.name": job.data.sourcedFrom,
+        };
+
+        // Add the sourcedMetadata, if present
+        if (!isNil(job.data.sourcedMetadata) && !isUndefined(job.data.sourcedMetadata.comicvine)) {
+          Object.assign(
+            payload.sourcedMetadata,
+            job.data.sourcedMetadata
+          );
+        }
+
+        // write to mongo
+        const importResult = await this.broker.call(
+          "library.rawImportToDB",
+          {
+            importType: job.data.importType,
+            bundleId,
+            payload,
+          }
+        );
         return {
           data: {
             result,
             inferredMetadata: {
               issue: inferredIssueDetails,
             },
             sourcedMetadata: {
               comicInfo: comicInfoJSON,
               comicvine: {},
             },
+            importResult,
           },
           id: job.id,
           worker: process.pid,
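Taken together, the two hunks above change the worker from calling library.rawImportToDB with one inline document to first orchestrating a payload object that mixes nested fields with Mongo dot-notation keys. A condensed sketch of the resulting flow; the standalone function signature and the job shape are assumptions based on the diff, not the repository's exact code:

```typescript
// Sketch only. The names (job.data.*, "library.rawImportToDB") are from the
// diff; the helper function and the `any` types are assumptions.
import { isNil, isUndefined } from "lodash";

async function orchestrateImportPayload(broker: any, job: any, inferredIssueDetails: any, comicInfoJSON: any) {
  // Carry the bundleId through only when the job actually has one
  let bundleId = null;
  if (!isNil(job.data.bundleId)) {
    bundleId = job.data.bundleId;
  }

  const payload: Record<string, any> = {
    inferredMetadata: { issue: inferredIssueDetails },
    sourcedMetadata: { comicInfo: comicInfoJSON, comicvine: {} },
    // dot-notation keys target nested paths without replacing their siblings
    "acquisition.source.wanted": false,
    "acquisition.source.name": job.data.sourcedFrom,
  };

  // Merge caller-supplied metadata (e.g. a ComicVine match) when present
  if (!isNil(job.data.sourcedMetadata) && !isUndefined(job.data.sourcedMetadata.comicvine)) {
    Object.assign(payload.sourcedMetadata, job.data.sourcedMetadata);
  }

  // Single write path; importType decides create vs. update downstream
  return broker.call("library.rawImportToDB", {
    importType: job.data.importType,
    bundleId,
    payload,
  });
}
```

The dot-notation keys appear to be the point of the refactor: passed into a Mongoose update they set individual nested paths, whereas a nested acquisition object would replace the whole subdocument and wipe acquisition.directconnect.downloads.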
@@ -177,10 +193,18 @@ export default class QueueService extends Service {
       async handler(
         ctx: Context<{
           fileObject: object;
+          importType: string;
+          bundleId: number;
+          sourcedFrom?: string;
+          sourcedMetadata: object;
         }>
       ) {
         return await this.createJob("process.import", {
           fileObject: ctx.params.fileObject,
+          importType: ctx.params.importType,
+          bundleId: ctx.params.bundleId,
+          sourcedFrom: ctx.params.sourcedFrom,
+          sourcedMetadata: ctx.params.sourcedMetadata,
         });
       },
     },

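With the extra parameters in place, processImport is a thin wrapper that copies its params into a Bull job named process.import. A hypothetical caller might look like this; only the action name and parameter names come from the hunk above, while the broker handle and the values are illustrative:

```typescript
// Hypothetical invocation from another Moleculer service or a REST client.
declare const broker: { call(action: string, params: object): Promise<unknown> };

await broker.call("importqueue.processImport", {
  fileObject: { filePath: "/comics/example.cbz", fileSize: 1_048_576 }, // illustrative
  importType: "new",
  bundleId: null, // a fresh library scan has no download bundle
  sourcedMetadata: {},
});
```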
@@ -32,7 +32,7 @@ SOFTWARE.
 */

 "use strict";
-import { isNil, isUndefined } from "lodash";
+import { isNil } from "lodash";
 import {
   Context,
   Service,
@@ -42,15 +42,8 @@ import {
 } from "moleculer";
 import { DbMixin } from "../mixins/db.mixin";
 import Comic from "../models/comic.model";
-import {
-  explodePath,
-  walkFolder,
-  getSizeOfDirectory,
-} from "../utils/file.utils";
-import {
-  extractFromArchive,
-  uncompressEntireArchive,
-} from "../utils/uncompression.utils";
+import { walkFolder, getSizeOfDirectory } from "../utils/file.utils";
+import { extractFromArchive } from "../utils/uncompression.utils";
 import { convertXMLToJSON } from "../utils/xml.utils";
 import {
   IExtractComicBookCoverErrorResponse,
@@ -104,6 +97,45 @@ export default class ImportService extends Service {
           });
         },
       },
+      importDownloadedComic: {
+        rest: "POST /importDownloadedComic",
+        params: {},
+        handler: async (ctx: Context<{ bundle: any }>) => {
+          console.log(ctx.params);
+          // Find the comic by bundleId
+          const referenceComicObject = await Comic.find({
+            "acquisition.directconnect.downloads.bundleId": `${ctx.params.bundle.data.id}`,
+          });
+          // Determine source where the comic was added from
+          // and gather identifying information about it
+          const sourceName =
+            referenceComicObject[0].acquisition.source.name;
+          const { sourcedMetadata } = referenceComicObject[0];
+
+          const filePath = `${COMICS_DIRECTORY}/${ctx.params.bundle.data.name}`;
+          let comicExists = await Comic.exists({
+            "rawFileDetails.name": `${path.basename(
+              ctx.params.bundle.data.name,
+              path.extname(ctx.params.bundle.data.name)
+            )}`,
+          });
+          if (!comicExists) {
+            // 2. Send the extraction job to the queue
+            await broker.call("importqueue.processImport", {
+              importType: "update",
+              sourcedFrom: sourceName,
+              bundleId: ctx.params.bundle.data.id,
+              sourcedMetadata,
+              fileObject: {
+                filePath,
+                // fileSize: item.stats.size,
+              },
+            });
+          } else {
+            console.log("Comic already exists in the library.");
+          }
+        },
+      },
       newImport: {
         rest: "POST /newImport",
         params: {},
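The new importDownloadedComic action is the heart of this commit: it resolves a finished download back to the comic that requested it, reuses that comic's source name and sourcedMetadata, and enqueues an "update" import unless the file is already in the library. A stripped-down sketch of that lookup-then-enqueue shape; the bundle layout ({ data: { id, name } }) and the declared globals mirror what the hunk itself uses:

```typescript
import path from "path";

// Comic, broker and COMICS_DIRECTORY stand in for the service's own
// imports/globals; the bundle shape is assumed from ctx.params.bundle.data.
declare const Comic: any;
declare const broker: any;
declare const COMICS_DIRECTORY: string;

async function importDownloadedComic(bundle: { data: { id: string; name: string } }) {
  // 1. Find the comic whose wanted-list entry produced this bundle
  const [reference] = await Comic.find({
    "acquisition.directconnect.downloads.bundleId": `${bundle.data.id}`,
  });

  // 2. Skip files already in the library (keyed by file name sans extension)
  const baseName = path.basename(bundle.data.name, path.extname(bundle.data.name));
  if (await Comic.exists({ "rawFileDetails.name": baseName })) {
    console.log("Comic already exists in the library.");
    return;
  }

  // 3. Queue an "update" import that attaches the file to the matched comic
  await broker.call("importqueue.processImport", {
    importType: "update",
    sourcedFrom: reference.acquisition.source.name,
    bundleId: bundle.data.id,
    sourcedMetadata: reference.sourcedMetadata,
    fileObject: { filePath: `${COMICS_DIRECTORY}/${bundle.data.name}` },
  });
}
```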
@@ -113,7 +145,6 @@ export default class ImportService extends Service {
         }>
       ) {
         // 1. Walk the Source folder
-
         klaw(path.resolve(COMICS_DIRECTORY))
           // 1.1 Filter on .cb* extensions
           .pipe(
@@ -126,7 +157,6 @@ export default class ImportService extends Service {
           ) {
             this.push(item);
           }
-
           next();
         })
       )
@@ -151,6 +181,7 @@ export default class ImportService extends Service {
                 filePath: item.path,
                 fileSize: item.stats.size,
               },
+              importType: "new",
             }
           );
         } else {
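The newImport walk is mostly untouched: klaw streams COMICS_DIRECTORY and a transform stage pushes only .cb* files downstream, with each resulting job now tagged importType: "new". A sketch of that pipeline follows; the through2 transform is an assumption, inferred from the this.push(item)/next() idiom in the hunk rather than stated in the diff:

```typescript
import path from "path";
import klaw from "klaw";
import through2 from "through2"; // assumption: the transform library in use

declare const COMICS_DIRECTORY: string;

// 1. Walk the source folder; 1.1 filter on .cb* extensions
klaw(path.resolve(COMICS_DIRECTORY))
  .pipe(
    through2.obj(function (item, _enc, next) {
      const ext = path.extname(item.path).toLowerCase();
      if ([".cbz", ".cbr", ".cb7"].includes(ext)) {
        this.push(item); // only comic archives continue downstream
      }
      next();
    })
  )
  .on("data", (item: any) => {
    // each surviving item would be handed to importqueue.processImport
    console.log("queueing", item.path, item.stats.size);
  });
```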
@@ -170,35 +201,39 @@ export default class ImportService extends Service {
       params: {},
       async handler(
         ctx: Context<{
-          _id: string;
-          sourcedMetadata: {
-            comicvine?: {
-              volume: { api_detail_url: string };
-              volumeInformation: {};
-            };
-            locg?: {};
-          };
-          inferredMetadata: {
-            issue: Object;
-          };
-          rawFileDetails: {
-            name: string;
-          };
-          acquisition: {
-            source: {
-              wanted: boolean;
-              name?: string;
-            };
-            directconnect: {
-              downloads: [];
-            };
-          };
+          bundleId?: string;
+          importType: string;
+          payload: {
+            _id?: string;
+            sourcedMetadata: {
+              comicvine?: {
+                volume: { api_detail_url: string };
+                volumeInformation: {};
+              };
+              locg?: {};
+            };
+            inferredMetadata: {
+              issue: Object;
+            };
+            rawFileDetails: {
+              name: string;
+            };
+            acquisition: {
+              source: {
+                wanted: boolean;
+                name?: string;
+              };
+              directconnect: {
+                downloads: [];
+              };
+            };
+          };
         }>
       ) {
         try {
           let volumeDetails;
-          const comicMetadata = ctx.params;
-          console.log(JSON.stringify(comicMetadata, null, 4));
+          const comicMetadata = ctx.params.payload;

           // When an issue is added from the search CV feature
           // we solicit volume information and add that to mongo
           if (
@@ -220,23 +255,30 @@ export default class ImportService extends Service {
             comicMetadata.sourcedMetadata.comicvine.volumeInformation =
               volumeDetails.results;
           }
-          Comic.findOneAndUpdate(
-            { _id: new ObjectId(ctx.params._id) },
-            ctx.params,
-            { upsert: true, new: true },
-            (error, data) => {
-              if (data) {
-                return data;
-              } else if (error) {
-                console.log("data", data);
-                console.log("error", error);
-                throw new Errors.MoleculerError(
-                  "Failed to import comic book",
-                  500
-                );
-              }
-            }
-          );
+
+          console.log("Saving to Mongo...");
+          console.log(
+            `Import type: [${ctx.params.importType}]`
+          );
+          console.log(JSON.stringify(comicMetadata, null, 4));
+          switch (ctx.params.importType) {
+            case "new":
+              return await Comic.create(comicMetadata);
+            case "update":
+              return await Comic.findOneAndUpdate(
+                {
+                  "acquisition.directconnect.downloads.bundleId":
+                    ctx.params.bundleId,
+                },
+                comicMetadata,
+                {
+                  upsert: true,
+                  new: true,
+                }
+              );
+            default:
+              return false;
+          }
         } catch (error) {
           throw new Errors.MoleculerError(
             "Import failed.",
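rawImportToDB no longer upserts blindly by _id through a callback; it branches on importType, creating a fresh document for "new" imports and upserting against the download's bundleId for "update" imports so a finished download lands on the comic that requested it. The essence of the switch, as a sketch (Comic stands in for the Mongoose model; the error handling shown in the hunk is omitted):

```typescript
declare const Comic: any; // Mongoose model from ../models/comic.model

async function rawImportToDB(importType: string, bundleId: string, payload: object) {
  switch (importType) {
    case "new":
      // fresh library scan: insert a brand-new document
      return Comic.create(payload);
    case "update":
      // finished download: patch the comic that owns this bundle;
      // dot-notation keys in payload update nested fields in place
      return Comic.findOneAndUpdate(
        { "acquisition.directconnect.downloads.bundleId": bundleId },
        payload,
        { upsert: true, new: true }
      );
    default:
      return false;
  }
}
```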
@@ -341,39 +383,6 @@ export default class ImportService extends Service {
           });
         },
       },
-      importDownloadedFileToLibrary: {
-        rest: "POST /importDownloadedFileToLibrary",
-        params: {},
-        handler: async (
-          ctx: Context<{
-            comicObjectId: string;
-            comicObject: {
-              acquisition: {
-                source: {
-                  wanted: boolean;
-                };
-              };
-            };
-            downloadStatus: { name: string };
-          }>
-        ) => {
-          const result = await extractFromArchive(
-            `${COMICS_DIRECTORY}/${ctx.params.downloadStatus.name}`
-          );
-          Object.assign(ctx.params.comicObject, {
-            rawFileDetails: result,
-          });
-          ctx.params.comicObject.acquisition.source.wanted =
-            false;
-          const updateResult = await Comic.findOneAndUpdate(
-            { _id: new ObjectId(ctx.params.comicObjectId) },
-            ctx.params.comicObject,
-            { upsert: true, new: true }
-          );
-          await updateResult.index();
-        },
-      },
-
       getComicBooks: {
         rest: "POST /getComicBooks",
         params: {},

@@ -51,8 +51,8 @@ export default class SocketService extends Service {
         );
         console.log(data);
         await this.broker.call(
-          "library.importDownloadedFileToLibrary",
-          data.data,
+          "library.importDownloadedComic",
+          { bundle: data },
           {}
         );
         break;

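On the socket side the refactor is a two-line retarget: the handler that used to forward data.data to library.importDownloadedFileToLibrary now wraps the whole event as { bundle: data } and calls library.importDownloadedComic. Roughly, with the surrounding socket.io event switch assumed:

```typescript
declare const broker: any; // Moleculer broker held by the socket service

// Assumed handler shape; the action name and the { bundle: data }
// wrapper are taken from the hunk above.
async function onDownloadComplete(data: { data: { id: string; name: string } }) {
  await broker.call("library.importDownloadedComic", { bundle: data }, {});
}
```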
@@ -73,10 +73,6 @@ export const extractComicInfoXMLFromRar = async (
   filePath: string
 ): Promise<any> => {
   try {
-    const result = {
-      filePath,
-    };
-
     // Create the target directory
     const directoryOptions = {
       mode: 0o2775,
@@ -92,7 +88,7 @@ export const extractComicInfoXMLFromRar = async (
     path: path.resolve(filePath),
     bin: `${UNRAR_BIN_PATH}`, // this will change depending on Docker base OS
   });

   const filesInArchive: [RarFile] = await new Promise(
     (resolve, reject) => {
       return archive.list((err, entries) => {
@@ -128,7 +124,6 @@ export const extractComicInfoXMLFromRar = async (
   const comicInfoXMLFilePromise = new Promise((resolve, reject) => {
     let comicinfostring = "";
     if (!isUndefined(comicInfoXML[0])) {
-      console.log(path.basename(comicInfoXML[0].name));
       const comicInfoXMLFileName = path.basename(
         comicInfoXML[0].name
       );
@@ -138,6 +133,7 @@ export const extractComicInfoXMLFromRar = async (

       archive.stream(comicInfoXML[0]["name"]).pipe(writeStream);
       writeStream.on("finish", async () => {
+        console.log(`Attempting to write comicInfo.xml...`);
         const readStream = createReadStream(
           `${targetDirectory}/${comicInfoXMLFileName}`
         );
@@ -154,7 +150,7 @@ export const extractComicInfoXMLFromRar = async (
         const comicInfoJSON = await convertXMLToJSON(
           comicinfostring.toString()
         );
-
+        console.log(`comicInfo.xml successfully written: ${comicInfoJSON.comicinfo}`)
         resolve({ comicInfoJSON: comicInfoJSON.comicinfo });
       }
     });
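The RAR hunks all decorate the same callback-to-promise pattern: list the archive entries, stream ComicInfo.xml out to disk, then read it back and parse it. A stripped-down sketch of that pattern; archive stands for whatever the unrar wrapper constructed with { path, bin } above returns, and convertXMLToJSON is the util imported there, so both are declared rather than invented:

```typescript
import path from "path";
import { createWriteStream, createReadStream } from "fs";

declare const archive: any; // unrar wrapper built with { path, bin } above
declare const convertXMLToJSON: (xml: string) => Promise<any>;
declare const targetDirectory: string;

function extractComicInfoXML(entryName: string): Promise<any> {
  const target = `${targetDirectory}/${path.basename(entryName)}`;
  return new Promise((resolve, reject) => {
    // Stream the entry out of the archive to disk...
    const writeStream = createWriteStream(target);
    archive.stream(entryName).pipe(writeStream);
    writeStream.on("error", reject);
    writeStream.on("finish", () => {
      // ...then read it back, accumulate the XML, and parse it
      let comicinfostring = "";
      const readStream = createReadStream(target);
      readStream.on("data", (chunk) => (comicinfostring += chunk));
      readStream.on("error", reject);
      readStream.on("end", async () => {
        const comicInfoJSON = await convertXMLToJSON(comicinfostring.toString());
        resolve({ comicInfoJSON: comicInfoJSON.comicinfo });
      });
    });
  });
}
```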
@@ -357,6 +353,8 @@ export const extractFromArchive = async (filePath: string) => {

     case ".cbr":
       const cbrResult = await extractComicInfoXMLFromRar(filePath);
+      console.log("ASDASDASDASDas");
+      console.log(JSON.stringify(cbrResult, null, 4))
       return Object.assign({}, ...cbrResult);

     default: