Merge pull request #8 from rishighan/qbittorrent-settings

Miscellaneous Settings
This commit was merged in pull request #8.
Committed by GitHub on 2024-01-08 16:42:54 -05:00.
5 changed files with 423 additions and 181 deletions

View File

@@ -28,6 +28,10 @@ const RawFileDetailsSchema = mongoose.Schema({
   mimeType: String,
   containedIn: String,
   pageCount: Number,
+  archive: {
+    uncompressed: Boolean,
+    expandedPath: String,
+  },
   cover: {
     filePath: String,
     stats: Object,
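
Note: the new `archive` subdocument records whether a comic's backing archive has already been expanded to disk, and where. A minimal sketch of how a caller might consult it before re-extracting; the helper below is hypothetical and not part of this diff:

// Hypothetical helper: returns the expanded directory if the archive
// was already uncompressed, otherwise null.
import Comic from "../models/comic.model";

export const getExpandedArchivePath = async (
  comicObjectId: string
): Promise<string | null> => {
  const comic: any = await Comic.findById(comicObjectId).lean();
  return comic?.rawFileDetails?.archive?.uncompressed
    ? comic.rawFileDetails.archive.expandedPath
    : null;
};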

View File

@@ -7,6 +7,8 @@ import {
   ServiceSchema,
   Errors,
 } from "moleculer";
+import { DbMixin } from "../mixins/db.mixin";
+import Comic from "../models/comic.model";
 import path from "path";
 import {
   analyze,
@@ -22,16 +24,13 @@ export default class ImageTransformation extends Service {
     super(broker);
     this.parseServiceSchema({
       name: "imagetransformation",
-      mixins: [],
+      mixins: [DbMixin("comics", Comic)],
       settings: {
         // Available fields in the responses
-        fields: ["_id", "name", "quantity", "price"],
+        fields: ["_id"],
         // Validator for the `create` & `insert` actions.
-        entityValidator: {
-          name: "string|min:3",
-          price: "number|positive",
-        },
+        entityValidator: {},
       },
       hooks: {},
       actions: {

View File

@@ -2,9 +2,16 @@ import { Context, Service, ServiceBroker } from "moleculer";
 import JobResult from "../models/jobresult.model";
 import { refineQuery } from "filename-parser";
 import BullMqMixin from "moleculer-bullmq";
-import { extractFromArchive, uncompressEntireArchive } from "../utils/uncompression.utils";
+import { DbMixin } from "../mixins/db.mixin";
+import Comic from "../models/comic.model";
+const ObjectId = require("mongoose").Types.ObjectId;
+import {
+  extractFromArchive,
+  uncompressEntireArchive,
+} from "../utils/uncompression.utils";
 import { isNil, isUndefined } from "lodash";
 import { pubClient } from "../config/redis.config";
+import path from "path";
 const { MoleculerError } = require("moleculer").Errors;
@@ -15,7 +22,7 @@ export default class JobQueueService extends Service {
     this.parseServiceSchema({
       name: "jobqueue",
       hooks: {},
-      mixins: [BullMqMixin],
+      mixins: [DbMixin("comics", Comic), BullMqMixin],
       settings: {
         bullmq: {
           client: process.env.REDIS_URI,
@@ -47,13 +54,19 @@ export default class JobQueueService extends Service {
       enqueue: {
         queue: true,
         rest: "/GET enqueue",
-        handler: async (ctx: Context<{ queueName: string; description: string }>) => {
-          console.log(ctx.params);
+        handler: async (
+          ctx: Context<{ queueName: string; description: string }>
+        ) => {
           const { queueName, description } = ctx.params;
           // Enqueue the job
-          const job = await this.localQueue(ctx, queueName, ctx.params, {
-            priority: 10,
-          });
+          const job = await this.localQueue(
+            ctx,
+            queueName,
+            ctx.params,
+            {
+              priority: 10,
+            }
+          );
           console.log(`Job ${job.id} enqueued`);
           console.log(`${description}`);
@@ -68,13 +81,16 @@ export default class JobQueueService extends Service {
         }>
       ) => {
         try {
-          console.log(`Recieved Job ID ${ctx.locals.job.id}, processing...`);
-          console.log(ctx.params);
+          console.log(
+            `Recieved Job ID ${ctx.locals.job.id}, processing...`
+          );
           // 1. De-structure the job params
           const { fileObject } = ctx.locals.job.data.params;
           // 2. Extract metadata from the archive
-          const result = await extractFromArchive(fileObject.filePath);
+          const result = await extractFromArchive(
+            fileObject.filePath
+          );
           const {
             name,
             filePath,
@@ -87,7 +103,9 @@ export default class JobQueueService extends Service {
           } = result;
           // 3a. Infer any issue-related metadata from the filename
-          const { inferredIssueDetails } = refineQuery(result.name);
+          const { inferredIssueDetails } = refineQuery(
+            result.name
+          );
           console.log(
             "Issue metadata inferred: ",
             JSON.stringify(inferredIssueDetails, null, 2)
@@ -127,7 +145,8 @@ export default class JobQueueService extends Service {
             // "acquisition.directconnect.downloads": [],
             // mark the metadata source
-            "acquisition.source.name": ctx.locals.job.data.params.sourcedFrom,
+            "acquisition.source.name":
+              ctx.locals.job.data.params.sourcedFrom,
           };
           // 3c. Add the bundleId, if present to the payload
@@ -138,8 +157,13 @@ export default class JobQueueService extends Service {
           // 3d. Add the sourcedMetadata, if present
           if (
-            !isNil(ctx.locals.job.data.params.sourcedMetadata) &&
-            !isUndefined(ctx.locals.job.data.params.sourcedMetadata.comicvine)
+            !isNil(
+              ctx.locals.job.data.params.sourcedMetadata
+            ) &&
+            !isUndefined(
+              ctx.locals.job.data.params.sourcedMetadata
+                .comicvine
+            )
           ) {
             Object.assign(
               payload.sourcedMetadata,
@@ -148,11 +172,15 @@ export default class JobQueueService extends Service {
           }
           // 4. write to mongo
-          const importResult = await this.broker.call("library.rawImportToDB", {
-            importType: ctx.locals.job.data.params.importType,
-            bundleId,
-            payload,
-          });
+          const importResult = await this.broker.call(
+            "library.rawImportToDB",
+            {
+              importType:
+                ctx.locals.job.data.params.importType,
+              bundleId,
+              payload,
+            }
+          );
           return {
             data: {
               importResult,
@@ -164,9 +192,14 @@ export default class JobQueueService extends Service {
           console.error(
             `An error occurred processing Job ID ${ctx.locals.job.id}`
           );
-          throw new MoleculerError(error, 500, "IMPORT_JOB_ERROR", {
-            data: ctx.params.sessionId,
-          });
+          throw new MoleculerError(
+            error,
+            500,
+            "IMPORT_JOB_ERROR",
+            {
+              data: ctx.params.sessionId,
+            }
+          );
         }
       },
     },
@@ -194,7 +227,8 @@ export default class JobQueueService extends Service {
             statuses: {
               $push: {
                 status: "$_id.status",
-                earliestTimestamp: "$earliestTimestamp",
+                earliestTimestamp:
+                  "$earliestTimestamp",
                 count: "$count",
               },
             },
@@ -214,7 +248,10 @@ export default class JobQueueService extends Service {
             {
               $cond: [
                 {
-                  $eq: ["$$this.status", "completed"],
+                  $eq: [
+                    "$$this.status",
+                    "completed",
+                  ],
                 },
                 "$$this.count",
                 0,
@@ -234,7 +271,10 @@ export default class JobQueueService extends Service {
             {
               $cond: [
                 {
-                  $eq: ["$$this.status", "failed"],
+                  $eq: [
+                    "$$this.status",
+                    "failed",
+                  ],
                },
                "$$this.count",
                0,
@@ -254,21 +294,72 @@ export default class JobQueueService extends Service {
       },
       "uncompressFullArchive.async": {
         rest: "POST /uncompressFullArchive",
-        handler: async (ctx: Context<{ filePath: string; options: any }>) => {
-          const { filePath, options } = ctx.params;
-          console.log("asd", filePath);
+        handler: async (
+          ctx: Context<{
+            filePath: string;
+            comicObjectId: string;
+            options: any;
+          }>
+        ) => {
+          console.log(
+            `Recieved Job ID ${JSON.stringify(
+              ctx.locals
+            )}, processing...`
+          );
+          const { filePath, options, comicObjectId } = ctx.params;
+          const comicId = new ObjectId(comicObjectId);
           // 2. Extract metadata from the archive
-          return await uncompressEntireArchive(filePath, options);
+          const result: string[] = await uncompressEntireArchive(
+            filePath,
+            options
+          );
+          if (Array.isArray(result) && result.length !== 0) {
+            // Get the containing directory of the uncompressed archive
+            const directoryPath = path.dirname(result[0]);
+            // Add to mongo object
+            await Comic.findByIdAndUpdate(
+              comicId,
+              {
+                $set: {
+                  "rawFileDetails.archive": {
+                    uncompressed: true,
+                    expandedPath: directoryPath,
+                  },
+                },
+              },
+              { new: true, safe: true, upsert: true }
+            );
+            return result;
+          }
         },
       },
     },
     events: {
-      async "uncompressFullArchive.async.active"(ctx: Context<{ id: number }>) {
-        console.log(`Uncompression Job ID ${ctx.params.id} is set to active.`);
+      async "uncompressFullArchive.async.active"(
+        ctx: Context<{ id: number }>
+      ) {
+        console.log(
+          `Uncompression Job ID ${ctx.params.id} is set to active.`
+        );
       },
-      async "uncompressFullArchive.async.completed"(ctx: Context<{ id: number }>) {
-        console.log(`Uncompression Job ID ${ctx.params.id} completed.`);
+      async "uncompressFullArchive.async.completed"(
+        ctx: Context<{ id: number }>
+      ) {
+        console.log(
+          `Uncompression Job ID ${ctx.params.id} completed.`
+        );
+        const job = await this.job(ctx.params.id);
+        await this.broker.call("socket.broadcast", {
+          namespace: "/",
+          event: "LS_UNCOMPRESSION_JOB_COMPLETE",
+          args: [
+            {
+              uncompressedArchive: job.returnvalue,
+            },
+          ],
+        });
+        return job.returnvalue;
       },
       // use the `${QUEUE_NAME}.QUEUE_EVENT` scheme
       async "enqueue.async.active"(ctx: Context<{ id: Number }>) {
@@ -292,7 +383,9 @@ export default class JobQueueService extends Service {
       // 2. Increment the completed job counter
       await pubClient.incr("completedJobCount");
       // 3. Fetch the completed job count for the final payload to be sent to the client
-      const completedJobCount = await pubClient.get("completedJobCount");
+      const completedJobCount = await pubClient.get(
+        "completedJobCount"
+      );
       // 4. Emit the LS_COVER_EXTRACTED event with the necessary details
       await this.broker.call("socket.broadcast", {
         namespace: "/",
@@ -319,7 +412,9 @@ export default class JobQueueService extends Service {
     async "enqueue.async.failed"(ctx) {
       const job = await this.job(ctx.params.id);
       await pubClient.incr("failedJobCount");
-      const failedJobCount = await pubClient.get("failedJobCount");
+      const failedJobCount = await pubClient.get(
+        "failedJobCount"
+      );
       await JobResult.create({
         id: ctx.params.id,
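
Note: taken together, the changes above wire full-archive uncompression through the job queue: `jobqueue.enqueue` places the job on the named queue, the `uncompressFullArchive.async` handler expands the archive and persists `rawFileDetails.archive` on the comic document, and the completed event broadcasts the result over the socket gateway. A sketch of the enqueueing call, assuming a `broker` instance; all param names come from this diff, while the path and ObjectId are placeholders:

await broker.call("jobqueue.enqueue", {
  queueName: "uncompressFullArchive.async",
  description: "Uncompress archive for the reader view",
  filePath: "/comics/Example Issue 001.cbz",          // placeholder path
  comicObjectId: "65a1f0c2e4b0a1b2c3d4e5f6",          // placeholder ObjectId string
  options: { purpose: "reading" },                    // `purpose` becomes a segment of the expansion directory
});
// Clients on the "/" namespace then receive LS_UNCOMPRESSION_JOB_COMPLETE
// with { uncompressedArchive: <job.returnvalue> }.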

View File

@@ -33,7 +33,13 @@ SOFTWARE.
 "use strict";
 import { isNil } from "lodash";
-import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
+import {
+  Context,
+  Service,
+  ServiceBroker,
+  ServiceSchema,
+  Errors,
+} from "moleculer";
 import { DbMixin } from "../mixins/db.mixin";
 import Comic from "../models/comic.model";
 import { walkFolder, getSizeOfDirectory } from "../utils/file.utils";
@@ -74,14 +80,19 @@ export default class ImportService extends Service {
       },
       walkFolders: {
         rest: "POST /walkFolders",
-        params: {
-          basePathToWalk: "string",
-        },
-        async handler(ctx: Context<{ basePathToWalk: string }>) {
+        params: {},
+        async handler(
+          ctx: Context<{
+            basePathToWalk: string;
+            extensions: string[];
+          }>
+        ) {
+          console.log(ctx.params);
           return await walkFolder(ctx.params.basePathToWalk, [
             ".cbz",
             ".cbr",
             ".cb7",
+            ...ctx.params.extensions,
           ]);
         },
       },
@@ -95,10 +106,19 @@ export default class ImportService extends Service {
       uncompressFullArchive: {
         rest: "POST /uncompressFullArchive",
         params: {},
-        handler: async (ctx: Context<{ filePath: string; options: any }>) => {
-          await broker.call("importqueue.uncompressResize", {
+        handler: async (
+          ctx: Context<{
+            filePath: string;
+            comicObjectId: string;
+            options: any;
+          }>
+        ) => {
+          this.broker.call("jobqueue.enqueue", {
             filePath: ctx.params.filePath,
+            comicObjectId: ctx.params.comicObjectId,
             options: ctx.params.options,
+            queueName: "uncompressFullArchive.async",
+            description: `Job for uncompressing archive at ${ctx.params.filePath}`,
           });
         },
       },
@@ -113,7 +133,8 @@ export default class ImportService extends Service {
         });
         // Determine source where the comic was added from
         // and gather identifying information about it
-        const sourceName = referenceComicObject[0].acquisition.source.name;
+        const sourceName =
+          referenceComicObject[0].acquisition.source.name;
         const { sourcedMetadata } = referenceComicObject[0];
         const filePath = `${COMICS_DIRECTORY}/${ctx.params.bundle.data.name}`;
@@ -157,8 +178,14 @@ export default class ImportService extends Service {
           // 1.1 Filter on .cb* extensions
           .pipe(
             through2.obj(function (item, enc, next) {
-              let fileExtension = path.extname(item.path);
-              if ([".cbz", ".cbr", ".cb7"].includes(fileExtension)) {
+              let fileExtension = path.extname(
+                item.path
+              );
+              if (
+                [".cbz", ".cbr", ".cb7"].includes(
+                  fileExtension
+                )
+              ) {
                 this.push(item);
               }
               next();
@@ -167,7 +194,10 @@ export default class ImportService extends Service {
           // 1.2 Pipe filtered results to the next step
           // Enqueue the job in the queue
           .on("data", async (item) => {
-            console.info("Found a file at path: %s", item.path);
+            console.info(
+              "Found a file at path: %s",
+              item.path
+            );
             let comicExists = await Comic.exists({
               "rawFileDetails.name": `${path.basename(
                 item.path,
@@ -176,8 +206,14 @@ export default class ImportService extends Service {
             });
             if (!comicExists) {
               // 2.1 Reset the job counters in Redis
-              await pubClient.set("completedJobCount", 0);
-              await pubClient.set("failedJobCount", 0);
+              await pubClient.set(
+                "completedJobCount",
+                0
+              );
+              await pubClient.set(
+                "failedJobCount",
+                0
+              );
               // 2.2 Send the extraction job to the queue
               this.broker.call("jobqueue.enqueue", {
                 fileObject: {
@@ -189,7 +225,9 @@ export default class ImportService extends Service {
                 queueName: "enqueue.async",
               });
             } else {
-              console.log("Comic already exists in the library.");
+              console.log(
+                "Comic already exists in the library."
+              );
             }
           })
           .on("end", () => {
@@ -241,19 +279,28 @@ export default class ImportService extends Service {
         // we solicit volume information and add that to mongo
         if (
           comicMetadata.sourcedMetadata.comicvine &&
-          !isNil(comicMetadata.sourcedMetadata.comicvine.volume)
+          !isNil(
+            comicMetadata.sourcedMetadata.comicvine
+              .volume
+          )
         ) {
-          volumeDetails = await this.broker.call("comicvine.getVolumes", {
-            volumeURI:
-              comicMetadata.sourcedMetadata.comicvine.volume.api_detail_url,
-          });
+          volumeDetails = await this.broker.call(
+            "comicvine.getVolumes",
+            {
+              volumeURI:
+                comicMetadata.sourcedMetadata
+                  .comicvine.volume
+                  .api_detail_url,
+            }
+          );
           comicMetadata.sourcedMetadata.comicvine.volumeInformation =
             volumeDetails.results;
         }
         console.log("Saving to Mongo...");
-        console.log(`Import type: [${ctx.params.importType}]`);
+        console.log(
+          `Import type: [${ctx.params.importType}]`
+        );
         switch (ctx.params.importType) {
           case "new":
             return await Comic.create(comicMetadata);
@@ -274,7 +321,10 @@ export default class ImportService extends Service {
           }
         } catch (error) {
           console.log(error);
-          throw new Errors.MoleculerError("Import failed.", 500);
+          throw new Errors.MoleculerError(
+            "Import failed.",
+            500
+          );
         }
       },
     },
@@ -292,7 +342,9 @@ export default class ImportService extends Service {
       ) {
         // 1. Find mongo object by id
         // 2. Import payload into sourcedMetadata.comicvine
-        const comicObjectId = new ObjectId(ctx.params.comicObjectId);
+        const comicObjectId = new ObjectId(
+          ctx.params.comicObjectId
+        );
         return new Promise(async (resolve, reject) => {
           let volumeDetails = {};
@@ -301,15 +353,18 @@ export default class ImportService extends Service {
           const volumeDetails = await this.broker.call(
             "comicvine.getVolumes",
             {
-              volumeURI: matchedResult.volume.api_detail_url,
+              volumeURI:
+                matchedResult.volume.api_detail_url,
             }
           );
-          matchedResult.volumeInformation = volumeDetails.results;
+          matchedResult.volumeInformation =
+            volumeDetails.results;
           Comic.findByIdAndUpdate(
             comicObjectId,
             {
               $set: {
-                "sourcedMetadata.comicvine": matchedResult,
+                "sourcedMetadata.comicvine":
+                  matchedResult,
               },
             },
             { new: true },
@@ -340,7 +395,9 @@ export default class ImportService extends Service {
        }>
      ) {
        console.log(JSON.stringify(ctx.params, null, 2));
-        const comicObjectId = new ObjectId(ctx.params.comicObjectId);
+        const comicObjectId = new ObjectId(
+          ctx.params.comicObjectId
+        );
        return new Promise((resolve, reject) => {
          Comic.findByIdAndUpdate(
@@ -395,7 +452,9 @@ export default class ImportService extends Service {
       params: { ids: "array" },
       handler: async (ctx: Context<{ ids: [string] }>) => {
         console.log(ctx.params.ids);
-        const queryIds = ctx.params.ids.map((id) => new ObjectId(id));
+        const queryIds = ctx.params.ids.map(
+          (id) => new ObjectId(id)
+        );
         return await Comic.find({
           _id: {
             $in: queryIds,
@@ -411,7 +470,8 @@ export default class ImportService extends Service {
        const volumes = await Comic.aggregate([
          {
            $project: {
-              volumeInfo: "$sourcedMetadata.comicvine.volumeInformation",
+              volumeInfo:
+                "$sourcedMetadata.comicvine.volumeInformation",
            },
          },
          {
@@ -457,7 +517,8 @@ export default class ImportService extends Service {
        const { queryObjects } = ctx.params;
        // construct the query for ElasticSearch
        let elasticSearchQuery = {};
-        const elasticSearchQueries = queryObjects.map((queryObject) => {
+        const elasticSearchQueries = queryObjects.map(
+          (queryObject) => {
            console.log("Volume: ", queryObject.volumeName);
            console.log("Issue: ", queryObject.issueName);
            if (queryObject.issueName === null) {
@@ -471,12 +532,14 @@ export default class ImportService extends Service {
              must: [
                {
                  match_phrase: {
-                    "rawFileDetails.name": queryObject.volumeName,
+                    "rawFileDetails.name":
+                      queryObject.volumeName,
                  },
                },
                {
                  term: {
-                    "inferredMetadata.issue.number": parseInt(
-                      queryObject.issueNumber,
-                      10
-                    ),
+                    "inferredMetadata.issue.number":
+                      parseInt(
+                        queryObject.issueNumber,
+                        10
+                      ),
@@ -495,8 +558,11 @@ export default class ImportService extends Service {
              query: elasticSearchQuery,
            },
          ];
-        });
-        console.log(JSON.stringify(elasticSearchQueries, null, 2));
+          }
+        );
+        console.log(
+          JSON.stringify(elasticSearchQueries, null, 2)
+        );
        return await ctx.broker.call("search.searchComic", {
          elasticSearchQueries,
@@ -509,11 +575,10 @@ export default class ImportService extends Service {
      rest: "GET /libraryStatistics",
      params: {},
      handler: async (ctx: Context<{}>) => {
-        const comicDirectorySize = await getSizeOfDirectory(COMICS_DIRECTORY, [
-          ".cbz",
-          ".cbr",
-          ".cb7",
-        ]);
+        const comicDirectorySize = await getSizeOfDirectory(
+          COMICS_DIRECTORY,
+          [".cbz", ".cbr", ".cb7"]
+        );
        const totalCount = await Comic.countDocuments({});
        const statistics = await Comic.aggregate([
          {
@@ -522,7 +587,11 @@ export default class ImportService extends Service {
            {
              $match: {
                "rawFileDetails.extension": {
-                  $in: [".cbr", ".cbz", ".cb7"],
+                  $in: [
+                    ".cbr",
+                    ".cbz",
+                    ".cb7",
+                  ],
                },
              },
            },
@@ -536,7 +605,8 @@ export default class ImportService extends Service {
            issues: [
              {
                $match: {
-                  "sourcedMetadata.comicvine.volumeInformation": {
+                  "sourcedMetadata.comicvine.volumeInformation":
+                    {
                      $gt: {},
                    },
                },
@@ -601,13 +671,20 @@ export default class ImportService extends Service {
          .drop()
          .then(async (data) => {
            console.info(data);
-            const coversFolderDeleteResult = fsExtra.emptyDirSync(
-              path.resolve(`${USERDATA_DIRECTORY}/covers`)
-            );
-            const expandedFolderDeleteResult = fsExtra.emptyDirSync(
-              path.resolve(`${USERDATA_DIRECTORY}/expanded`)
-            );
-            const eSIndicesDeleteResult = await ctx.broker.call(
+            const coversFolderDeleteResult =
+              fsExtra.emptyDirSync(
+                path.resolve(
+                  `${USERDATA_DIRECTORY}/covers`
+                )
+              );
+            const expandedFolderDeleteResult =
+              fsExtra.emptyDirSync(
+                path.resolve(
+                  `${USERDATA_DIRECTORY}/expanded`
+                )
+              );
+            const eSIndicesDeleteResult =
+              await ctx.broker.call(
              "search.deleteElasticSearchIndices",
              {}
            );
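
Note: `walkFolders` no longer validates `basePathToWalk` and now spreads `ctx.params.extensions` into the default `.cbz`/`.cbr`/`.cb7` list, so callers must always pass an `extensions` array (an empty one for the defaults), or the spread will throw. A sketch of the assumed call shape; the service name `library` is inferred from the `library.rawImportToDB` call above, and the path is a placeholder:

const files = await broker.call("library.walkFolders", {
  basePathToWalk: "/comics",     // placeholder path
  extensions: [],                // pass extra extensions here, e.g. [".cbt"]
});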

View File

@@ -81,7 +81,8 @@ export const extractComicInfoXMLFromRar = async (
   const directoryOptions = {
     mode: 0o2775,
   };
-  const { fileNameWithoutExtension, extension } = getFileConstituents(filePath);
+  const { fileNameWithoutExtension, extension } =
+    getFileConstituents(filePath);
   const targetDirectory = `${USERDATA_DIRECTORY}/covers/${sanitize(
     fileNameWithoutExtension
   )}`;
@@ -92,7 +93,8 @@ export const extractComicInfoXMLFromRar = async (
     bin: `${UNRAR_BIN_PATH}`, // this will change depending on Docker base OS
     arguments: ["-v"],
   });
-  const filesInArchive: [RarFile] = await new Promise((resolve, reject) => {
+  const filesInArchive: [RarFile] = await new Promise(
+    (resolve, reject) => {
       return archive.list((err, entries) => {
         if (err) {
           console.log(`DEBUG: ${JSON.stringify(err, null, 2)}`);
@@ -100,7 +102,8 @@ export const extractComicInfoXMLFromRar = async (
         }
         resolve(entries);
       });
-  });
+    }
+  );
   remove(filesInArchive, ({ type }) => type === "Directory");
   const comicInfoXML = remove(
@@ -110,7 +113,10 @@ export const extractComicInfoXMLFromRar = async (
   remove(
     filesInArchive,
-    ({ name }) => !IMPORT_IMAGE_FILE_FORMATS.includes(path.extname(name).toLowerCase())
+    ({ name }) =>
+      !IMPORT_IMAGE_FILE_FORMATS.includes(
+        path.extname(name).toLowerCase()
+      )
   );
   const files = filesInArchive.sort((a, b) => {
     if (!isUndefined(a) && !isUndefined(b)) {
@@ -123,8 +129,12 @@ export const extractComicInfoXMLFromRar = async (
   const comicInfoXMLFilePromise = new Promise((resolve, reject) => {
     let comicinfostring = "";
     if (!isUndefined(comicInfoXML[0])) {
-      const comicInfoXMLFileName = path.basename(comicInfoXML[0].name);
-      const writeStream = createWriteStream(`${targetDirectory}/${comicInfoXMLFileName}`);
+      const comicInfoXMLFileName = path.basename(
+        comicInfoXML[0].name
+      );
+      const writeStream = createWriteStream(
+        `${targetDirectory}/${comicInfoXMLFileName}`
+      );
       archive.stream(comicInfoXML[0]["name"]).pipe(writeStream);
       writeStream.on("finish", async () => {
@@ -137,7 +147,11 @@ export const extractComicInfoXMLFromRar = async (
       });
       readStream.on("error", (error) => reject(error));
       readStream.on("end", async () => {
-        if (existsSync(`${targetDirectory}/${comicInfoXMLFileName}`)) {
+        if (
+          existsSync(
+            `${targetDirectory}/${comicInfoXMLFileName}`
+          )
+        ) {
           const comicInfoJSON = await convertXMLToJSON(
             comicinfostring.toString()
           );
@@ -158,11 +172,14 @@ export const extractComicInfoXMLFromRar = async (
     const sharpStream = sharp().resize(275).toFormat("png");
     const coverExtractionStream = archive.stream(files[0].name);
     const resizeStream = coverExtractionStream.pipe(sharpStream);
-    resizeStream.toFile(`${targetDirectory}/${coverFile}`, (err, info) => {
-      if (err) {
-        reject(err);
-      }
-      checkFileExists(`${targetDirectory}/${coverFile}`).then((bool) => {
-        console.log(`${coverFile} exists: ${bool}`);
-        // orchestrate result
-        resolve({
+    resizeStream.toFile(
+      `${targetDirectory}/${coverFile}`,
+      (err, info) => {
+        if (err) {
+          reject(err);
+        }
+        checkFileExists(`${targetDirectory}/${coverFile}`).then(
+          (bool) => {
+            console.log(`${coverFile} exists: ${bool}`);
+            // orchestrate result
+            resolve({
@@ -179,8 +196,10 @@ export const extractComicInfoXMLFromRar = async (
               ),
             },
           });
-      });
-    });
+          }
+        );
+      }
+    );
   });
   return Promise.all([comicInfoXMLFilePromise, coverFilePromise]);
@@ -198,7 +217,8 @@ export const extractComicInfoXMLFromZip = async (
   const directoryOptions = {
     mode: 0o2775,
   };
-  const { fileNameWithoutExtension, extension } = getFileConstituents(filePath);
+  const { fileNameWithoutExtension, extension } =
+    getFileConstituents(filePath);
   const targetDirectory = `${USERDATA_DIRECTORY}/covers/${sanitize(
     fileNameWithoutExtension
   )}`;
@@ -217,7 +237,10 @@ export const extractComicInfoXMLFromZip = async (
   // only allow allowed image formats
   remove(
     filesFromArchive.files,
-    ({ name }) => !IMPORT_IMAGE_FILE_FORMATS.includes(path.extname(name).toLowerCase())
+    ({ name }) =>
+      !IMPORT_IMAGE_FILE_FORMATS.includes(
+        path.extname(name).toLowerCase()
+      )
   );
   // Natural sort
@@ -238,7 +261,13 @@ export const extractComicInfoXMLFromZip = async (
     extractionTargets.push(filesToWriteToDisk.comicInfoXML);
   }
   // Extract the files.
-  await p7zip.extract(filePath, targetDirectory, extractionTargets, "", false);
+  await p7zip.extract(
+    filePath,
+    targetDirectory,
+    extractionTargets,
+    "",
+    false
+  );
   // ComicInfoXML detection, parsing and conversion to JSON
   // Write ComicInfo.xml to disk
@@ -246,15 +275,26 @@ export const extractComicInfoXMLFromZip = async (
   const comicInfoXMLPromise = new Promise((resolve, reject) => {
     if (
       !isNil(filesToWriteToDisk.comicInfoXML) &&
-      existsSync(`${targetDirectory}/${path.basename(filesToWriteToDisk.comicInfoXML)}`)
+      existsSync(
+        `${targetDirectory}/${path.basename(
+          filesToWriteToDisk.comicInfoXML
+        )}`
+      )
     ) {
       let comicinfoString = "";
       const comicInfoXMLStream = createReadStream(
-        `${targetDirectory}/${path.basename(filesToWriteToDisk.comicInfoXML)}`
+        `${targetDirectory}/${path.basename(
+          filesToWriteToDisk.comicInfoXML
+        )}`
      );
-      comicInfoXMLStream.on("data", (data) => (comicinfoString += data));
+      comicInfoXMLStream.on(
+        "data",
+        (data) => (comicinfoString += data)
+      );
       comicInfoXMLStream.on("end", async () => {
-        const comicInfoJSON = await convertXMLToJSON(comicinfoString.toString());
+        const comicInfoJSON = await convertXMLToJSON(
+          comicinfoString.toString()
+        );
         resolve({
           comicInfoJSON: comicInfoJSON.comicinfo,
         });
@@ -274,7 +314,9 @@ export const extractComicInfoXMLFromZip = async (
       coverStream
         .pipe(sharpStream)
         .toFile(
-          `${targetDirectory}/${path.basename(filesToWriteToDisk.coverFile)}`,
+          `${targetDirectory}/${path.basename(
+            filesToWriteToDisk.coverFile
+          )}`,
           (err, info) => {
             if (err) {
               reject(err);
@@ -315,15 +357,23 @@ export const extractFromArchive = async (filePath: string) => {
   switch (mimeType) {
     case "application/x-7z-compressed; charset=binary":
     case "application/zip; charset=binary":
-      const cbzResult = await extractComicInfoXMLFromZip(filePath, mimeType);
+      const cbzResult = await extractComicInfoXMLFromZip(
+        filePath,
+        mimeType
+      );
       return Object.assign({}, ...cbzResult);
     case "application/x-rar; charset=binary":
-      const cbrResult = await extractComicInfoXMLFromRar(filePath, mimeType);
+      const cbrResult = await extractComicInfoXMLFromRar(
+        filePath,
+        mimeType
+      );
       return Object.assign({}, ...cbrResult);
     default:
-      console.error("Error inferring filetype for comicinfo.xml extraction.");
+      console.error(
+        "Error inferring filetype for comicinfo.xml extraction."
+      );
       throw new MoleculerError({}, 500, "FILETYPE_INFERENCE_ERROR", {
         data: { message: "Cannot infer filetype." },
       });
@@ -336,7 +386,10 @@ export const extractFromArchive = async (filePath: string) => {
  * @param {any} options
  * @returns {Promise} A promise containing the contents of the uncompressed archive.
  */
-export const uncompressEntireArchive = async (filePath: string, options: any) => {
+export const uncompressEntireArchive = async (
+  filePath: string,
+  options: any
+) => {
   const mimeType = await getMimeType(filePath);
   console.log(`File has the following mime-type: ${mimeType}`);
   switch (mimeType) {
@@ -378,7 +431,8 @@ export const uncompressRarArchive = async (filePath: string, options: any) => {
   const directoryOptions = {
     mode: 0o2775,
   };
-  const { fileNameWithoutExtension, extension } = getFileConstituents(filePath);
+  const { fileNameWithoutExtension, extension } =
+    getFileConstituents(filePath);
   const targetDirectory = `${USERDATA_DIRECTORY}/expanded/${options.purpose}/${fileNameWithoutExtension}`;
   await createDirectory(directoryOptions, targetDirectory);
@@ -415,7 +469,10 @@ export const uncompressRarArchive = async (filePath: string, options: any) => {
   return await resizeImageDirectory(targetDirectory, options);
 };
-export const resizeImageDirectory = async (directoryPath: string, options: any) => {
+export const resizeImageDirectory = async (
+  directoryPath: string,
+  options: any
+) => {
   const files = await walkFolder(directoryPath, [
     ".jpg",
     ".jpeg",
@@ -443,15 +500,25 @@ export const resizeImage = (directoryPath: string, file: any, options: any) => {
   const { baseWidth } = options.imageResizeOptions;
   const sharpResizeInstance = sharp().resize(baseWidth).toFormat("jpg");
   return new Promise((resolve, reject) => {
-    const resizedStream = createReadStream(`${directoryPath}/${file.name}${file.extension}`);
+    const resizedStream = createReadStream(
+      `${directoryPath}/${file.name}${file.extension}`
+    );
     if (fse.existsSync(`${directoryPath}/${file.name}${file.extension}`)) {
       resizedStream
        .pipe(sharpResizeInstance)
-        .toFile(`${directoryPath}/${file.name}_${baseWidth}px${file.extension}`)
+        .toFile(
+          `${directoryPath}/${file.name}_${baseWidth}px${file.extension}`
+        )
        .then((data) => {
-          console.log(`Resized image ${JSON.stringify(data, null, 4)}`);
-          fse.unlink(`${directoryPath}/${file.name}${file.extension}`);
-          resolve(`${directoryPath}/${file.name}_${baseWidth}px${file.extension}`);
+          console.log(
+            `Resized image ${JSON.stringify(data, null, 4)}`
+          );
+          fse.unlink(
+            `${directoryPath}/${file.name}${file.extension}`
+          );
+          resolve(
+            `${directoryPath}/${file.name}_${baseWidth}px${file.extension}`
+          );
        });
    }
  });
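
Note: `uncompressEntireArchive` and the resize helpers read two fields off `options`: `purpose` (a path segment under `expanded/`) and `imageResizeOptions.baseWidth` (the sharp resize width). A sketch of the assumed options shape, with placeholder values:

const options = {
  purpose: "reading", // target directory becomes expanded/reading/<archive name>
  imageResizeOptions: {
    baseWidth: 1024, // hypothetical page width in px
  },
};
const pages = await uncompressEntireArchive(
  "/comics/Example Issue 001.cbz", // placeholder path
  options
);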