🔧 Prettification

2024-10-23 14:26:24 -04:00
parent baa5a99855
commit d7e865f84f


@@ -33,7 +33,13 @@ SOFTWARE.
"use strict";
import { isNil } from "lodash";
import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
import {
Context,
Service,
ServiceBroker,
ServiceSchema,
Errors,
} from "moleculer";
import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model";
import { walkFolder, getSizeOfDirectory } from "../utils/file.utils";
@@ -128,7 +134,8 @@ export default class ImportService extends Service {
});
// Determine source where the comic was added from
// and gather identifying information about it
const sourceName = referenceComicObject[0].acquisition.source.name;
const sourceName =
referenceComicObject[0].acquisition.source.name;
const { sourcedMetadata } = referenceComicObject[0];
const filePath = `${COMICS_DIRECTORY}/${ctx.params.bundle.data.name}`;
@@ -172,8 +179,14 @@ export default class ImportService extends Service {
// 1.1 Filter on .cb* extensions
.pipe(
through2.obj(function (item, enc, next) {
let fileExtension = path.extname(item.path);
if ([".cbz", ".cbr", ".cb7"].includes(fileExtension)) {
let fileExtension = path.extname(
item.path
);
if (
[".cbz", ".cbr", ".cb7"].includes(
fileExtension
)
) {
this.push(item);
}
next();
@@ -182,7 +195,10 @@ export default class ImportService extends Service {
// 1.2 Pipe filtered results to the next step
// Enqueue the job in the queue
.on("data", async (item) => {
console.info("Found a file at path: %s", item.path);
console.info(
"Found a file at path: %s",
item.path
);
let comicExists = await Comic.exists({
"rawFileDetails.name": `${path.basename(
item.path,
@@ -191,8 +207,14 @@ export default class ImportService extends Service {
});
if (!comicExists) {
// 2.1 Reset the job counters in Redis
await pubClient.set("completedJobCount", 0);
await pubClient.set("failedJobCount", 0);
await pubClient.set(
"completedJobCount",
0
);
await pubClient.set(
"failedJobCount",
0
);
// 2.2 Send the extraction job to the queue
this.broker.call("jobqueue.enqueue", {
fileObject: {
@@ -204,7 +226,9 @@ export default class ImportService extends Service {
action: "enqueue.async",
});
} else {
console.log("Comic already exists in the library.");
console.log(
"Comic already exists in the library."
);
}
})
.on("end", () => {
@@ -249,13 +273,19 @@ export default class ImportService extends Service {
}>
) {
try {
console.log(JSON.stringify(ctx.params.payload, null, 4));
console.log(
JSON.stringify(ctx.params.payload, null, 4)
);
const { payload } = ctx.params;
const { wanted } = payload;
console.log("Saving to Mongo...");
if (!wanted || !wanted.volume || !wanted.volume.id) {
if (
!wanted ||
!wanted.volume ||
!wanted.volume.id
) {
console.log(
"No valid identifier for upsert. Attempting to create a new document with minimal data..."
);
@@ -300,7 +330,11 @@ export default class ImportService extends Service {
new: true,
};
const result = await Comic.findOneAndUpdate(condition, update, options);
const result = await Comic.findOneAndUpdate(
condition,
update,
options
);
console.log(
"Operation completed. Document updated or inserted:",
result
@@ -313,7 +347,10 @@ export default class ImportService extends Service {
};
} catch (error) {
console.log(error);
throw new Errors.MoleculerError("Operation failed.", 500);
throw new Errors.MoleculerError(
"Operation failed.",
500
);
}
},
},
@@ -353,7 +390,9 @@ export default class ImportService extends Service {
) {
// 1. Find mongo object by id
// 2. Import payload into sourcedMetadata.comicvine
const comicObjectId = new ObjectId(ctx.params.comicObjectId);
const comicObjectId = new ObjectId(
ctx.params.comicObjectId
);
return new Promise(async (resolve, reject) => {
let volumeDetails = {};
@@ -362,15 +401,18 @@ export default class ImportService extends Service {
const volumeDetails = await this.broker.call(
"comicvine.getVolumes",
{
volumeURI: matchedResult.volume.api_detail_url,
volumeURI:
matchedResult.volume.api_detail_url,
}
);
matchedResult.volumeInformation = volumeDetails.results;
matchedResult.volumeInformation =
volumeDetails.results;
Comic.findByIdAndUpdate(
comicObjectId,
{
$set: {
"sourcedMetadata.comicvine": matchedResult,
"sourcedMetadata.comicvine":
matchedResult,
},
},
{ new: true },
@@ -401,7 +443,9 @@ export default class ImportService extends Service {
}>
) {
console.log(JSON.stringify(ctx.params, null, 2));
const comicObjectId = new ObjectId(ctx.params.comicObjectId);
const comicObjectId = new ObjectId(
ctx.params.comicObjectId
);
return new Promise((resolve, reject) => {
Comic.findByIdAndUpdate(
@@ -439,8 +483,13 @@ export default class ImportService extends Service {
announce: [String];
}>
) => {
const { name, torrentToDownload, comicObjectId, announce, infoHash } =
ctx.params;
const {
name,
torrentToDownload,
comicObjectId,
announce,
infoHash,
} = ctx.params;
console.log(JSON.stringify(ctx.params, null, 4));
try {
return await Comic.findByIdAndUpdate(
@@ -503,7 +552,9 @@ export default class ImportService extends Service {
params: { id: "string" },
async handler(ctx: Context<{ id: string }>) {
console.log(ctx.params.id);
return await Comic.findById(new ObjectId(ctx.params.id));
return await Comic.findById(
new ObjectId(ctx.params.id)
);
},
},
getComicBooksByIds: {
@@ -511,7 +562,9 @@ export default class ImportService extends Service {
params: { ids: "array" },
handler: async (ctx: Context<{ ids: [string] }>) => {
console.log(ctx.params.ids);
const queryIds = ctx.params.ids.map((id) => new ObjectId(id));
const queryIds = ctx.params.ids.map(
(id) => new ObjectId(id)
);
return await Comic.find({
_id: {
$in: queryIds,
@@ -527,7 +580,8 @@ export default class ImportService extends Service {
const volumes = await Comic.aggregate([
{
$project: {
volumeInfo: "$sourcedMetadata.comicvine.volumeInformation",
volumeInfo:
"$sourcedMetadata.comicvine.volumeInformation",
},
},
{
@@ -573,46 +627,52 @@ export default class ImportService extends Service {
const { queryObjects } = ctx.params;
// construct the query for ElasticSearch
let elasticSearchQuery = {};
const elasticSearchQueries = queryObjects.map((queryObject) => {
console.log("Volume: ", queryObject.volumeName);
console.log("Issue: ", queryObject.issueName);
if (queryObject.issueName === null) {
queryObject.issueName = "";
}
if (queryObject.volumeName === null) {
queryObject.volumeName = "";
}
elasticSearchQuery = {
bool: {
must: [
{
match_phrase: {
"rawFileDetails.name": queryObject.volumeName,
const elasticSearchQueries = queryObjects.map(
(queryObject) => {
console.log("Volume: ", queryObject.volumeName);
console.log("Issue: ", queryObject.issueName);
if (queryObject.issueName === null) {
queryObject.issueName = "";
}
if (queryObject.volumeName === null) {
queryObject.volumeName = "";
}
elasticSearchQuery = {
bool: {
must: [
{
match_phrase: {
"rawFileDetails.name":
queryObject.volumeName,
},
},
},
{
term: {
"inferredMetadata.issue.number": parseInt(
queryObject.issueNumber,
10
),
{
term: {
"inferredMetadata.issue.number":
parseInt(
queryObject.issueNumber,
10
),
},
},
},
],
},
};
],
},
};
return [
{
index: "comics",
search_type: "dfs_query_then_fetch",
},
{
query: elasticSearchQuery,
},
];
});
console.log(JSON.stringify(elasticSearchQueries, null, 2));
return [
{
index: "comics",
search_type: "dfs_query_then_fetch",
},
{
query: elasticSearchQuery,
},
];
}
);
console.log(
JSON.stringify(elasticSearchQueries, null, 2)
);
return await ctx.broker.call("search.searchComic", {
elasticSearchQueries,
@@ -625,11 +685,10 @@ export default class ImportService extends Service {
rest: "GET /libraryStatistics",
params: {},
handler: async (ctx: Context<{}>) => {
const comicDirectorySize = await getSizeOfDirectory(COMICS_DIRECTORY, [
".cbz",
".cbr",
".cb7",
]);
const comicDirectorySize = await getSizeOfDirectory(
COMICS_DIRECTORY,
[".cbz", ".cbr", ".cb7"]
);
const totalCount = await Comic.countDocuments({});
const statistics = await Comic.aggregate([
{
@@ -638,7 +697,11 @@ export default class ImportService extends Service {
{
$match: {
"rawFileDetails.extension": {
$in: [".cbr", ".cbz", ".cb7"],
$in: [
".cbr",
".cbz",
".cb7",
],
},
},
},
@@ -652,9 +715,10 @@ export default class ImportService extends Service {
issues: [
{
$match: {
"sourcedMetadata.comicvine.volumeInformation": {
$gt: {},
},
"sourcedMetadata.comicvine.volumeInformation":
{
$gt: {},
},
},
},
{
@@ -709,7 +773,6 @@ export default class ImportService extends Service {
},
},
// This method belongs in library service,
// because bundles can only exist for comics _in the library_
// (wanted or imported)
@@ -722,25 +785,32 @@ export default class ImportService extends Service {
config: any;
}>
) => {
// 1. Get the comic object Id
const { config } = ctx.params;
const comicObject = await Comic.findById(
new ObjectId(ctx.params.comicObjectId)
);
// 2. Init AirDC++
// 3. Get the bundles for the comic object
const ADCPPSocket = new AirDCPPSocket(config);
if (comicObject) {
const foo = comicObject.acquisition.directconnect.downloads.map(
async (bundle) => {
// make the call to get the bundles from AirDC++ using the bundleId
return await ADCPPSocket.get(`queue/bundles/${bundle.id}`);
}
try {
// 1. Get the comic object Id
const { config } = ctx.params;
const comicObject = await Comic.findById(
new ObjectId(ctx.params.comicObjectId)
);
return foo;
}
return false;
// 2. Init AirDC++
// 3. Get the bundles for the comic object
const ADCPPSocket = new AirDCPPSocket(config);
if (comicObject) {
// make the call to get the bundles from AirDC++ using the bundleId
return comicObject.acquisition.directconnect.downloads.map(
async (bundle) =>
await ADCPPSocket.get(
`queue/bundles/${bundle.id}`
)
);
}
return false;
} catch (error) {
throw new Errors.MoleculerError(
"Couldn't fetch bundles from AirDC++",
500
);
}
},
},
flushDB: {
@@ -751,16 +821,23 @@ export default class ImportService extends Service {
.drop()
.then(async (data) => {
console.info(data);
const coversFolderDeleteResult = fsExtra.emptyDirSync(
path.resolve(`${USERDATA_DIRECTORY}/covers`)
);
const expandedFolderDeleteResult = fsExtra.emptyDirSync(
path.resolve(`${USERDATA_DIRECTORY}/expanded`)
);
const eSIndicesDeleteResult = await ctx.broker.call(
"search.deleteElasticSearchIndices",
{}
);
const coversFolderDeleteResult =
fsExtra.emptyDirSync(
path.resolve(
`${USERDATA_DIRECTORY}/covers`
)
);
const expandedFolderDeleteResult =
fsExtra.emptyDirSync(
path.resolve(
`${USERDATA_DIRECTORY}/expanded`
)
);
const eSIndicesDeleteResult =
await ctx.broker.call(
"search.deleteElasticSearchIndices",
{}
);
return {
data,
coversFolderDeleteResult,