Merge pull request #7 from rishighan/qbittorrent-settings
🌊 Modified settings model schema
@@ -2,7 +2,7 @@ const paginate = require("mongoose-paginate-v2");
 const { Client } = require("@elastic/elasticsearch");
 import ComicVineMetadataSchema from "./comicvine.metadata.model";
 import { mongoosastic } from "mongoosastic-ts";
-const mongoose = require("mongoose")
+const mongoose = require("mongoose");
 import {
   MongoosasticDocument,
   MongoosasticModel,
@@ -112,6 +112,7 @@ const ComicSchema = mongoose.Schema(
       },
     },
     torrent: {
       downloads: [],
+      sourceApplication: String,
       magnet: String,
       tracker: String,

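For reference, a comic document matching the updated schema would carry a torrent block shaped roughly like the sketch below; the field values are hypothetical and only illustrate the new fields.

// Sketch of the new `torrent` subdocument (illustrative values, not from the commit).
const exampleComic = {
  torrent: {
    downloads: [],                              // per-download records, empty until a download starts
    sourceApplication: "qbittorrent",           // which client handled the torrent
    magnet: "magnet:?xt=urn:btih:...",          // magnet URI for the issue
    tracker: "udp://tracker.example.org:1337",  // announce URL
  },
};
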
@@ -1,21 +1,28 @@
 const mongoose = require("mongoose");
 const paginate = require("mongoose-paginate-v2");

+const HostSchema = mongoose.Schema({
+  _id: false,
+  username: String,
+  password: String,
+  hostname: String,
+  port: String,
+  protocol: String,
+});
 const SettingsScehma = mongoose.Schema({
   directConnect: {
     client: {
-      host: {
-        username: String,
-        password: String,
-        hostname: String,
-        port: String,
-        protocol: String,
-      },
+      host: HostSchema,
       airDCPPUserSettings: Object,

       hubs: Array,
     },
   },
+  bittorrent: {
+    client: {
+      name: String,
+      host: HostSchema,
+    },
+  },
 });

 const Settings = mongoose.model("Settings", SettingsScehma);

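Under the reworked schema, both the AirDC++ (directConnect) and qBittorrent (bittorrent) connections reuse HostSchema. A saved settings document would look roughly like this sketch; all values are hypothetical.

// Sketch of a settings document under the new schema (illustrative values).
const exampleSettings = {
  directConnect: {
    client: {
      host: { username: "admin", password: "secret", hostname: "localhost", port: "5600", protocol: "http" },
      airDCPPUserSettings: {},
      hubs: [],
    },
  },
  bittorrent: {
    client: {
      name: "qbittorrent",
      host: { username: "admin", password: "secret", hostname: "localhost", port: "8080", protocol: "http" },
    },
  },
};
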
package-lock.json (generated, 861 lines changed): diff suppressed because it is too large.
@@ -2,7 +2,7 @@ import { Context, Service, ServiceBroker } from "moleculer";
 import JobResult from "../models/jobresult.model";
 import { refineQuery } from "filename-parser";
 import BullMqMixin from "moleculer-bullmq";
-import { extractFromArchive } from "../utils/uncompression.utils";
+import { extractFromArchive, uncompressEntireArchive } from "../utils/uncompression.utils";
 import { isNil, isUndefined } from "lodash";
 import { pubClient } from "../config/redis.config";
@@ -47,12 +47,15 @@ export default class JobQueueService extends Service {
       enqueue: {
         queue: true,
         rest: "/GET enqueue",
-        handler: async (ctx: Context<{}>) => {
+        handler: async (ctx: Context<{ queueName: string; description: string }>) => {
+          console.log(ctx.params);
+          const { queueName, description } = ctx.params;
           // Enqueue the job
-          const job = await this.localQueue(ctx, "enqueue.async", ctx.params, {
+          const job = await this.localQueue(ctx, queueName, ctx.params, {
             priority: 10,
           });
-          console.log(`Job ${job.id} enqueued`);
+          console.log(`${description}`);

           return job.id;
         },
@@ -249,9 +252,24 @@ export default class JobQueueService extends Service {
           ]);
         },
       },
+      "uncompressFullArchive.async": {
+        rest: "POST /uncompressFullArchive",
+        handler: async (ctx: Context<{ filePath: string; options: any }>) => {
+          const { filePath, options } = ctx.params;
+          console.log("asd", filePath);
+          // Uncompress the entire archive
+          return await uncompressEntireArchive(filePath, options);
+        },
+      },
     },

     events: {
+      async "uncompressFullArchive.async.active"(ctx: Context<{ id: number }>) {
+        console.log(`Uncompression Job ID ${ctx.params.id} is set to active.`);
+      },
+      async "uncompressFullArchive.async.completed"(ctx: Context<{ id: number }>) {
+        console.log(`Uncompression Job ID ${ctx.params.id} completed.`);
+      },
       // use the `${QUEUE_NAME}.QUEUE_EVENT` scheme
       async "enqueue.async.active"(ctx: Context<{ id: Number }>) {
         console.log(`Job ID ${ctx.params.id} is set to active.`);
@@ -260,10 +278,10 @@ export default class JobQueueService extends Service {
       console.log("Queue drained.");
       await this.broker.call("socket.broadcast", {
         namespace: "/",
-        event: "action",
+        event: "LS_IMPORT_QUEUE_DRAINED",
         args: [
           {
             type: "LS_IMPORT_QUEUE_DRAINED",
             message: "drained",
           },
         ],
       });
@@ -278,10 +296,9 @@ export default class JobQueueService extends Service {
       // 4. Emit the LS_COVER_EXTRACTED event with the necessary details
       await this.broker.call("socket.broadcast", {
         namespace: "/",
-        event: "action",
+        event: "LS_COVER_EXTRACTED",
         args: [
           {
-            type: "LS_COVER_EXTRACTED",
             completedJobCount,
             importResult: job.returnvalue.data.importResult,
           },
@@ -315,10 +332,9 @@ export default class JobQueueService extends Service {
       // 4. Emit the LS_COVER_EXTRACTION_FAILED event with the necessary details
       await this.broker.call("socket.broadcast", {
         namespace: "/",
-        event: "action",
+        event: "LS_COVER_EXTRACTION_FAILED",
         args: [
           {
-            type: "LS_COVER_EXTRACTION_FAILED",
             failedJobCount,
             importResult: job,
           },

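Since enqueue now reads the target queue name from its params instead of hard-coding enqueue.async, one action can route jobs to any registered queue. A minimal sketch of such a call; `broker` is assumed to be a started Moleculer broker with this service loaded, and the payload fields beyond queueName/description are hypothetical (they travel through ctx.params to the queued handler).

import { ServiceBroker } from "moleculer";

// Sketch: enqueueing through the parameterized action.
async function enqueueUncompression(broker: ServiceBroker) {
  const jobId = await broker.call("jobqueue.enqueue", {
    queueName: "uncompressFullArchive.async",
    description: "Uncompress archive for reading",
    filePath: "./comics/example.cbz",
    options: { purpose: "reading" },
  });
  console.log(`Enqueued job ${jobId}`);
}
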
@@ -186,6 +186,7 @@ export default class ImportService extends Service {
           },
           sessionId,
           importType: "new",
+          queueName: "enqueue.async",
         });
       } else {
         console.log("Comic already exists in the library.");
@@ -385,6 +386,7 @@ export default class ImportService extends Service {
       rest: "POST /getComicBookById",
       params: { id: "string" },
       async handler(ctx: Context<{ id: string }>) {
+        console.log(ctx.params.id);
         return await Comic.findById(ctx.params.id);
       },
     },

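A hedged usage sketch for the getComicBookById action touched above: the "library" service name is inferred from the library.importDownloadedComic call made elsewhere in this diff, and the id value is a hypothetical ObjectId string.

import { ServiceBroker } from "moleculer";

// Sketch: fetching a comic through the REST-exposed action.
async function fetchComic(broker: ServiceBroker) {
  return await broker.call("library.getComicBookById", {
    id: "64a1f0c2b5e9d3a7c8e21f04",
  });
}
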
@@ -75,9 +75,9 @@ export default class SettingsService extends Service {
     ) => {
       try {
         console.log(ctx.params);
-        const { query, pagination } = ctx.params;
+        const { query, pagination, type } = ctx.params;
         let eSQuery = {};
-        switch (ctx.params.type) {
+        switch (type) {
           case "all":
             Object.assign(eSQuery, {
               match_all: {},

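Destructuring type up front keeps the switch terse. For type === "all", the branch above produces a bare match-all Elasticsearch query body, roughly (assumed result, mirroring the lines above):

// What the "all" branch leaves in eSQuery:
let eSQuery = {};
Object.assign(eSQuery, { match_all: {} });
// eSQuery is now { match_all: {} }, ready to be used as the ES query body.
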
@@ -8,7 +8,7 @@ import {
 } from "moleculer";
 import { DbMixin } from "../mixins/db.mixin";
 import Settings from "../models/settings.model";
-import { isEmpty, pickBy, identity, map } from "lodash";
+import { isEmpty, pickBy, identity, map, isNil } from "lodash";
 const ObjectId = require("mongoose").Types.ObjectId;

 export default class SettingsService extends Service {
@@ -42,44 +42,106 @@ export default class SettingsService extends Service {
       params: {},
       async handler(
         ctx: Context<{
-          settingsPayload: {
-            host: object;
-            airDCPPUserSettings: object;
-            hubs: [];
+          settingsPayload?: {
+            protocol: string;
+            hostname: string;
+            port: string;
+            username: string;
+            password: string;
+            _id?: string;
+            airDCPPUserSettings?: object;
+            hubs?: [];
           };
-          settingsObjectId: string;
+          settingsObjectId?: string;
           settingsKey: string;
         }>
       ) {
-        console.log("varan bhat", ctx.params);
-        const { host, airDCPPUserSettings, hubs } =
-          ctx.params.settingsPayload;
-        let query = {
-          host,
-          airDCPPUserSettings,
-          hubs,
-        };
-        const keysToUpdate = pickBy(query, identity);
-        let updateQuery = {};
         try {
+          let query = {};
+          const { settingsKey, settingsObjectId } = ctx.params;
+          const {
+            hostname,
+            protocol,
+            port,
+            username,
+            password,
+          } = ctx.params.settingsPayload;
+          const host = {
+            hostname,
+            protocol,
+            port,
+            username,
+            password,
+          };
+          const undefinedPropsInHostname = Object.values(
+            host
+          ).filter((value) => value === undefined);

-          map(Object.keys(keysToUpdate), (key) => {
-            updateQuery[`directConnect.client.${key}`] =
-              query[key];
-          });
-          const options = {
-            upsert: true,
-            new: true,
-            setDefaultsOnInsert: true,
-          };
-          const filter = {
-            _id: new ObjectId(ctx.params.settingsObjectId),
-          };
-          const result = Settings.findOneAndUpdate(
-            filter,
-            { $set: updateQuery },
-            options
-          );
+          // Update, depending what key was passed in params
+          // 1. Construct the update query
+          switch (settingsKey) {
+            case "bittorrent":
+              console.log(
+                `Received settings for ${settingsKey}, building query...`
+              );
+              query = {
+                ...(undefinedPropsInHostname.length === 0 && {
+                  $set: {
+                    "bittorrent.client.host": host,
+                  },
+                }),
+              };
+              break;
+            case "directConnect":
+              console.log(
+                `Received settings for ${settingsKey}, building query...`
+              );
+              const { hubs, airDCPPUserSettings } =
+                ctx.params.settingsPayload;
+              query = {
+                ...(undefinedPropsInHostname.length === 0 && {
+                  $set: {
+                    "directConnect.client.host": host,
+                  },
+                }),
+                ...(!isNil(hubs) && {
+                  $set: {
+                    "directConnect.client.hubs": hubs,
+                  },
+                }),
+              };
+              console.log(JSON.stringify(query, null, 4));
+              break;

-          return result;
+            default:
+              return false;
+          }

+          // 2. Set up options, filters
+          const options = {
+            upsert: true,
+            setDefaultsOnInsert: true,
+            returnDocument: "after",
+          };
+          const filter = settingsObjectId
+            ? { _id: settingsObjectId }
+            : {};

+          // 3. Execute the mongo query
+          const result = await Settings.findOneAndUpdate(
+            filter,
+            query,
+            options
+          );
+          return result;
         } catch (err) {
           return err;
         }
       },
     },
     deleteSettings: {

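With settingsKey steering the query builder, callers pass the flattened host fields plus the key for the section they want to update. A hedged sketch of the two branches; the "settings.saveSettings" service/action names are assumptions (this hunk does not show them), and all field values are invented.

import { ServiceBroker } from "moleculer";

// Sketch: exercising the two switch branches.
async function saveClientSettings(broker: ServiceBroker) {
  // qBittorrent: all five host fields present, so the host $set is applied.
  await broker.call("settings.saveSettings", {
    settingsKey: "bittorrent",
    settingsPayload: {
      protocol: "http",
      hostname: "localhost",
      port: "8080",
      username: "admin",
      password: "secret",
    },
  });

  // AirDC++: host fields omitted (undefined), so the undefinedPropsInHostname
  // guard skips the host $set and only the hubs $set is written.
  await broker.call("settings.saveSettings", {
    settingsKey: "directConnect",
    settingsPayload: { hubs: ["adcs://hub.example.org:5000"] },
  });
}
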
@@ -26,89 +26,7 @@ export default class SocketService extends Service {
       "/": {
         events: {
           call: {
             // whitelist: ["math.*", "say.*", "accounts.*", "rooms.*", "io.*"],
           },
-          action: async (data) => {
-            switch (data.type) {
-              case "RESUME_SESSION":
-                console.log("Attempting to resume session...");
-                try {
-                  const sessionRecord = await Session.find({
-                    sessionId: data.session.sessionId,
-                  });
-                  // 1. Check for sessionId's existence, and a match
-                  if (
-                    sessionRecord.length !== 0 &&
-                    sessionRecord[0].sessionId === data.session.sessionId
-                  ) {
-                    // 2. Find if the queue has active jobs
-                    const jobs: JobType = await this.broker.call(
-                      "jobqueue.getJobCountsByType",
-                      {}
-                    );
-                    const { active } = jobs;
-
-                    if (active > 0) {
-                      // 3. Get job counts
-                      const completedJobCount = await pubClient.get(
-                        "completedJobCount"
-                      );
-                      const failedJobCount = await pubClient.get(
-                        "failedJobCount"
-                      );
-
-                      // 4. Send the counts to the active socket.io session
-                      await this.broker.call("socket.broadcast", {
-                        namespace: "/",
-                        event: "action",
-                        args: [
-                          {
-                            type: "RESTORE_JOB_COUNTS_AFTER_SESSION_RESTORATION",
-                            completedJobCount,
-                            failedJobCount,
-                            queueStatus: "running",
-                          },
-                        ],
-                      });
-                    }
-                  }
-                } catch (err) {
-                  throw new MoleculerError(
-                    err,
-                    500,
-                    "SESSION_ID_NOT_FOUND",
-                    {
-                      data: data.session.sessionId,
-                    }
-                  );
-                }
-
-                break;
-
-              case "LS_SET_QUEUE_STATUS":
-                console.log(data);
-                await this.broker.call(
-                  "jobqueue.toggle",
-                  { action: data.data.queueAction },
-                  {}
-                );
-                break;
-              case "LS_SINGLE_IMPORT":
-                console.info("AirDC++ finished a download -> ");
-                console.log(data);
-                await this.broker.call(
-                  "library.importDownloadedComic",
-                  { bundle: data },
-                  {}
-                );
-                break;
-              // uncompress archive events
-              case "COMICBOOK_EXTRACTION_SUCCESS":
-                console.log(data);
-                return data;
-            }
-          },
+          whitelist: ["socket.*"],
         },
       },
@@ -119,7 +37,84 @@ export default class SocketService extends Service {
       },
     },
     hooks: {},
-    actions: {},
+    actions: {
+      resumeSession: async (ctx: Context<{ sessionId: string }>) => {
+        const { sessionId } = ctx.params;
+        console.log("Attempting to resume session...");
+        try {
+          const sessionRecord = await Session.find({
+            sessionId,
+          });
+          // 1. Check for sessionId's existence, and a match
+          if (
+            sessionRecord.length !== 0 &&
+            sessionRecord[0].sessionId === sessionId
+          ) {
+            // 2. Find if the queue has active, paused or waiting jobs
+            const jobs: JobType = await this.broker.call(
+              "jobqueue.getJobCountsByType",
+              {}
+            );
+            const { active, paused, waiting } = jobs;
+
+            if (active > 0 || paused > 0 || waiting > 0) {
+              // 3. Get job counts
+              const completedJobCount = await pubClient.get(
+                "completedJobCount"
+              );
+              const failedJobCount = await pubClient.get(
+                "failedJobCount"
+              );
+
+              // 4. Send the counts to the active socket.io session
+              await this.broker.call("socket.broadcast", {
+                namespace: "/",
+                event: "RESTORE_JOB_COUNTS_AFTER_SESSION_RESTORATION",
+                args: [
+                  {
+                    completedJobCount,
+                    failedJobCount,
+                    queueStatus: "running",
+                  },
+                ],
+              });
+            }
+          }
+        } catch (err) {
+          throw new MoleculerError(
+            err,
+            500,
+            "SESSION_ID_NOT_FOUND",
+            {
+              data: sessionId,
+            }
+          );
+        }
+      },
+
+      setQueueStatus: async (
+        ctx: Context<{
+          queueAction: string;
+          queueStatus: string;
+        }>
+      ) => {
+        const { queueAction } = ctx.params;
+        await this.broker.call(
+          "jobqueue.toggle",
+          { action: queueAction },
+          {}
+        );
+      },
+      importSingleIssue: async (ctx: Context<{}>) => {
+        console.info("AirDC++ finished a download -> ");
+        console.log(ctx.params);
+        // await this.broker.call(
+        //   "library.importDownloadedComic",
+        //   { bundle: data },
+        //   {}
+        // );
+      },
+    },
     methods: {},
     async started() {
       this.io.on("connection", async (socket) => {
@@ -146,10 +141,7 @@ export default class SocketService extends Service {
         }
         // 2. else, retrieve it from Mongo and "resume" the socket.io connection
         else {
-          console.log(
-            `Found socketId ${socket.id}, attempting to resume socket.io connection...`
-          );
-          console.log(socket.handshake.query.sessionId);
+          console.log(`Found socketId ${socket.id}, no-op.`);
         }
       });
     },

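Client messages that previously flowed through the monolithic inline action handler now map onto named actions, which any service (or socket event) can call directly. A sketch of the three extracted actions; the "socket" service name matches the socket.broadcast calls seen above, and the parameter values are hypothetical.

import { ServiceBroker } from "moleculer";

// Sketch: the extracted actions, addressed through the broker.
async function driveSocketActions(broker: ServiceBroker) {
  await broker.call("socket.resumeSession", { sessionId: "a1b2c3" });
  await broker.call("socket.setQueueStatus", {
    queueAction: "pause",
    queueStatus: "paused",
  });
  await broker.call("socket.importSingleIssue", {}); // currently just logs; the import call is commented out
}
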
@@ -74,15 +74,14 @@ const errors = [];
  */
 export const extractComicInfoXMLFromRar = async (
   filePath: string,
-  mimeType: string,
+  mimeType: string
 ): Promise<any> => {
   try {
     // Create the target directory
     const directoryOptions = {
       mode: 0o2775,
     };
-    const { fileNameWithoutExtension, extension } =
-      getFileConstituents(filePath);
+    const { fileNameWithoutExtension, extension } = getFileConstituents(filePath);
     const targetDirectory = `${USERDATA_DIRECTORY}/covers/${sanitize(
       fileNameWithoutExtension
     )}`;
@@ -93,17 +92,15 @@ export const extractComicInfoXMLFromRar = async (
       bin: `${UNRAR_BIN_PATH}`, // this will change depending on Docker base OS
       arguments: ["-v"],
     });
-    const filesInArchive: [RarFile] = await new Promise(
-      (resolve, reject) => {
-        return archive.list((err, entries) => {
-          if (err) {
-            console.log(`DEBUG: ${JSON.stringify(err, null, 2)}`);
-            reject(err);
-          }
-          resolve(entries);
-        });
-      }
-    );
+    const filesInArchive: [RarFile] = await new Promise((resolve, reject) => {
+      return archive.list((err, entries) => {
+        if (err) {
+          console.log(`DEBUG: ${JSON.stringify(err, null, 2)}`);
+          reject(err);
+        }
+        resolve(entries);
+      });
+    });

     remove(filesInArchive, ({ type }) => type === "Directory");
     const comicInfoXML = remove(
@@ -113,10 +110,7 @@ export const extractComicInfoXMLFromRar = async (

     remove(
       filesInArchive,
-      ({ name }) =>
-        !IMPORT_IMAGE_FILE_FORMATS.includes(
-          path.extname(name).toLowerCase()
-        )
+      ({ name }) => !IMPORT_IMAGE_FILE_FORMATS.includes(path.extname(name).toLowerCase())
     );
     const files = filesInArchive.sort((a, b) => {
       if (!isUndefined(a) && !isUndefined(b)) {
@@ -129,12 +123,8 @@ export const extractComicInfoXMLFromRar = async (
     const comicInfoXMLFilePromise = new Promise((resolve, reject) => {
       let comicinfostring = "";
       if (!isUndefined(comicInfoXML[0])) {
-        const comicInfoXMLFileName = path.basename(
-          comicInfoXML[0].name
-        );
-        const writeStream = createWriteStream(
-          `${targetDirectory}/${comicInfoXMLFileName}`
-        );
+        const comicInfoXMLFileName = path.basename(comicInfoXML[0].name);
+        const writeStream = createWriteStream(`${targetDirectory}/${comicInfoXMLFileName}`);

         archive.stream(comicInfoXML[0]["name"]).pipe(writeStream);
         writeStream.on("finish", async () => {
@@ -147,11 +137,7 @@ export const extractComicInfoXMLFromRar = async (
           });
           readStream.on("error", (error) => reject(error));
           readStream.on("end", async () => {
-            if (
-              existsSync(
-                `${targetDirectory}/${comicInfoXMLFileName}`
-              )
-            ) {
+            if (existsSync(`${targetDirectory}/${comicInfoXMLFileName}`)) {
               const comicInfoJSON = await convertXMLToJSON(
                 comicinfostring.toString()
               );
@@ -172,34 +158,29 @@ export const extractComicInfoXMLFromRar = async (
       const sharpStream = sharp().resize(275).toFormat("png");
       const coverExtractionStream = archive.stream(files[0].name);
       const resizeStream = coverExtractionStream.pipe(sharpStream);
-      resizeStream.toFile(
-        `${targetDirectory}/${coverFile}`,
-        (err, info) => {
-          if (err) {
-            reject(err);
-          }
-          checkFileExists(`${targetDirectory}/${coverFile}`).then(
-            (bool) => {
-              console.log(`${coverFile} exists: ${bool}`);
-              // orchestrate result
-              resolve({
-                filePath,
-                name: fileNameWithoutExtension,
-                extension,
-                containedIn: targetDirectory,
-                fileSize: fse.statSync(filePath).size,
-                mimeType,
-                cover: {
-                  filePath: path.relative(
-                    process.cwd(),
-                    `${targetDirectory}/${coverFile}`
-                  ),
-                },
-              });
-            }
-          );
-        }
-      );
+      resizeStream.toFile(`${targetDirectory}/${coverFile}`, (err, info) => {
+        if (err) {
+          reject(err);
+        }
+        checkFileExists(`${targetDirectory}/${coverFile}`).then((bool) => {
+          console.log(`${coverFile} exists: ${bool}`);
+          // orchestrate result
+          resolve({
+            filePath,
+            name: fileNameWithoutExtension,
+            extension,
+            containedIn: targetDirectory,
+            fileSize: fse.statSync(filePath).size,
+            mimeType,
+            cover: {
+              filePath: path.relative(
+                process.cwd(),
+                `${targetDirectory}/${coverFile}`
+              ),
+            },
+          });
+        });
+      });
     });

     return Promise.all([comicInfoXMLFilePromise, coverFilePromise]);
@@ -210,15 +191,14 @@ export const extractComicInfoXMLFromRar = async (

 export const extractComicInfoXMLFromZip = async (
   filePath: string,
-  mimeType: string,
+  mimeType: string
 ): Promise<any> => {
   try {
     // Create the target directory
     const directoryOptions = {
       mode: 0o2775,
     };
-    const { fileNameWithoutExtension, extension } =
-      getFileConstituents(filePath);
+    const { fileNameWithoutExtension, extension } = getFileConstituents(filePath);
     const targetDirectory = `${USERDATA_DIRECTORY}/covers/${sanitize(
       fileNameWithoutExtension
     )}`;
@@ -237,10 +217,7 @@ export const extractComicInfoXMLFromZip = async (
     // only allow allowed image formats
     remove(
       filesFromArchive.files,
-      ({ name }) =>
-        !IMPORT_IMAGE_FILE_FORMATS.includes(
-          path.extname(name).toLowerCase()
-        )
+      ({ name }) => !IMPORT_IMAGE_FILE_FORMATS.includes(path.extname(name).toLowerCase())
     );

     // Natural sort
@@ -261,13 +238,7 @@ export const extractComicInfoXMLFromZip = async (
       extractionTargets.push(filesToWriteToDisk.comicInfoXML);
     }
     // Extract the files.
-    await p7zip.extract(
-      filePath,
-      targetDirectory,
-      extractionTargets,
-      "",
-      false
-    );
+    await p7zip.extract(filePath, targetDirectory, extractionTargets, "", false);

     // ComicInfoXML detection, parsing and conversion to JSON
     // Write ComicInfo.xml to disk
@@ -275,26 +246,15 @@ export const extractComicInfoXMLFromZip = async (
     const comicInfoXMLPromise = new Promise((resolve, reject) => {
       if (
         !isNil(filesToWriteToDisk.comicInfoXML) &&
-        existsSync(
-          `${targetDirectory}/${path.basename(
-            filesToWriteToDisk.comicInfoXML
-          )}`
-        )
+        existsSync(`${targetDirectory}/${path.basename(filesToWriteToDisk.comicInfoXML)}`)
       ) {
         let comicinfoString = "";
         const comicInfoXMLStream = createReadStream(
-          `${targetDirectory}/${path.basename(
-            filesToWriteToDisk.comicInfoXML
-          )}`
-        );
-        comicInfoXMLStream.on(
-          "data",
-          (data) => (comicinfoString += data)
+          `${targetDirectory}/${path.basename(filesToWriteToDisk.comicInfoXML)}`
         );
+        comicInfoXMLStream.on("data", (data) => (comicinfoString += data));
         comicInfoXMLStream.on("end", async () => {
-          const comicInfoJSON = await convertXMLToJSON(
-            comicinfoString.toString()
-          );
+          const comicInfoJSON = await convertXMLToJSON(comicinfoString.toString());
           resolve({
             comicInfoJSON: comicInfoJSON.comicinfo,
           });
@@ -314,9 +274,7 @@ export const extractComicInfoXMLFromZip = async (
       coverStream
         .pipe(sharpStream)
         .toFile(
-          `${targetDirectory}/${path.basename(
-            filesToWriteToDisk.coverFile
-          )}`,
+          `${targetDirectory}/${path.basename(filesToWriteToDisk.coverFile)}`,
           (err, info) => {
             if (err) {
               reject(err);
@@ -365,13 +323,10 @@ export const extractFromArchive = async (filePath: string) => {
       return Object.assign({}, ...cbrResult);

     default:
-      console.error(
-        "Error inferring filetype for comicinfo.xml extraction."
-      );
+      console.error("Error inferring filetype for comicinfo.xml extraction.");
       throw new MoleculerError({}, 500, "FILETYPE_INFERENCE_ERROR", {
-        data: { message: "Cannot infer filetype."},
+        data: { message: "Cannot infer filetype." },
       });

   }
 };

@@ -381,10 +336,7 @@ export const extractFromArchive = async (filePath: string) => {
  * @param {any} options
  * @returns {Promise} A promise containing the contents of the uncompressed archive.
  */
-export const uncompressEntireArchive = async (
-  filePath: string,
-  options: any
-) => {
+export const uncompressEntireArchive = async (filePath: string, options: any) => {
   const mimeType = await getMimeType(filePath);
   console.log(`File has the following mime-type: ${mimeType}`);
   switch (mimeType) {
@@ -426,8 +378,7 @@ export const uncompressRarArchive = async (filePath: string, options: any) => {
   const directoryOptions = {
     mode: 0o2775,
   };
-  const { fileNameWithoutExtension, extension } =
-    getFileConstituents(filePath);
+  const { fileNameWithoutExtension, extension } = getFileConstituents(filePath);
   const targetDirectory = `${USERDATA_DIRECTORY}/expanded/${options.purpose}/${fileNameWithoutExtension}`;
   await createDirectory(directoryOptions, targetDirectory);

@@ -464,10 +415,7 @@ export const uncompressRarArchive = async (filePath: string, options: any) => {
   return await resizeImageDirectory(targetDirectory, options);
 };

-export const resizeImageDirectory = async (
-  directoryPath: string,
-  options: any
-) => {
+export const resizeImageDirectory = async (directoryPath: string, options: any) => {
   const files = await walkFolder(directoryPath, [
     ".jpg",
     ".jpeg",
@@ -495,25 +443,15 @@ export const resizeImage = (directoryPath: string, file: any, options: any) => {
   const { baseWidth } = options.imageResizeOptions;
   const sharpResizeInstance = sharp().resize(baseWidth).toFormat("jpg");
   return new Promise((resolve, reject) => {
-    const resizedStream = createReadStream(
-      `${directoryPath}/${file.name}${file.extension}`
-    );
+    const resizedStream = createReadStream(`${directoryPath}/${file.name}${file.extension}`);
     if (fse.existsSync(`${directoryPath}/${file.name}${file.extension}`)) {
       resizedStream
         .pipe(sharpResizeInstance)
-        .toFile(
-          `${directoryPath}/${file.name}_${baseWidth}px${file.extension}`
-        )
+        .toFile(`${directoryPath}/${file.name}_${baseWidth}px${file.extension}`)
         .then((data) => {
-          console.log(
-            `Resized image ${JSON.stringify(data, null, 4)}`
-          );
-          fse.unlink(
-            `${directoryPath}/${file.name}${file.extension}`
-          );
-          resolve(
-            `${directoryPath}/${file.name}_${baseWidth}px${file.extension}`
-          );
+          console.log(`Resized image ${JSON.stringify(data, null, 4)}`);
+          fse.unlink(`${directoryPath}/${file.name}${file.extension}`);
+          resolve(`${directoryPath}/${file.name}_${baseWidth}px${file.extension}`);
         });
     }
   });

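uncompressEntireArchive dispatches on the file's mime type and hands image resizing off to resizeImageDirectory. A hedged usage sketch: the options shape is inferred from the options.purpose and options.imageResizeOptions.baseWidth reads above, and the file path is hypothetical.

import { uncompressEntireArchive } from "./utils/uncompression.utils";

// Sketch: expanding an archive end-to-end and resizing its pages.
async function expandForReading() {
  return await uncompressEntireArchive("./comics/example.cbr", {
    purpose: "reading",                     // becomes a path segment under USERDATA_DIRECTORY/expanded
    imageResizeOptions: { baseWidth: 275 }, // width sharp resizes each page to
  });
}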