🏗 Refactoring the import process WIP

This commit is contained in:
2021-11-30 22:44:06 -08:00
parent e8d21d4292
commit 1fd5f5b6cf
12 changed files with 380 additions and 1046 deletions

View File

@@ -42,10 +42,20 @@ const brokerConfig: BrokerOptions = {
// Enable/disable logging or use custom logger. More info: https://moleculer.services/docs/0.14/logging.html // Enable/disable logging or use custom logger. More info: https://moleculer.services/docs/0.14/logging.html
// Available logger types: "Console", "File", "Pino", "Winston", "Bunyan", "debug", "Log4js", "Datadog" // Available logger types: "Console", "File", "Pino", "Winston", "Bunyan", "debug", "Log4js", "Datadog"
logger: [ logger: [
{ {
type: "Console", type: "Console",
options: { options: {
level: "info", // Using colors on the output
colors: true,
// Print module names with different colors (like docker-compose for containers)
moduleColors: true,
// Line formatter. It can be "json", "short", "simple", "full", a `Function` or a template string like "{timestamp} {level} {nodeID}/{mod}: {msg}"
formatter: "full",
// Custom object printer. If not defined, it uses the `util.inspect` method.
objectPrinter: null,
// Auto-padding the module name in order to messages begin at the same column.
autoPadding: false,
}, },
}, },
{ {
@@ -59,7 +69,7 @@ const brokerConfig: BrokerOptions = {
// Print module names with different colors (like docker-compose for containers) // Print module names with different colors (like docker-compose for containers)
moduleColors: false, moduleColors: false,
// Line formatter. It can be "json", "short", "simple", "full", a `Function` or a template string like "{timestamp} {level} {nodeID}/{mod}: {msg}" // Line formatter. It can be "json", "short", "simple", "full", a `Function` or a template string like "{timestamp} {level} {nodeID}/{mod}: {msg}"
formatter: "json", formatter: "full",
// Custom object printer. If not defined, it uses the `util.inspect` method. // Custom object printer. If not defined, it uses the `util.inspect` method.
objectPrinter: null, objectPrinter: null,
eol: "\n", eol: "\n",
@@ -70,7 +80,12 @@ const brokerConfig: BrokerOptions = {
], ],
// Default log level for built-in console logger. It can be overwritten in logger options above. // Default log level for built-in console logger. It can be overwritten in logger options above.
// Available values: trace, debug, info, warn, error, fatal // Available values: trace, debug, info, warn, error, fatal
logLevel: "info", logLevel: {
"TRACING": "trace",
"TRANS*": "warn",
"GREETER": "debug",
"**": "info",
},
// Define transporter. // Define transporter.
// More info: https://moleculer.services/docs/0.14/networking.html // More info: https://moleculer.services/docs/0.14/networking.html

1029
package-lock.json generated

File diff suppressed because it is too large. Load Diff

View File

@@ -29,6 +29,7 @@
"eslint-plugin-prefer-arrow": "^1.2.2", "eslint-plugin-prefer-arrow": "^1.2.2",
"jest": "^27.2.5", "jest": "^27.2.5",
"jest-cli": "^27.2.5", "jest-cli": "^27.2.5",
"klaw": "^4.0.1",
"moleculer-repl": "^0.6.6", "moleculer-repl": "^0.6.6",
"ts-jest": "^25.3.0", "ts-jest": "^25.3.0",
"ts-node": "^8.8.1" "ts-node": "^8.8.1"
@@ -38,7 +39,6 @@
"@types/jest": "^25.1.4", "@types/jest": "^25.1.4",
"@types/mkdirp": "^1.0.0", "@types/mkdirp": "^1.0.0",
"@types/node": "^13.9.8", "@types/node": "^13.9.8",
"@types/pino": "^6.3.8",
"@types/string-similarity": "^4.0.0", "@types/string-similarity": "^4.0.0",
"7zip-bin": "^5.1.1", "7zip-bin": "^5.1.1",
"7zip-min": "^1.4.0", "7zip-min": "^1.4.0",
@@ -63,12 +63,11 @@
"node-7z": "^3.0.0", "node-7z": "^3.0.0",
"node-calibre": "^2.1.1", "node-calibre": "^2.1.1",
"node-unrar-js": "^1.0.2", "node-unrar-js": "^1.0.2",
"pino": "^6.13.2",
"pino-pretty": "^7.0.0",
"sharp": "^0.28.1", "sharp": "^0.28.1",
"socket.io": "^4.3.1", "socket.io": "^4.3.1",
"socket.io-stream": "^0.5.3", "socket.io-stream": "^0.5.3",
"threetwo-ui-typings": "^1.0.11", "threetwo-ui-typings": "^1.0.11",
"through2": "^4.0.2",
"typescript": "^3.8.3", "typescript": "^3.8.3",
"unrar": "^0.2.0", "unrar": "^0.2.0",
"xml2js": "^0.4.23" "xml2js": "^0.4.23"

View File

@@ -1,7 +1,6 @@
import { Service, ServiceBroker, Context } from "moleculer"; import { Service, ServiceBroker, Context } from "moleculer";
import ApiGateway from "moleculer-web"; import ApiGateway from "moleculer-web";
import chokidar from "chokidar"; import chokidar from "chokidar";
import { logger } from "../utils/logger.utils";
import path from "path"; import path from "path";
import fs from "fs"; import fs from "fs";
import { IExtractionOptions, IFolderData } from "threetwo-ui-typings"; import { IExtractionOptions, IFolderData } from "threetwo-ui-typings";
@@ -107,24 +106,24 @@ export default class ApiService extends Service {
}); });
// Add a connect listener // Add a connect listener
this.io.on("connection", (client) => { this.io.on("connection", (client) => {
this.logger.info("Client connected via websocket!"); console.log("Client connected via websocket!");
client.on("action", async (action) => { client.on("action", async (action) => {
switch (action.type) { switch (action.type) {
case "LS_IMPORT": case "LS_IMPORT":
// 1. Send task to queue // 1. Send task to queue
const result = await this.broker.call( const result = await this.broker.call(
"libraryqueue.enqueue", "import.newImport",
action.data, action.data,
{} {}
); );
client.emit("LS_COVER_EXTRACTED", result);
break; break;
} }
}); });
// Add a disconnect listener // Add a disconnect listener
client.on("disconnect", () => { client.on("disconnect", () => {
this.logger.info("Client disconnected"); console.log("Client disconnected");
}); });
}); });
@@ -149,7 +148,8 @@ export default class ApiService extends Service {
stat.mtime.getTime() === stat.mtime.getTime() ===
previousPath.mtime.getTime() previousPath.mtime.getTime()
) { ) {
logger.info("File detected, starting import..."); console.log("File detected, starting import...");
// this walking business needs to go, SACURATAYYY, SACURATAYYY!! This dude needs to go.
const walkedFolders: IFolderData = const walkedFolders: IFolderData =
await broker.call("import.walkFolders", { await broker.call("import.walkFolders", {
basePathToWalk: path, basePathToWalk: path,
@@ -175,21 +175,21 @@ export default class ApiService extends Service {
}; };
fileWatcher fileWatcher
.on("add", async (path, stats) => { .on("add", async (path, stats) => {
logger.info("Watcher detected new files."); console.log("Watcher detected new files.");
logger.info( console.log(
`File ${path} has been added with stats: ${JSON.stringify( `File ${path} has been added with stats: ${JSON.stringify(
stats stats
)}` )}`
); );
logger.info("File copy started..."); console.log("File copy started...");
fs.stat(path, function (err, stat) { fs.stat(path, function (err, stat) {
if (err) { if (err) {
logger.error( console.log(
"Error watching file for copy completion. ERR: " + "Error watching file for copy completion. ERR: " +
err.message err.message
); );
logger.error( console.log(
"Error file not processed. PATH: " + path "Error file not processed. PATH: " + path
); );
throw err; throw err;
@@ -203,15 +203,15 @@ export default class ApiService extends Service {
}); });
}) })
.on("change", (path, stats) => .on("change", (path, stats) =>
logger.info( console.log(
`File ${path} has been changed. Stats: ${stats}` `File ${path} has been changed. Stats: ${stats}`
) )
) )
.on("unlink", (path) => .on("unlink", (path) =>
logger.info(`File ${path} has been removed`) console.log(`File ${path} has been removed`)
) )
.on("addDir", (path) => .on("addDir", (path) =>
logger.info(`Directory ${path} has been added`) console.log(`Directory ${path} has been added`)
); );
}, },
}); });

View File

@@ -9,10 +9,9 @@ import {
} from "moleculer"; } from "moleculer";
import { DbMixin } from "../mixins/db.mixin"; import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model"; import Comic from "../models/comic.model";
import { walkFolder } from "../utils/file.utils"; import { explodePath, walkFolder } from "../utils/file.utils";
import { convertXMLToJSON } from "../utils/xml.utils"; import { convertXMLToJSON } from "../utils/xml.utils";
import https from "https"; import https from "https";
import { logger } from "../utils/logger.utils";
import { import {
IExtractComicBookCoverErrorResponse, IExtractComicBookCoverErrorResponse,
IExtractedComicBookCoverFile, IExtractedComicBookCoverFile,
@@ -20,12 +19,14 @@ import {
} from "threetwo-ui-typings"; } from "threetwo-ui-typings";
import { import {
extractCoverFromFile, extractCoverFromFile,
extractCoverFromFile2,
unrarArchive, unrarArchive,
} from "../utils/uncompression.utils"; } from "../utils/uncompression.utils";
import { scrapeIssuesFromDOM } from "../utils/scraping.utils"; import { scrapeIssuesFromDOM } from "../utils/scraping.utils";
const ObjectId = require("mongoose").Types.ObjectId; const ObjectId = require("mongoose").Types.ObjectId;
import mongoose from "mongoose";
import fsExtra from "fs-extra"; import fsExtra from "fs-extra";
const through2 = require("through2");
import klaw from "klaw";
import path from "path"; import path from "path";
export default class ImportService extends Service { export default class ImportService extends Service {
@@ -72,6 +73,84 @@ export default class ImportService extends Service {
return convertXMLToJSON("lagos"); return convertXMLToJSON("lagos");
}, },
}, },
newImport: {
rest: "POST /newImport",
params: {},
async handler(
ctx: Context<{
extractionOptions?: any;
}>
) {
// 1. Walk the Source folder
klaw(path.resolve(process.env.COMICS_DIRECTORY))
// 1.1 Filter on .cb* extensions
.pipe(
through2.obj(function (
item,
enc,
next
) {
let fileExtension = path.extname(
item.path
);
if (
[
".cbz",
".cbr",
".cb7",
].includes(fileExtension)
) {
this.push(item);
}
next();
})
)
// 1.2 Pipe filtered results to the next step
.on("data", async (item) => {
console.info(
"Found a file at path: %s",
item.path
);
let comicExists = await Comic.exists({
"rawFileDetails.name": `${path.basename(
item.path,
path.extname(item.path)
)}`,
});
if (!comicExists) {
// 2. Send the extraction job to the queue
await broker.call(
"libraryqueue.enqueue",
{
fileObject: {
filePath: item.path,
size: item.stats.size,
},
}
);
} else {
console.log(
"Comic already exists in the library."
);
}
})
.on("end", () => {
console.log("Import process complete.");
});
},
},
nicefyPath: {
rest: "POST /nicefyPath",
params: {},
async handler(
ctx: Context<{
filePath: string;
}>
) {
return explodePath(ctx.params.filePath);
},
},
processAndImportToDB: { processAndImportToDB: {
rest: "POST /processAndImportToDB", rest: "POST /processAndImportToDB",
@@ -96,6 +175,11 @@ export default class ImportService extends Service {
let comicExists = await Comic.exists({ let comicExists = await Comic.exists({
"rawFileDetails.name": `${walkedFolders.name}`, "rawFileDetails.name": `${walkedFolders.name}`,
}); });
// rough flow of import process
// 1. Walk folder
// 2. For each folder, call extract function
// 3. For each successful extraction, run dbImport
if (!comicExists) { if (!comicExists) {
// 1. Extract cover and cover metadata // 1. Extract cover and cover metadata
let comicBookCoverMetadata: let comicBookCoverMetadata:
@@ -132,12 +216,12 @@ export default class ImportService extends Service {
dbImportResult, dbImportResult,
}; };
} else { } else {
logger.info( console.info(
`Comic: \"${walkedFolders.name}\" already exists in the database` `Comic: \"${walkedFolders.name}\" already exists in the database`
); );
} }
} catch (error) { } catch (error) {
logger.error( console.error(
"Error importing comic books", "Error importing comic books",
error error
); );
@@ -233,7 +317,7 @@ export default class ImportService extends Service {
{ new: true }, { new: true },
(err, result) => { (err, result) => {
if (err) { if (err) {
console.log(err); console.info(err);
reject(err); reject(err);
} else { } else {
// 3. Fetch and append volume information // 3. Fetch and append volume information
@@ -364,17 +448,17 @@ export default class ImportService extends Service {
rest: "POST /flushDB", rest: "POST /flushDB",
params: {}, params: {},
async handler(ctx: Context<{}>) { async handler(ctx: Context<{}>) {
return await mongoose.connection.db return await Comic.collection
.dropCollection("comics") .drop()
.then((data) => { .then((data) => {
logger.info(data); console.info(data);
const foo = fsExtra.emptyDirSync( const foo = fsExtra.emptyDirSync(
path.resolve("./userdata/covers") path.resolve("./userdata/covers")
); );
const foo2 = fsExtra.emptyDirSync( const foo2 = fsExtra.emptyDirSync(
path.resolve("./userdata/expanded") path.resolve("./userdata/expanded")
); );
return { foo, foo2 }; return { data, foo, foo2 };
}) })
.catch((error) => error); .catch((error) => error);
}, },
@@ -422,7 +506,7 @@ export default class ImportService extends Service {
}); });
resp.on("end", () => { resp.on("end", () => {
console.log( console.info(
data, data,
"HERE, BITCHES< HERE" "HERE, BITCHES< HERE"
); );
@@ -435,7 +519,7 @@ export default class ImportService extends Service {
} }
) )
.on("error", (err) => { .on("error", (err) => {
console.log("Error: " + err.message); console.info("Error: " + err.message);
reject(err); reject(err);
}); });
}), }),

View File

@@ -7,7 +7,11 @@ import {
Errors, Errors,
} from "moleculer"; } from "moleculer";
import BullMQMixin from "moleculer-bull"; import BullMQMixin from "moleculer-bull";
const REDIS_URI = process.env.REDIS_URI || `redis://0.0.0.0:6379`; import { SandboxedJob } from "moleculer-bull";
import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model";
import { extractCoverFromFile2 } from "../utils/uncompression.utils";
const REDIS_URI = process.env.REDIS_URI || `redis://0.0.0.0:6379`;
export default class LibraryQueueService extends Service { export default class LibraryQueueService extends Service {
public constructor( public constructor(
@@ -15,22 +19,45 @@ export default class LibraryQueueService extends Service {
schema: ServiceSchema<{}> = { name: "libraryqueue" } schema: ServiceSchema<{}> = { name: "libraryqueue" }
) { ) {
super(broker); super(broker);
console.log(this.io);
this.parseServiceSchema( this.parseServiceSchema(
Service.mergeSchemas( Service.mergeSchemas(
{ {
name: "libraryqueue", name: "libraryqueue",
mixins: [BullMQMixin(REDIS_URI)], mixins: [BullMQMixin(REDIS_URI), DbMixin("comics", Comic)],
settings: {}, settings: {},
hooks: {}, hooks: {},
queues: { queues: {
"process.import": { "process.import": {
async process(job) { async process(job: SandboxedJob) {
this.logger.info("New job received!", job.data); console.info("New job received!", job.data);
this.logger.info(`Processing queue...`); console.info(`Processing queue...`);
const result = await this.broker.call('import.processAndImportToDB', job.data); // extract the cover
const result = await extractCoverFromFile2(
job.data.fileObject
);
// write to mongo
const dbImportResult = await this.broker.call(
"import.rawImportToDB",
{
importStatus: {
isImported: true,
tagged: false,
matchedResult: {
score: "0",
},
},
rawFileDetails: result,
sourcedMetadata: {
comicvine: {},
},
},
{}
);
return Promise.resolve({ return Promise.resolve({
result, dbImportResult,
id: job.id, id: job.id,
worker: process.pid, worker: process.pid,
}); });
@@ -41,40 +68,42 @@ export default class LibraryQueueService extends Service {
enqueue: { enqueue: {
rest: "POST /enqueue", rest: "POST /enqueue",
params: {}, params: {},
async handler(ctx: Context<{ extractionOptions: object, walkedFolders: object}>) { async handler(
ctx: Context<{
fileObject: object;
}>
) {
return await this.createJob("process.import", { return await this.createJob("process.import", {
extractionOptions: ctx.params.extractionOptions, fileObject: ctx.params.fileObject,
walkedFolders: ctx.params.walkedFolders,
}); });
}, },
}, },
}, },
methods: {}, methods: {},
async started(): Promise<any> { async started(): Promise<any> {
const failed = await this.getQueue( const failed = await this.getQueue("process.import").on(
"process.import" "failed",
).on("failed", async (job, error) => { async (job, error) => {
this.logger.error( console.error(
`An error occured in 'mail.send' queue on job id '${job.id}': ${error.message}` `An error occured in 'process.import' queue on job id '${job.id}': ${error.message}`
); );
}); }
const completed = await this.getQueue( );
"process.import" const completed = await this.getQueue(
).on("completed", async (job, res) => { "process.import"
this.logger.info( ).on("completed", async (job, res) => {
`Job with the id '${job.id}' completed.` console.info(
); `Job with the id '${job.id}' completed.`
}); );
const stalled = await this.getQueue( });
"process.import" const stalled = await this.getQueue(
).on("stalled", async (job) => { "process.import"
this.logger.warn( ).on("stalled", async (job) => {
`The job with the id '${job} got stalled!` console.warn(
); `The job with the id '${job} got stalled!`
}); );
}, });
},
}, },
schema schema
) )

View File

@@ -9,7 +9,6 @@ import {
IExtractionOptions, IExtractionOptions,
IFolderData, IFolderData,
} from "threetwo-ui-typings"; } from "threetwo-ui-typings";
import { logger } from "./logger.utils";
import { includes, remove, indexOf } from "lodash"; import { includes, remove, indexOf } from "lodash";
const ALLOWED_IMAGE_FILE_FORMATS = [".jpg", ".jpeg", ".png"]; const ALLOWED_IMAGE_FILE_FORMATS = [".jpg", ".jpeg", ".png"];
@@ -29,7 +28,7 @@ export const walkFolder = async (folder: string, formats: string[]): Promise<IFo
const walk = Walk.create({ sort: filterOutDotFiles }); const walk = Walk.create({ sort: filterOutDotFiles });
await walk(folder, async (err, pathname, dirent) => { await walk(folder, async (err, pathname, dirent) => {
if (err) { if (err) {
logger.error("Failed to lstat directory", { error: err }); console.log("Failed to lstat directory", { error: err });
return false; return false;
} }
if ([...formats].includes(path.extname(dirent.name))) { if ([...formats].includes(path.extname(dirent.name))) {
@@ -42,7 +41,7 @@ export const walkFolder = async (folder: string, formats: string[]): Promise<IFo
isFile: dirent.isFile(), isFile: dirent.isFile(),
isLink: dirent.isSymbolicLink(), isLink: dirent.isSymbolicLink(),
}; };
logger.info( console.log(
`Scanned ${dirent.name} contained in ${path.dirname(pathname)}` `Scanned ${dirent.name} contained in ${path.dirname(pathname)}`
); );
result.push(walkResult); result.push(walkResult);

View File

@@ -1,5 +1,4 @@
const sharp = require("sharp"); const sharp = require("sharp");
import { logger } from "./logger.utils";
import { ISharpResizedImageStats } from "threetwo-ui-typings"; import { ISharpResizedImageStats } from "threetwo-ui-typings";
const imghash = require("imghash"); const imghash = require("imghash");
const leven = require("leven"); const leven = require("leven");
@@ -30,13 +29,12 @@ export const resizeImage = async (
.toBuffer(); .toBuffer();
return await sharp(buffer).toFile(`${outputPath}`, (err, info) => { return await sharp(buffer).toFile(`${outputPath}`, (err, info) => {
if (err) { if (err) {
logger.error("Failed to resize image:"); console.log("Failed to resize image:");
logger.error(err); console.log(err);
return err; return err;
} }
logger.info("Image file resized with the following parameters:"); console.log("Image file %s resized with the following parameters: %o", imageFile, info);
logger.info(info);
return info; return info;
}); });
}; };

View File

@@ -1,19 +0,0 @@
const Pino = require("pino");
export const logger = Pino({
name: "Threetwo!",
prettyPrint: { colorize: true },
crlf: false,
errorLikeObjectKeys: ["err", "error"],
// errorProps: "",
levelFirst: false,
messageKey: "msg", // --messageKey
levelKey: "level", // --levelKey
// messageFormat: false, // --messageFormat
// timestampKey: "time", // --timestampKey
translateTime: false, // --translateTime
// search: "foo == `bar`", // --search
// ignore: "pid,hostname", // --ignore
hideObject: false, // --hideObject
// singleLine: false,
});

View File

@@ -41,13 +41,15 @@ import {
IFolderData, IFolderData,
ISharpResizedImageStats, ISharpResizedImageStats,
} from "threetwo-ui-typings"; } from "threetwo-ui-typings";
import { logger } from "./logger.utils";
import { constructPaths, explodePath, walkFolder } from "../utils/file.utils"; import { constructPaths, explodePath, walkFolder } from "../utils/file.utils";
import { resizeImage } from "./imagetransformation.utils"; import { resizeImage } from "./imagetransformation.utils";
import { isNil } from "lodash";
const sevenZip = require("7zip-min"); const sevenZip = require("7zip-min");
const unrar = require("node-unrar-js"); const unrar = require("node-unrar-js");
const { Calibre } = require("node-calibre"); const { Calibre } = require("node-calibre");
console.info("COMICS_DIRECTORY", process.env.COMICS_DIRECTORY);
console.info("USERDATA_DIRECTORY", process.env.USERDATA_DIRECTORY);
export const extractCoverFromFile = async ( export const extractCoverFromFile = async (
extractionOptions: IExtractionOptions, extractionOptions: IExtractionOptions,
@@ -75,9 +77,9 @@ export const extractCoverFromFile = async (
constructedPaths.targetPath, constructedPaths.targetPath,
directoryOptions directoryOptions
); );
logger.info(`${constructedPaths.targetPath} was created.`); console.info(`${constructedPaths.targetPath} was created.`);
} catch (error) { } catch (error) {
logger.error(`${error}: Couldn't create directory.`); console.error(`${error}: Couldn't create directory.`);
} }
// extract the cover // extract the cover
@@ -125,11 +127,87 @@ export const extractCoverFromFile = async (
}, },
}); });
} catch (error) { } catch (error) {
console.log(error); console.info(error);
} }
}); });
}; };
export const extractCoverFromFile2 = async (
fileObject: any
): Promise<any> => {
try {
const { filePath, size} = fileObject;
const calibre = new Calibre();
console.info(`Initiating extraction process for path ${filePath}`);
// 1. Check for process.env.COMICS_DIRECTORY and process.env.USERDATA_DIRECTORY
if (!isNil(process.env.USERDATA_DIRECTORY)) {
// 2. Create the directory to which the cover image will be extracted
console.info("Attempting to create target directory for cover extraction...");
const directoryOptions = {
mode: 0o2775,
};
const fileNameWithExtension = path.basename(filePath);
const fileNameWithoutExtension = path.basename(filePath, path.extname(filePath));
const targetDirectory = `${process.env.USERDATA_DIRECTORY}/covers/${fileNameWithoutExtension}`;
await fse.ensureDir(targetDirectory, directoryOptions);
console.info(`%s was created.`, targetDirectory);
// 3. extract the cover
console.info(`Starting cover extraction...`);
let result: string;
const targetCoverImageFilePath = path.resolve(
targetDirectory +
"/" +
fileNameWithoutExtension +
"_cover.jpg"
);
const ebookMetaPath = process.env.CALIBRE_EBOOK_META_PATH
? `${process.env.CALIBRE_EBOOK_META_PATH}`
: `ebook-meta`;
result = await calibre.run(
ebookMetaPath,
[filePath],
{
getCover: targetCoverImageFilePath,
}
);
console.info(`ebook-meta ran with the following result: %o`, result)
// 4. create rendition path
const renditionPath =
targetDirectory+
"/" +
fileNameWithoutExtension +
"_200px.jpg";
// 5. resize image
await resizeImage(
targetCoverImageFilePath,
path.resolve(renditionPath),
200
);
return {
name: fileNameWithoutExtension,
path: filePath,
fileSize: size,
extension: path.extname(filePath),
cover: {
filePath: path.relative(process.cwd(),renditionPath),
},
containedIn: path.dirname(fileNameWithExtension),
calibreMetadata: {
coverWriteResult: result,
},
};
}
} catch (error) {
console.error(error);
}
};
export const unrarArchive = async ( export const unrarArchive = async (
filePath: string, filePath: string,
options: IExtractionOptions options: IExtractionOptions
@@ -143,9 +221,9 @@ export const unrarArchive = async (
.readFile(filePath) .readFile(filePath)
.catch((err) => console.error("Failed to read file", err)); .catch((err) => console.error("Failed to read file", err));
try { try {
logger.info("Unrar initiating."); console.info("Unrar initiating.");
await fse.ensureDir(options.targetExtractionFolder, directoryOptions); await fse.ensureDir(options.targetExtractionFolder, directoryOptions);
logger.info(`${options.targetExtractionFolder} was created.`); console.info(`${options.targetExtractionFolder} was created.`);
const extractor = await unrar.createExtractorFromData({ const extractor = await unrar.createExtractorFromData({
data: fileBuffer, data: fileBuffer,
@@ -153,7 +231,7 @@ export const unrarArchive = async (
const files = extractor.extract({}); const files = extractor.extract({});
const extractedFiles = [...files.files]; const extractedFiles = [...files.files];
for (const file of extractedFiles) { for (const file of extractedFiles) {
logger.info(`Attempting to write ${file.fileHeader.name}`); console.info(`Attempting to write ${file.fileHeader.name}`);
const fileBuffer = file.extraction; const fileBuffer = file.extraction;
const fileName = explodePath(file.fileHeader.name).fileName; const fileName = explodePath(file.fileHeader.name).fileName;
// resize image // resize image
@@ -170,6 +248,6 @@ export const unrarArchive = async (
".jpeg", ".jpeg",
]); ]);
} catch (error) { } catch (error) {
logger.error(`${error}`); console.info(`${error}`);
} }
}; };

View File

@@ -6,7 +6,6 @@ import {
IFolderData, IFolderData,
} from "threetwo-ui-typings"; } from "threetwo-ui-typings";
const Validator = require("fastest-validator"); const Validator = require("fastest-validator");
import { logger } from "./logger.utils";
export const validateComicBookMetadata = ( export const validateComicBookMetadata = (
comicBookMetadataObject: IExtractedComicBookCoverFile comicBookMetadataObject: IExtractedComicBookCoverFile
@@ -19,9 +18,9 @@ export const validateComicBookMetadata = (
}; };
const check = validator.compile(sch); const check = validator.compile(sch);
if (check(comicBookMetadataObject)) { if (check(comicBookMetadataObject)) {
logger.info(`Valid comic book metadata: ${comicBookMetadataObject}`); console.log(`Valid comic book metadata: ${comicBookMetadataObject}`);
} else { } else {
logger.error( console.log(
`Comic book metadata was invalid: `Comic book metadata was invalid:
${comicBookMetadataObject}` ${comicBookMetadataObject}`
); );

View File

@@ -28,7 +28,6 @@ SOFTWARE.
import xml2js from "xml2js"; import xml2js from "xml2js";
import fs from "fs"; import fs from "fs";
import { logger } from "../utils/logger.utils";
export const convertXMLToJSON = (xmlPayload) => { export const convertXMLToJSON = (xmlPayload) => {
const parser = new xml2js.Parser({ const parser = new xml2js.Parser({
@@ -43,6 +42,6 @@ export const convertXMLToJSON = (xmlPayload) => {
return result; return result;
}) })
.catch((error) => { .catch((error) => {
logger.error(error); console.log(error);
}); });
}; };