🔩 Refactoring

This commit is contained in:
2021-04-20 14:39:19 -07:00
parent a21b51f0ae
commit 69ca5fdea0
2 changed files with 30 additions and 26 deletions

View File

@@ -12,9 +12,9 @@ import { Request, Response } from "express";
router.route("/getComicCovers").get(async (req: Request, res: Response) => { router.route("/getComicCovers").get(async (req: Request, res: Response) => {
const foo = await extractArchive({ const foo = await extractArchive({
name: "Neonomicon 01 (of 04) (2010) (Project Comic Con cover) (Minutemen-DTs).cbz", name: "Ozymandias 001.cbr",
extension: ".cbz", extension: ".cbr",
containedIn: "comics/Neonomicon", containedIn: "comics/(2012) Before Watchmen - Ozymandias",
isFile: true, isFile: true,
isLink: false, isLink: false,
}); });

View File

@@ -3,7 +3,8 @@ const sharp = require("sharp");
const unrarer = require("node-unrar-js"); const unrarer = require("node-unrar-js");
const Walk = require("@root/walk"); const Walk = require("@root/walk");
const fse = require("fs-extra"); const fse = require("fs-extra");
const fs = require("fs").promises; import { createReadStream, createWriteStream } from "fs";
const { writeFile, readFile } = require("fs").promises;
import path from "path"; import path from "path";
import _ from "lodash"; import _ from "lodash";
import { logger } from "./logger.utils"; import { logger } from "./logger.utils";
@@ -27,17 +28,14 @@ export const unrar = async (
const directoryOptions = { const directoryOptions = {
mode: 0o2775, mode: 0o2775,
}; };
const fileBuffer = await fs const fileBuffer = await readFile(
.readFile( extractionOptions.folderDetails.containedIn +
extractionOptions.folderDetails.containedIn + "/" +
"/" + extractionOptions.folderDetails.name,
extractionOptions.folderDetails.name, ).catch((err) => console.error("Failed to read file", err));
)
.catch((err) => console.error("Failed to read file", err));
// const buf = Uint8Array.from(fs.readFile(fileBuffer);
const extractor = await unrarer.createExtractorFromData({ data: fileBuffer }); const extractor = await unrarer.createExtractorFromData({ data: fileBuffer });
switch (extractionOptions.extractTarget) { switch (extractionOptions.extractTarget) {
// extract the first file only
case "cover": case "cover":
const list = extractor.getFileList(); const list = extractor.getFileList();
const fileHeaders = [...list.fileHeaders]; const fileHeaders = [...list.fileHeaders];
@@ -56,7 +54,7 @@ export const unrar = async (
await fse.ensureDir(targetPath, directoryOptions); await fse.ensureDir(targetPath, directoryOptions);
logger.info(`${targetPath} was created or already exists.`); logger.info(`${targetPath} was created or already exists.`);
try { try {
await fs.writeFile( await writeFile(
targetPath + "/" + pathFragments.fileName, targetPath + "/" + pathFragments.fileName,
fileArrayBuffer, fileArrayBuffer,
); );
@@ -84,13 +82,13 @@ export const unrar = async (
logger.info(`Attempting to write ${file.fileHeader.name}`); logger.info(`Attempting to write ${file.fileHeader.name}`);
const fileBuffer = file.extraction; const fileBuffer = file.extraction;
const pathFragments = explodePath(file.fileHeader.name); const pathFragments = explodePath(file.fileHeader.name);
const targetPath = const fragment = determineFolderNameForExtraction(pathFragments);
extractionTargetPath + "/" + pathFragments.exploded.join("/"); const targetPath = extractionTargetPath + "/" + fragment;
try { try {
await fse.ensureDir(targetPath, directoryOptions); await fse.ensureDir(targetPath, directoryOptions);
logger.info(`${targetPath} was created or already exists.`); logger.info(`${targetPath} was created or already exists.`);
try { try {
await fs.writeFile( await writeFile(
targetPath + "/" + pathFragments.fileName, targetPath + "/" + pathFragments.fileName,
fileBuffer, fileBuffer,
); );
@@ -143,13 +141,9 @@ export const unzip = async (
extractionOptions.sourceFolder + extractionOptions.sourceFolder +
"/" + "/" +
extractionOptions.targetExtractionFolder; extractionOptions.targetExtractionFolder;
const zip = await fs const zip = createReadStream(
.createReadStream( extractionOptions.sourceFolder + "/" + extractionOptions.folderDetails.name,
extractionOptions.sourceFolder + ).pipe(unzipper.Parse({ forceStream: true }));
"/" +
extractionOptions.folderDetails.name,
)
.pipe(unzipper.Parse({ forceStream: true }));
for await (const entry of zip) { for await (const entry of zip) {
try { try {
await fse.ensureDir(targetPath, directoryOptions); await fse.ensureDir(targetPath, directoryOptions);
@@ -161,7 +155,7 @@ export const unzip = async (
fileSize: size, fileSize: size,
path: targetPath, path: targetPath,
}); });
entry.pipe(fs.createWriteStream(targetPath + fileName)); entry.pipe(createWriteStream(targetPath + fileName));
entry.autodrain(); entry.autodrain();
} catch (error) { } catch (error) {
logger.error(`${error} Couldn't create directory.`); logger.error(`${error} Couldn't create directory.`);
@@ -180,7 +174,7 @@ export const unzipOne = async (): Promise<IExtractedComicBookCoverFile> => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
directory.files[0] directory.files[0]
.stream() .stream()
.pipe(fs.createWriteStream("./comics/covers/yelaveda.jpg")) .pipe(createWriteStream("./comics/covers/yelaveda.jpg"))
.on("error", reject) .on("error", reject)
.on("finish", () => .on("finish", () =>
resolve({ resolve({
@@ -253,3 +247,13 @@ export const explodePath = (filePath: string): IExplodedPathResponse => {
fileName, fileName,
}; };
}; };
/**
 * Derives the target sub-folder fragment for extracting an archive entry.
 *
 * @param pathFragments - exploded path details for the archive entry
 * @returns the nested folder components joined with "/", or the bare file
 *          name when the entry sits at the archive root (no folder parts)
 */
export const determineFolderNameForExtraction = (
  pathFragments: IExplodedPathResponse,
): string => {
  // Both branches yield a string (Array.prototype.join returns a string),
  // so the return type is `string`, not `string | string[]`. Falling back
  // to the file name keeps the extraction target non-empty for root-level
  // entries.
  if (pathFragments.exploded.length === 0) {
    return pathFragments.fileName;
  }
  return pathFragments.exploded.join("/");
};