✂️ Pruned deps

This commit is contained in:
2021-05-11 09:58:14 -07:00
parent 9403b277a3
commit 267386ec94
4 changed files with 2 additions and 339 deletions

View File

@@ -1,8 +1,6 @@
import axios from "axios";
import fetch, { Response } from "node-fetch";
import {
IExtractComicBookCoverErrorResponse,
IExtractedComicBookCoverFile,
IExtractionOptions,
IFolderData,
} from "../../server/interfaces/folder.interface";

View File

@@ -4,12 +4,10 @@ import {
} from "../actions/fileops.actions";
import { IExtractedComicBookCoverFile } from "../../server/interfaces/folder.interface";
const ndjsonStream = require("can-ndjson-stream");
import fetch from "node-fetch";
import { API_BASE_URI } from "../constants/endpoints";
export const greet = async (
path: string,
): Promise<IExtractedComicBookCoverFile[] | unknown | any> => {
): Promise<IExtractedComicBookCoverFile[]> => {
const targetOptions = {
sourceFolder: path,
extractTarget: "cover",
@@ -32,7 +30,7 @@ export const greet = async (
);
const reader = await ndjsonStream(fetchedResource.body).getReader();
reader.read().then(function process({ done, value }) {
return reader.read().then(function process({ done, value }) {
if (done) {
console.log("done");
return;

View File

@@ -1,7 +1,6 @@
import express, { Request, Response, Router, Express } from "express";
import bodyParser from "body-parser";
import router from "./route";
import { default as paginate } from "express-paginate";
// call express
const app: Express = express(); // define our app using express
@@ -9,7 +8,6 @@ const app: Express = express(); // define our app using express
// configure app to use bodyParser for
// Getting data from body of requests
app.use(bodyParser.json());
app.use(paginate.middleware(10, 50));
app.use(bodyParser.urlencoded({ extended: true }));

View File

@@ -1,331 +0,0 @@
/*
* MIT License
*
* Copyright (c) 2021 Rishi Ghan
*
The MIT License (MIT)
Copyright (c) 2021 Rishi Ghan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/*
* Revision History:
* Initial: 2021/05/04 Rishi Ghan
*/
const sharp = require("sharp");
const unrarer = require("node-unrar-js");
const Walk = require("@root/walk");
const fse = require("fs-extra");
import { default as unzipper } from "unzipper";
import _ from "lodash";
import { createReadStream, createWriteStream } from "fs";
const { writeFile, readFile } = require("fs").promises;
import path from "path";
import { each, isEmpty, map, remove, indexOf } from "lodash";
import { logger } from "./logger.utils";
import {
IExplodedPathResponse,
IExtractComicBookCoverErrorResponse,
IExtractedComicBookCoverFile,
IExtractionOptions,
IFolderData,
} from "../interfaces/folder.interface";
/**
 * Extracts either the first file ("cover") or every file ("all") from a
 * .cbr (RAR) archive into the configured target folder.
 *
 * @param extractionOptions controls target ("cover" | "all") and output folder
 * @param walkedFolder      identifies the source archive (name/extension/location)
 * @returns a single cover descriptor, a list of extracted-file descriptors,
 *          or an error-response payload; rejects only on a failed cover write
 *          (preserving the original rejection contract).
 */
export const unrar = async (
  extractionOptions: IExtractionOptions,
  walkedFolder: IFolderData,
): Promise<
  | IExtractedComicBookCoverFile
  | IExtractedComicBookCoverFile[]
  | IExtractComicBookCoverErrorResponse
> => {
  const paths = constructPaths(extractionOptions, walkedFolder);
  const directoryOptions = {
    // setgid bit + rwxrwxr-x — NOTE(review): presumably so group ownership
    // propagates to extracted files; confirm deployment expectation.
    mode: 0o2775,
  };
  // Read failure is logged but not rethrown; fileBuffer will be undefined
  // and createExtractorFromData is left to surface the problem.
  const fileBuffer = await readFile(paths.inputFilePath).catch((err) =>
    console.error("Failed to read file", err),
  );
  try {
    await fse.ensureDir(paths.targetPath, directoryOptions);
    logger.info(`${paths.targetPath} was created.`);
  } catch (error) {
    logger.error(`${error}: Couldn't create directory.`);
  }
  const extractor = await unrarer.createExtractorFromData({ data: fileBuffer });
  switch (extractionOptions.extractTarget) {
    case "cover":
      try {
        const list = extractor.getFileList();
        const fileHeaders = [...list.fileHeaders];
        // Walk the listing and extract the FIRST regular (non-directory)
        // entry only. The original iterated with lodash `each` and an async
        // callback, so writes were never awaited and iteration continued
        // past the first match; a plain loop fixes both.
        for (const fileHeader of fileHeaders) {
          const fileName = explodePath(fileHeader.name).fileName;
          if (fileName !== "" && fileHeader.flags.directory === false) {
            logger.info(`Attempting to write ${fileHeader.name}`);
            const file = extractor.extract({ files: [fileHeader.name] });
            const extractedFile = [...file.files][0];
            await writeFile(
              paths.targetPath + "/" + fileName,
              extractedFile.extraction,
            );
            return {
              name: `${fileName}`,
              path: paths.targetPath,
              fileSize: fileHeader.packSize,
            };
          }
        }
        // Original bug: when no extractable entry existed, the wrapping
        // promise never settled. Resolve an explicit error payload instead.
        return {
          message: "No extractable cover found in archive.",
          errorCode: "404",
          data: walkedFolder.name,
        };
      } catch (error) {
        logger.error(`${error}: Couldn't write file.`);
        // Preserve the original contract: a failed cover write rejects.
        throw error;
      }
    case "all":
      try {
        const files = extractor.extract({});
        const extractedFiles = [...files.files];
        const comicBookCoverFiles: IExtractedComicBookCoverFile[] = [];
        for (const file of extractedFiles) {
          logger.info(`Attempting to write ${file.fileHeader.name}`);
          const fileName = explodePath(file.fileHeader.name).fileName;
          if (fileName !== "" && file.fileHeader.flags.directory === false) {
            await writeFile(
              paths.targetPath + "/" + fileName,
              file.extraction,
            );
          }
          // Directories are still recorded in the result list, matching
          // the original behavior — only the disk write is skipped.
          comicBookCoverFiles.push({
            name: `${file.fileHeader.name}`,
            path: paths.targetPath,
            fileSize: file.fileHeader.packSize,
          });
        }
        return _.flatten(comicBookCoverFiles);
      } catch (error) {
        // Original resolved (not rejected) an error payload here; keep that.
        return {
          message: `${error}`,
          errorCode: "500",
          data: walkedFolder.name,
        };
      }
    default:
      return {
        message: "File format not supported, yet.",
        errorCode: "90",
        // was placeholder "asda"; report the offending archive instead
        data: walkedFolder.name,
      };
  }
};
/**
 * Streams a .cbz (ZIP) archive and writes its entries into the target folder.
 * When extractTarget is "cover", stops after the first written file.
 *
 * @param extractionOptions controls target ("cover" | "all") and output folder
 * @param walkedFolder      identifies the source archive
 * @returns descriptors for every file written to disk
 */
export const unzip = async (
  extractionOptions: IExtractionOptions,
  walkedFolder: IFolderData,
): Promise<
  | IExtractedComicBookCoverFile[]
  | IExtractedComicBookCoverFile
  | IExtractComicBookCoverErrorResponse
> => {
  const directoryOptions = {
    // setgid bit + rwxrwxr-x — NOTE(review): confirm this matches the
    // permission scheme used elsewhere in deployment.
    mode: 0o2775,
  };
  const paths = constructPaths(extractionOptions, walkedFolder);
  try {
    await fse.ensureDir(paths.targetPath, directoryOptions);
    logger.info(`${paths.targetPath} was created or already exists.`);
  } catch (error) {
    logger.error(`${error} Couldn't create directory.`);
  }
  const extractedFiles: IExtractedComicBookCoverFile[] = [];
  const zip = createReadStream(paths.inputFilePath).pipe(
    unzipper.Parse({ forceStream: true }),
  );
  for await (const entry of zip) {
    const fileName = explodePath(entry.path).fileName;
    const size = entry.vars.uncompressedSize;
    // Cover mode: one file is enough. Drain the current entry before
    // breaking so the underlying zip stream isn't left stalled.
    if (
      extractedFiles.length === 1 &&
      extractionOptions.extractTarget === "cover"
    ) {
      entry.autodrain();
      break;
    }
    if (fileName !== "" && entry.type !== "Directory") {
      logger.info(`Attempting to write ${fileName}`);
      // Original piped without awaiting completion AND then autodrained the
      // same entry; await the write and drain only unconsumed entries.
      await new Promise<void>((resolve, reject) => {
        entry
          .pipe(createWriteStream(paths.targetPath + "/" + fileName))
          .on("finish", resolve)
          .on("error", reject);
      });
      extractedFiles.push({
        name: fileName,
        fileSize: size,
        path: paths.targetPath,
      });
    } else {
      // Skipped entry (directory or nameless): must be drained so the
      // parse stream keeps flowing.
      entry.autodrain();
    }
  }
  // No need for the original `new Promise(async …)` wrapper — an async
  // function already returns a promise.
  return _.flatten(extractedFiles);
};
/**
 * Dispatches an archive to the extractor matching its file extension.
 *
 * @param extractionOptions forwarded to the concrete extractor
 * @param walkedFolder      archive descriptor; `.extension` selects the codec
 * @returns the extractor's result, or an error payload for unknown formats
 */
export const extractArchive = async (
  extractionOptions: IExtractionOptions,
  walkedFolder: IFolderData,
): Promise<
  | IExtractedComicBookCoverFile
  | IExtractedComicBookCoverFile[]
  | IExtractComicBookCoverErrorResponse
> => {
  switch (walkedFolder.extension) {
    case ".cbz":
      return await unzip(extractionOptions, walkedFolder);
    case ".cbr":
      return await unrar(extractionOptions, walkedFolder);
    default:
      return {
        message: "File format not supported, yet.",
        errorCode: "90",
        // was `${extractionOptions}`, which stringifies to "[object Object]"
        data: JSON.stringify(extractionOptions),
      };
  }
};
/**
 * Entry point for cover extraction across one or many archives.
 *
 * @param options       `extractionMode` "bulk" processes every folder
 *                      concurrently; "single" processes only the first
 * @param walkedFolders archives discovered by `walkFolder`
 * @returns flattened extraction results, or an error payload for an
 *          unknown extraction mode
 */
export const getCovers = async (
  options: IExtractionOptions,
  walkedFolders: Array<IFolderData>,
): Promise<
  | IExtractedComicBookCoverFile
  | IExtractComicBookCoverErrorResponse
  | IExtractedComicBookCoverFile[]
  | (
      | IExtractedComicBookCoverFile
      | IExtractComicBookCoverErrorResponse
      | IExtractedComicBookCoverFile[]
    )[]
  | IExtractComicBookCoverErrorResponse
> => {
  switch (options.extractionMode) {
    // Braces scope the `const` to this case (the original leaked a lexical
    // declaration across the whole switch — no-case-declarations).
    case "bulk": {
      const extractedDataPromises = map(walkedFolders, async (folder) => {
        return await extractArchive(options, folder);
      });
      // Run all extractions concurrently, then flatten per-archive arrays.
      return Promise.all(extractedDataPromises).then((data) => _.flatten(data));
    }
    case "single":
      return await extractArchive(options, walkedFolders[0]);
    default:
      logger.error("Unknown extraction mode selected.");
      return {
        message: "Unknown extraction mode selected.",
        errorCode: "90",
        // was `${options}`, which stringifies to "[object Object]"
        data: JSON.stringify(options),
      };
  }
};
/**
 * Recursively walks `folder` (dot-files excluded) and collects metadata for
 * every .cbz/.cbr archive found.
 *
 * @param folder root directory to scan
 * @returns one descriptor per discovered comic archive
 */
export const walkFolder = async (folder: string): Promise<IFolderData[]> => {
  const discovered: IFolderData[] = [];
  const walk = Walk.create({ sort: filterOutDotFiles });
  await walk(folder, async (err, pathname, dirent) => {
    if (err) {
      logger.error("Failed to lstat directory", { error: err });
      return false;
    }
    const extension = path.extname(dirent.name);
    if (extension === ".cbz" || extension === ".cbr") {
      const containedIn = path.dirname(pathname);
      logger.info(`Scanned ${dirent.name} contained in ${containedIn}`);
      discovered.push({
        name: path.basename(dirent.name, extension),
        extension,
        containedIn,
        isFile: dirent.isFile(),
        isLink: dirent.isSymbolicLink(),
      });
    }
  });
  return discovered;
};
/**
 * Splits a slash-delimited path into its directory segments and final
 * file-name segment.
 *
 * @param filePath path using "/" separators
 * @returns `exploded` (all segments except the last) and `fileName`
 *          (the last segment; "" for an empty input)
 */
export const explodePath = (filePath: string): IExplodedPathResponse => {
  const exploded = filePath.split("/");
  // Original used lodash remove + indexOf, which matched the FIRST
  // occurrence of the last segment's value — so a path with duplicate
  // segments (e.g. "x/y/x") produced an empty fileName. pop() takes the
  // last segment positionally and leaves the rest in `exploded`.
  const fileName = exploded.pop() ?? "";
  return {
    exploded,
    fileName,
  };
};
/**
 * Derives the extraction output directory and the archive's source path
 * for a given archive descriptor.
 */
const constructPaths = (
  extractionOptions: IExtractionOptions,
  walkedFolder: IFolderData,
) => {
  const { containedIn, name, extension } = walkedFolder;
  return {
    targetPath: `${extractionOptions.targetExtractionFolder}/${name}`,
    inputFilePath: `${containedIn}/${name}${extension}`,
  };
};
/**
 * Reads an image file's header via sharp and returns its metadata object.
 *
 * @param imageFilePath path to the image on disk
 * @returns sharp's metadata result
 */
export const extractMetadataFromImage = async (
  imageFilePath: string,
): Promise<unknown> => {
  const metadata = await sharp(imageFilePath).metadata();
  return metadata;
};
// Keep only directory entries whose names do not begin with a dot
// (used as the walker's sort/filter hook).
const filterOutDotFiles = (entities) => {
  const isVisible = (entity) => !entity.name.startsWith(".");
  return entities.filter(isVisible);
};