🐉 Changes to support archive operations

This commit is contained in:
2021-10-05 21:57:40 -07:00
parent 0881002905
commit 5862d71c06
9 changed files with 1437 additions and 1060 deletions

3
.gitignore vendored
View File

@@ -67,4 +67,5 @@ typings/
dist/ dist/
comics/ comics/
userdata/ userdata/
.DS_Store .DS_Store
erl_crash.dump

View File

@@ -39,6 +39,11 @@ const ComicSchema = mongoose.Schema({
fileSize: Number, fileSize: Number,
extension: String, extension: String,
containedIn: String, containedIn: String,
pageCount: Number,
cover: {
filePath: String,
stats: Object,
},
calibreMetadata :{ calibreMetadata :{
coverWriteResult: String, coverWriteResult: String,
} }

2158
package-lock.json generated

File diff suppressed because it is too large. (Load Diff)

View File

@@ -24,6 +24,7 @@
"@types/unzipper": "^0.10.3", "@types/unzipper": "^0.10.3",
"@typescript-eslint/eslint-plugin": "^2.26.0", "@typescript-eslint/eslint-plugin": "^2.26.0",
"@typescript-eslint/parser": "^2.26.0", "@typescript-eslint/parser": "^2.26.0",
"7zip-min": "^1.4.0",
"chokidar": "^3.5.2", "chokidar": "^3.5.2",
"eslint": "^6.8.0", "eslint": "^6.8.0",
"eslint-plugin-import": "^2.20.2", "eslint-plugin-import": "^2.20.2",
@@ -34,7 +35,9 @@
"pino": "^6.13.2", "pino": "^6.13.2",
"pino-pretty": "^7.0.0", "pino-pretty": "^7.0.0",
"ts-jest": "^25.3.0", "ts-jest": "^25.3.0",
"ts-node": "^8.8.1" "ts-node": "^8.8.1",
"unrar": "^0.2.0",
"unrar-promise": "^2.0.1"
}, },
"dependencies": { "dependencies": {
"@root/walk": "^1.1.0", "@root/walk": "^1.1.0",
@@ -64,7 +67,7 @@
"sharp": "^0.28.1", "sharp": "^0.28.1",
"socket.io": "^4.1.1", "socket.io": "^4.1.1",
"socket.io-stream": "^0.5.3", "socket.io-stream": "^0.5.3",
"threetwo-ui-typings": "^1.0.5", "threetwo-ui-typings": "^1.0.10",
"typescript": "^3.8.3", "typescript": "^3.8.3",
"xml2js": "^0.4.23" "xml2js": "^0.4.23"
}, },

View File

@@ -102,10 +102,6 @@ export default class ApiService extends Service {
extractTarget: "cover", extractTarget: "cover",
targetExtractionFolder: "./userdata/covers", targetExtractionFolder: "./userdata/covers",
extractionMode: "single", extractionMode: "single",
paginationOptions: {
pageLimit: 25,
page: 1,
},
}; };
this.broker.call("import.processAndImportToDB", {walkedFolders, extractionOptions }); this.broker.call("import.processAndImportToDB", {walkedFolders, extractionOptions });
}) })

View File

@@ -1,5 +1,5 @@
"use strict"; "use strict";
import { each, forOwn, isNil, isUndefined, map } from "lodash"; import { isNil, map } from "lodash";
import { import {
Context, Context,
Service, Service,
@@ -17,8 +17,13 @@ import { sendToRabbitMQ } from "../queue/importQueue";
import { import {
IExtractComicBookCoverErrorResponse, IExtractComicBookCoverErrorResponse,
IExtractedComicBookCoverFile, IExtractedComicBookCoverFile,
IExtractionOptions,
} from "threetwo-ui-typings"; } from "threetwo-ui-typings";
import { extractCoverFromFile } from "../utils/uncompression.utils"; import {
extractCoverFromFile,
getPageCountFromRarArchive,
unrarArchive,
} from "../utils/uncompression.utils";
const ObjectId = require("mongoose").Types.ObjectId; const ObjectId = require("mongoose").Types.ObjectId;
export default class ImportService extends Service { export default class ImportService extends Service {
@@ -53,7 +58,8 @@ export default class ImportService extends Service {
ctx: Context<{ basePathToWalk: string }> ctx: Context<{ basePathToWalk: string }>
) { ) {
return await walkFolder( return await walkFolder(
ctx.params.basePathToWalk ctx.params.basePathToWalk,
[".cbz", ".cbr"],
); );
}, },
}, },
@@ -78,6 +84,7 @@ export default class ImportService extends Service {
walkedFolders: [ walkedFolders: [
{ {
name: string; name: string;
path: string;
extension: string; extension: string;
containedIn: string; containedIn: string;
fileSize: number; fileSize: number;
@@ -104,7 +111,7 @@ export default class ImportService extends Service {
extractionOptions, extractionOptions,
folder folder
); );
// 2. Add to mongo // 2. Add to mongo
const dbImportResult = const dbImportResult =
await this.broker.call( await this.broker.call(
@@ -359,10 +366,25 @@ export default class ImportService extends Service {
} }
} }
); );
return Promise.all(volumesMetadata); return Promise.all(volumesMetadata);
}, },
}, },
getPageCountForComicBook: {
rest: "POST /getPageCountsForComicBook",
params: {},
async handler(ctx:Context<{filePath: string}>) {
return await getPageCountFromRarArchive(ctx.params.filePath);
}
},
unrarArchive: {
rest: "POST /unrarArchive",
params: {},
timeout: 10000,
async handler(ctx: Context<{ filePath: string, options: IExtractionOptions,}>) {
return await unrarArchive(ctx.params.filePath, ctx.params.options);
}
}
}, },
methods: { methods: {
getComicVineVolumeMetadata: (apiDetailURL) => getComicVineVolumeMetadata: (apiDetailURL) =>

View File

@@ -14,10 +14,11 @@ import { includes, remove, indexOf } from "lodash";
const ALLOWED_IMAGE_FILE_FORMATS = [".jpg", ".jpeg", ".png"]; const ALLOWED_IMAGE_FILE_FORMATS = [".jpg", ".jpeg", ".png"];
export const walkFolder = async (folder: string): Promise<IFolderData[]> => { export const walkFolder = async (folder: string, formats: string[]): Promise<IFolderData[]> => {
const result: IFolderData[] = []; const result: IFolderData[] = [];
let walkResult: IFolderData = { let walkResult: IFolderData = {
name: "", name: "",
path: "",
extension: "", extension: "",
containedIn: "", containedIn: "",
isFile: false, isFile: false,
@@ -31,10 +32,10 @@ export const walkFolder = async (folder: string): Promise<IFolderData[]> => {
logger.error("Failed to lstat directory", { error: err }); logger.error("Failed to lstat directory", { error: err });
return false; return false;
} }
if ([".cbz", ".cbr"].includes(path.extname(dirent.name))) { if ([...formats].includes(path.extname(dirent.name))) {
console.log(path.resolve(pathname));
walkResult = { walkResult = {
name: path.basename(dirent.name, path.extname(dirent.name)), name: path.basename(dirent.name, path.extname(dirent.name)),
path: path.dirname(pathname),
extension: path.extname(dirent.name), extension: path.extname(dirent.name),
fileSize: fs.statSync(path.resolve(pathname)).size, fileSize: fs.statSync(path.resolve(pathname)).size,
containedIn: path.dirname(pathname), containedIn: path.dirname(pathname),

View File

@@ -22,20 +22,22 @@ export const resizeImage = async (
newWidth: number, newWidth: number,
newHeight?: number newHeight?: number
): Promise<ISharpResizedImageStats> => { ): Promise<ISharpResizedImageStats> => {
return new Promise((resolve, reject) => { const buffer = await sharp(imageFile)
sharp(imageFile) .resize(newWidth, newHeight, {
.resize(newWidth) fit: sharp.fit.inside,
.toFile(`${outputPath}`, (err, info) => { withoutEnlargement: true,
if (err) { })
logger.error("Failed to resize image:"); .toBuffer();
logger.error(err); return await sharp(buffer).toFile(`${outputPath}`, (err, info) => {
reject(err); if (err) {
} logger.error("Failed to resize image:");
logger.error(err);
return err;
}
logger.info("Image file resized with the following parameters:"); logger.info("Image file resized with the following parameters:");
logger.info(info); logger.info(info);
resolve(info); return info;
});
}); });
}; };
@@ -50,6 +52,6 @@ export const calculateLevenshteinDistance = async (
resolve({ levenshteinDistance: leven(hash1, hash2) }); resolve({ levenshteinDistance: leven(hash1, hash2) });
}); });
} else { } else {
reject("Can't calculate the Levenshtein distance") reject("Can't calculate the Levenshtein distance");
} }
}; };

View File

@@ -42,17 +42,12 @@ import {
ISharpResizedImageStats, ISharpResizedImageStats,
} from "threetwo-ui-typings"; } from "threetwo-ui-typings";
import { logger } from "./logger.utils"; import { logger } from "./logger.utils";
import { import { constructPaths, explodePath, walkFolder } from "../utils/file.utils";
constructPaths,
explodePath,
} from "../utils/file.utils";
import { resizeImage } from "./imagetransformation.utils"; import { resizeImage } from "./imagetransformation.utils";
const { writeFile, readFile } = require("fs").promises;
import sevenBin from "7zip-bin"; const sevenZip = require("7zip-min");
import { list, extract } from "node-7z"; import { list } from "unrar-promise";
const pathTo7zip = sevenBin.path7za; const unrar = require("node-unrar-js");
const unrarer = require("node-unrar-js");
const { Calibre } = require("node-calibre"); const { Calibre } = require("node-calibre");
export const extractCoverFromFile = async ( export const extractCoverFromFile = async (
@@ -65,24 +60,35 @@ export const extractCoverFromFile = async (
> => { > => {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
try { try {
const constructedPaths = constructPaths(extractionOptions, walkedFolder); const constructedPaths = constructPaths(
const calibre = new Calibre({ extractionOptions,
library: path.resolve("./userdata/calibre-lib"), walkedFolder
}); );
const calibre = new Calibre();
// create directory // create directory
const directoryOptions = { const directoryOptions = {
mode: 0o2775, mode: 0o2775,
}; };
try { try {
await fse.ensureDir(constructedPaths.targetPath, directoryOptions); await fse.ensureDir(
constructedPaths.targetPath,
directoryOptions
);
logger.info(`${constructedPaths.targetPath} was created.`); logger.info(`${constructedPaths.targetPath} was created.`);
} catch (error) { } catch (error) {
logger.error(`${error}: Couldn't create directory.`); logger.error(`${error}: Couldn't create directory.`);
} }
// extract the cover
// extract the cover
let result: string; let result: string;
const targetCoverImageFilePath = path.resolve(constructedPaths.targetPath + "/" + walkedFolder.name + "_cover.jpg") const targetCoverImageFilePath = path.resolve(
constructedPaths.targetPath +
"/" +
walkedFolder.name +
"_cover.jpg"
);
result = await calibre.run( result = await calibre.run(
`ebook-meta`, `ebook-meta`,
[path.resolve(constructedPaths.inputFilePath)], [path.resolve(constructedPaths.inputFilePath)],
@@ -90,19 +96,32 @@ export const extractCoverFromFile = async (
getCover: targetCoverImageFilePath, getCover: targetCoverImageFilePath,
} }
); );
// create renditions // create renditions
const renditionPath = constructedPaths.targetPath + "/" + walkedFolder.name + "_200px.jpg"; const renditionPath =
const stats:ISharpResizedImageStats = await resizeImage(targetCoverImageFilePath, path.resolve(renditionPath), 200); constructedPaths.targetPath +
"/" +
walkedFolder.name +
"_200px.jpg";
const stats: ISharpResizedImageStats = await resizeImage(
targetCoverImageFilePath,
path.resolve(renditionPath),
200
);
resolve({ resolve({
name: walkedFolder.name, name: walkedFolder.name,
path: renditionPath, path: renditionPath,
fileSize: walkedFolder.fileSize, fileSize: walkedFolder.fileSize,
extension: path.extname(constructedPaths.inputFilePath), extension: path.extname(constructedPaths.inputFilePath),
cover: {
filePath: renditionPath,
stats,
},
containedIn: walkedFolder.containedIn, containedIn: walkedFolder.containedIn,
calibreMetadata: { calibreMetadata: {
coverWriteResult: result, coverWriteResult: result,
} },
}); });
} catch (error) { } catch (error) {
console.log(error); console.log(error);
@@ -110,149 +129,67 @@ export const extractCoverFromFile = async (
}); });
}; };
export const unzip = async ( export const unrarArchive = async (
extractionOptions: IExtractionOptions, filePath: string,
walkedFolder: IFolderData options: IExtractionOptions
): Promise< ) => {
| IExtractedComicBookCoverFile // create directory
| IExtractedComicBookCoverFile[]
| IExtractComicBookCoverErrorResponse
> => {
const paths = constructPaths(extractionOptions, walkedFolder);
const directoryOptions = { const directoryOptions = {
mode: 0o2775, mode: 0o2775,
}; };
const fileBuffer = await fse.readFile(filePath).catch((err) =>
console.error("Failed to read file", err)
);
try { try {
await fse.ensureDir(paths.targetPath, directoryOptions); await fse.ensureDir(options.targetExtractionFolder, directoryOptions);
logger.info(`${paths.targetPath} was created.`); logger.info(`${options.targetExtractionFolder} was created.`);
const extractor = await unrar.createExtractorFromData({
data: fileBuffer,
});
const files = extractor.extract({});
const extractedFiles = [...files.files];
for (const file of extractedFiles) {
logger.info(
`Attempting to write ${file.fileHeader.name}`
);
const fileBuffer = file.extraction;
const fileName = explodePath(
file.fileHeader.name
).fileName;
await fse.writeFile(
options.targetExtractionFolder + "/" + fileName,
fileBuffer
);
// folder.forEach(async (page) => {
// await resizeImage(
// page.path + "/" + page.name + page.extension,
// path.resolve(options.targetExtractionFolder + "/" + page.name + page.extension),
// 200
// );
// });
// walk the newly created folder and return results
}
return await walkFolder(options.targetExtractionFolder, [
".jpg",
".png",
".jpeg",
]);
} catch (error) { } catch (error) {
logger.error(`${error}: Couldn't create directory.`); logger.error(`${error}`);
}
switch (extractionOptions.extractTarget) {
case "cover":
return new Promise((resolve, reject) => {
try {
let firstImg;
const listStream = list(path.resolve(paths.inputFilePath), {
$cherryPick: ["*.png", "*.jpg", , "*.jpeg", "*.webp"],
$bin: pathTo7zip,
$progress: true,
recursive: true,
});
listStream.on("data", (data) => {
if (!firstImg) firstImg = data;
});
listStream.on("end", () => {
if (firstImg) {
const extractStream = extract(
paths.inputFilePath,
paths.targetPath,
{
$cherryPick: firstImg.file,
$bin: pathTo7zip,
$progress: true,
recursive: true,
}
);
extractStream.on("data", (data) => {
//do something with the image
console.log(data);
});
}
});
} catch (error) {
console.log(error);
}
// resolve({
// name: `${extractedFiles[0].fileHeader.name}`,
// path: paths.targetPath,
// fileSize: extractedFiles[0].fileHeader.packSize,
// containedIn: walkedFolder.containedIn,
//
// })
});
case "all":
break;
default:
return {
message: "File format not supported, yet.",
errorCode: "90",
data: "asda",
};
} }
}; };
export const unrar = async ( export const getPageCountFromRarArchive = async (filePath: string) => {
extractionOptions: IExtractionOptions, const pageCount = await list(filePath);
walkedFolder: IFolderData return pageCount.length;
): Promise<IExtractedComicBookCoverFile> => {
switch (extractionOptions.extractTarget) {
case "cover":
return new Promise(async (resolve, reject) => {
const paths = constructPaths(extractionOptions, walkedFolder);
const directoryOptions = {
mode: 0o2775,
};
try {
// read the file into a buffer
const fileBuffer = await readFile(
paths.inputFilePath
).catch((err) => console.error("Failed to read file", err));
try {
await fse.ensureDir(paths.targetPath, directoryOptions);
logger.info(`${paths.targetPath} was created.`);
} catch (error) {
logger.error(`${error}: Couldn't create directory.`);
}
const extractor = await unrarer.createExtractorFromData({
data: fileBuffer,
});
const files = extractor.extract({});
const extractedFiles = [...files.files];
for (const file of extractedFiles) {
logger.info(
`Attempting to write ${file.fileHeader.name}`
);
const fileBuffer = file.extraction;
const fileName = explodePath(
file.fileHeader.name
).fileName;
if (
fileName !== "" &&
file.fileHeader.flags.directory === false
) {
await writeFile(
paths.targetPath + "/" + fileName,
fileBuffer
);
}
}
resolve({
name: `${extractedFiles[0].fileHeader.name}`,
path: paths.targetPath,
extension: path.extname(extractedFiles[0].fileHeader.name),
fileSize: extractedFiles[0].fileHeader.packSize,
containedIn: walkedFolder.containedIn,
calibreMetadata: {
coverWriteResult: "",
}
});
} catch (error) {
logger.error(`${error}: Couldn't write file.`);
reject(error);
}
});
case "all":
break;
default:
break;
}
}; };