🐉 Changes to support archive operations
.gitignore (vendored, 3 changes)
@@ -67,4 +67,5 @@ typings/
 dist/
 comics/
 userdata/
 .DS_Store
+erl_crash.dump
@@ -39,6 +39,11 @@ const ComicSchema = mongoose.Schema({
 fileSize: Number,
 extension: String,
 containedIn: String,
+pageCount: Number,
+cover: {
+    filePath: String,
+    stats: Object,
+},
 calibreMetadata: {
     coverWriteResult: String,
 }
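For context, the full schema block reads roughly as follows after this change. This is a minimal sketch assuming the usual mongoose setup around it; the model name "Comic" and the elided fields are assumptions, not part of the diff.

import mongoose from "mongoose";

const ComicSchema = new mongoose.Schema({
    // ...pre-existing fields elided...
    fileSize: Number,
    extension: String,
    containedIn: String,
    pageCount: Number, // new: page count read from the archive
    cover: {
        // new: path to the extracted cover and the sharp resize stats
        filePath: String,
        stats: Object,
    },
    calibreMetadata: {
        coverWriteResult: String,
    },
});

// Model name is an assumption for illustration.
export const Comic = mongoose.model("Comic", ComicSchema);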
package-lock.json (generated, 2158 changes)
File diff suppressed because it is too large
@@ -24,6 +24,7 @@
 "@types/unzipper": "^0.10.3",
 "@typescript-eslint/eslint-plugin": "^2.26.0",
 "@typescript-eslint/parser": "^2.26.0",
+"7zip-min": "^1.4.0",
 "chokidar": "^3.5.2",
 "eslint": "^6.8.0",
 "eslint-plugin-import": "^2.20.2",
@@ -34,7 +35,9 @@
 "pino": "^6.13.2",
 "pino-pretty": "^7.0.0",
 "ts-jest": "^25.3.0",
-"ts-node": "^8.8.1"
+"ts-node": "^8.8.1",
+"unrar": "^0.2.0",
+"unrar-promise": "^2.0.1"
 },
 "dependencies": {
 "@root/walk": "^1.1.0",
@@ -64,7 +67,7 @@
 "sharp": "^0.28.1",
 "socket.io": "^4.1.1",
 "socket.io-stream": "^0.5.3",
-"threetwo-ui-typings": "^1.0.5",
+"threetwo-ui-typings": "^1.0.10",
 "typescript": "^3.8.3",
 "xml2js": "^0.4.23"
 },
@@ -102,10 +102,6 @@ export default class ApiService extends Service {
 extractTarget: "cover",
 targetExtractionFolder: "./userdata/covers",
 extractionMode: "single",
-paginationOptions: {
-    pageLimit: 25,
-    page: 1,
-},
 };
 this.broker.call("import.processAndImportToDB", { walkedFolders, extractionOptions });
 })
@@ -1,5 +1,5 @@
 "use strict";
-import { each, forOwn, isNil, isUndefined, map } from "lodash";
+import { isNil, map } from "lodash";
 import {
 Context,
 Service,
@@ -17,8 +17,13 @@ import { sendToRabbitMQ } from "../queue/importQueue";
 import {
 IExtractComicBookCoverErrorResponse,
 IExtractedComicBookCoverFile,
+IExtractionOptions,
 } from "threetwo-ui-typings";
-import { extractCoverFromFile } from "../utils/uncompression.utils";
+import {
+    extractCoverFromFile,
+    getPageCountFromRarArchive,
+    unrarArchive,
+} from "../utils/uncompression.utils";
 const ObjectId = require("mongoose").Types.ObjectId;

 export default class ImportService extends Service {
@@ -53,7 +58,8 @@ export default class ImportService extends Service {
 ctx: Context<{ basePathToWalk: string }>
 ) {
 return await walkFolder(
-ctx.params.basePathToWalk
+ctx.params.basePathToWalk,
+[".cbz", ".cbr"],
 );
 },
 },
@@ -78,6 +84,7 @@ export default class ImportService extends Service {
 walkedFolders: [
 {
 name: string;
 path: string;
+extension: string;
 containedIn: string;
 fileSize: number;
@@ -104,7 +111,7 @@ export default class ImportService extends Service {
 extractionOptions,
 folder
 );

 // 2. Add to mongo
 const dbImportResult =
 await this.broker.call(
@@ -359,10 +366,25 @@ export default class ImportService extends Service {
 }
 }
 );

 return Promise.all(volumesMetadata);
 },
 },
+getPageCountForComicBook: {
+    rest: "POST /getPageCountsForComicBook",
+    params: {},
+    async handler(ctx: Context<{ filePath: string }>) {
+        return await getPageCountFromRarArchive(ctx.params.filePath);
+    },
+},
+unrarArchive: {
+    rest: "POST /unrarArchive",
+    params: {},
+    timeout: 10000,
+    async handler(ctx: Context<{ filePath: string; options: IExtractionOptions }>) {
+        return await unrarArchive(ctx.params.filePath, ctx.params.options);
+    },
+},
 },
 methods: {
 getComicVineVolumeMetadata: (apiDetailURL) =>
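A usage sketch for the two new actions, assuming they run against a started Moleculer broker and that the service is named "import" (consistent with the import.processAndImportToDB call earlier in this diff); the file paths and option values are hypothetical.

// Count pages in a .cbr without extracting it.
const pageCount = await broker.call("import.getPageCountForComicBook", {
    filePath: "./comics/example.cbr", // hypothetical path
});

// Expand a .cbr into a target folder and get back the walked image files.
const extractedFiles = await broker.call("import.unrarArchive", {
    filePath: "./comics/example.cbr", // hypothetical path
    options: {
        targetExtractionFolder: "./userdata/expanded/example", // hypothetical folder
    },
});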
@@ -14,10 +14,11 @@ import { includes, remove, indexOf } from "lodash";

 const ALLOWED_IMAGE_FILE_FORMATS = [".jpg", ".jpeg", ".png"];

-export const walkFolder = async (folder: string): Promise<IFolderData[]> => {
+export const walkFolder = async (folder: string, formats: string[]): Promise<IFolderData[]> => {
 const result: IFolderData[] = [];
 let walkResult: IFolderData = {
 name: "",
 path: "",
 extension: "",
 containedIn: "",
 isFile: false,
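With the second parameter added, callers now choose the extension whitelist instead of relying on the previously hard-coded one. A usage sketch (paths hypothetical):

// Walk a comics library for archive files.
const comics = await walkFolder("./comics", [".cbz", ".cbr"]);

// Walk an extraction folder for the images produced by unrarArchive.
const pages = await walkFolder("./userdata/expanded", [".jpg", ".jpeg", ".png"]);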
@@ -31,10 +32,10 @@ export const walkFolder = async (folder: string): Promise<IFolderData[]> => {
 logger.error("Failed to lstat directory", { error: err });
 return false;
 }
-if ([".cbz", ".cbr"].includes(path.extname(dirent.name))) {
+console.log(path.resolve(pathname));
+if ([...formats].includes(path.extname(dirent.name))) {
 walkResult = {
 name: path.basename(dirent.name, path.extname(dirent.name)),
 path: path.dirname(pathname),
 extension: path.extname(dirent.name),
 fileSize: fs.statSync(path.resolve(pathname)).size,
 containedIn: path.dirname(pathname),
@@ -22,20 +22,22 @@ export const resizeImage = async (
 newWidth: number,
 newHeight?: number
 ): Promise<ISharpResizedImageStats> => {
-return new Promise((resolve, reject) => {
-    sharp(imageFile)
-        .resize(newWidth)
-        .toFile(`${outputPath}`, (err, info) => {
-            if (err) {
-                logger.error("Failed to resize image:");
-                logger.error(err);
-                reject(err);
-            }
-            logger.info("Image file resized with the following parameters:");
-            logger.info(info);
-            resolve(info);
-        });
-});
+const buffer = await sharp(imageFile)
+    .resize(newWidth, newHeight, {
+        fit: sharp.fit.inside,
+        withoutEnlargement: true,
+    })
+    .toBuffer();
+return await sharp(buffer).toFile(`${outputPath}`, (err, info) => {
+    if (err) {
+        logger.error("Failed to resize image:");
+        logger.error(err);
+        return err;
+    }
+
+    logger.info("Image file resized with the following parameters:");
+    logger.info(info);
+    return info;
+});
 };
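A usage sketch for the reworked resizeImage, which now resizes to fit inside the requested bounds without enlarging, per the sharp options added above; the paths are hypothetical and newHeight may be omitted.

const stats = await resizeImage(
    "./userdata/covers/example_cover.jpg", // hypothetical source image
    "./userdata/covers/example_200px.jpg", // hypothetical rendition path
    200
);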
@@ -50,6 +52,6 @@ export const calculateLevenshteinDistance = async (
 resolve({ levenshteinDistance: leven(hash1, hash2) });
 });
 } else {
-reject("Can't calculate the Levenshtein distance")
+reject("Can't calculate the Levenshtein distance");
 }
 };
@@ -42,17 +42,12 @@ import {
 ISharpResizedImageStats,
 } from "threetwo-ui-typings";
 import { logger } from "./logger.utils";
-import {
-    constructPaths,
-    explodePath,
-} from "../utils/file.utils";
+import { constructPaths, explodePath, walkFolder } from "../utils/file.utils";
 import { resizeImage } from "./imagetransformation.utils";
 const { writeFile, readFile } = require("fs").promises;

-import sevenBin from "7zip-bin";
-import { list, extract } from "node-7z";
-const pathTo7zip = sevenBin.path7za;
-const unrarer = require("node-unrar-js");
-const sevenZip = require("7zip-min");
+import { list } from "unrar-promise";
+const unrar = require("node-unrar-js");
 const { Calibre } = require("node-calibre");

 export const extractCoverFromFile = async (
@@ -65,24 +60,35 @@ export const extractCoverFromFile = async (
 > => {
 return new Promise(async (resolve, reject) => {
 try {
-const constructedPaths = constructPaths(extractionOptions, walkedFolder);
-const calibre = new Calibre({
-    library: path.resolve("./userdata/calibre-lib"),
-});
+const constructedPaths = constructPaths(
+    extractionOptions,
+    walkedFolder
+);
+const calibre = new Calibre();

 // create directory
 const directoryOptions = {
     mode: 0o2775,
 };

 try {
-    await fse.ensureDir(constructedPaths.targetPath, directoryOptions);
+    await fse.ensureDir(
+        constructedPaths.targetPath,
+        directoryOptions
+    );
     logger.info(`${constructedPaths.targetPath} was created.`);
 } catch (error) {
     logger.error(`${error}: Couldn't create directory.`);
 }
+
 // extract the cover
 let result: string;
-const targetCoverImageFilePath = path.resolve(constructedPaths.targetPath + "/" + walkedFolder.name + "_cover.jpg")
+const targetCoverImageFilePath = path.resolve(
+    constructedPaths.targetPath +
+        "/" +
+        walkedFolder.name +
+        "_cover.jpg"
+);
 result = await calibre.run(
     `ebook-meta`,
     [path.resolve(constructedPaths.inputFilePath)],
@@ -90,19 +96,32 @@
 getCover: targetCoverImageFilePath,
 }
 );

 // create renditions
-const renditionPath = constructedPaths.targetPath + "/" + walkedFolder.name + "_200px.jpg";
-const stats: ISharpResizedImageStats = await resizeImage(targetCoverImageFilePath, path.resolve(renditionPath), 200);
+const renditionPath =
+    constructedPaths.targetPath +
+    "/" +
+    walkedFolder.name +
+    "_200px.jpg";
+const stats: ISharpResizedImageStats = await resizeImage(
+    targetCoverImageFilePath,
+    path.resolve(renditionPath),
+    200
+);

 resolve({
     name: walkedFolder.name,
     path: renditionPath,
     fileSize: walkedFolder.fileSize,
     extension: path.extname(constructedPaths.inputFilePath),
     cover: {
         filePath: renditionPath,
         stats,
     },
     containedIn: walkedFolder.containedIn,
     calibreMetadata: {
         coverWriteResult: result,
     },
 });
 } catch (error) {
 console.log(error);
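A usage sketch for extractCoverFromFile; the argument shapes follow IExtractionOptions and IFolderData from threetwo-ui-typings as used in this diff, and the concrete values are hypothetical.

const result = await extractCoverFromFile(
    {
        extractTarget: "cover",
        targetExtractionFolder: "./userdata/covers",
        extractionMode: "single",
    } as IExtractionOptions,
    {
        name: "Example Comic 001", // hypothetical archive name
        path: "./comics",
        extension: ".cbr",
        containedIn: "./comics",
        fileSize: 123456,
        isFile: true,
    } as IFolderData
);
// result.cover.filePath should point at the 200px rendition written above.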
@@ -110,149 +129,67 @@ export const extractCoverFromFile = async (
-export const unzip = async (
-    extractionOptions: IExtractionOptions,
-    walkedFolder: IFolderData
-): Promise<
-    | IExtractedComicBookCoverFile
-    | IExtractedComicBookCoverFile[]
-    | IExtractComicBookCoverErrorResponse
-> => {
-    const paths = constructPaths(extractionOptions, walkedFolder);
-    // create directory
-    const directoryOptions = {
-        mode: 0o2775,
-    };
-    try {
-        await fse.ensureDir(paths.targetPath, directoryOptions);
-        logger.info(`${paths.targetPath} was created.`);
-    } catch (error) {
-        logger.error(`${error}: Couldn't create directory.`);
-    }
-    switch (extractionOptions.extractTarget) {
-        case "cover":
-            return new Promise((resolve, reject) => {
-                try {
-                    let firstImg;
-                    const listStream = list(path.resolve(paths.inputFilePath), {
-                        $cherryPick: ["*.png", "*.jpg", "*.jpeg", "*.webp"],
-                        $bin: pathTo7zip,
-                        $progress: true,
-                        recursive: true,
-                    });
-                    listStream.on("data", (data) => {
-                        if (!firstImg) firstImg = data;
-                    });
-                    listStream.on("end", () => {
-                        if (firstImg) {
-                            const extractStream = extract(
-                                paths.inputFilePath,
-                                paths.targetPath,
-                                {
-                                    $cherryPick: firstImg.file,
-                                    $bin: pathTo7zip,
-                                    $progress: true,
-                                    recursive: true,
-                                }
-                            );
-                            extractStream.on("data", (data) => {
-                                // do something with the image
-                                console.log(data);
-                            });
-                        }
-                    });
-                } catch (error) {
-                    console.log(error);
-                }
-                // resolve({
-                //     name: `${extractedFiles[0].fileHeader.name}`,
-                //     path: paths.targetPath,
-                //     fileSize: extractedFiles[0].fileHeader.packSize,
-                //     containedIn: walkedFolder.containedIn,
-                // })
-            });
-        case "all":
-            break;
-        default:
-            return {
-                message: "File format not supported, yet.",
-                errorCode: "90",
-                data: "asda",
-            };
-            logger.error(`${error}`);
-    }
-};
+export const unrarArchive = async (
+    filePath: string,
+    options: IExtractionOptions
+) => {
+    // create directory
+    const directoryOptions = {
+        mode: 0o2775,
+    };
+
+    const fileBuffer = await fse.readFile(filePath).catch((err) =>
+        console.error("Failed to read file", err)
+    );
+    try {
+        await fse.ensureDir(options.targetExtractionFolder, directoryOptions);
+        logger.info(`${options.targetExtractionFolder} was created.`);
+
+        const extractor = await unrar.createExtractorFromData({
+            data: fileBuffer,
+        });
+        const files = extractor.extract({});
+        const extractedFiles = [...files.files];
+        for (const file of extractedFiles) {
+            logger.info(`Attempting to write ${file.fileHeader.name}`);
+            const fileBuffer = file.extraction;
+            const fileName = explodePath(file.fileHeader.name).fileName;
+
+            await fse.writeFile(
+                options.targetExtractionFolder + "/" + fileName,
+                fileBuffer
+            );
+
+            // folder.forEach(async (page) => {
+            //     await resizeImage(
+            //         page.path + "/" + page.name + page.extension,
+            //         path.resolve(options.targetExtractionFolder + "/" + page.name + page.extension),
+            //         200
+            //     );
+            // });
+        }
+        // walk the newly created folder and return results
+        return await walkFolder(options.targetExtractionFolder, [
+            ".jpg",
+            ".png",
+            ".jpeg",
+        ]);
+    } catch (error) {
+        logger.error(`${error}: Couldn't create directory.`);
+    }
+};

-export const unrar = async (
-    extractionOptions: IExtractionOptions,
-    walkedFolder: IFolderData
-): Promise<IExtractedComicBookCoverFile> => {
-    switch (extractionOptions.extractTarget) {
-        case "cover":
-            return new Promise(async (resolve, reject) => {
-                const paths = constructPaths(extractionOptions, walkedFolder);
-                const directoryOptions = {
-                    mode: 0o2775,
-                };
-                try {
-                    // read the file into a buffer
-                    const fileBuffer = await readFile(
-                        paths.inputFilePath
-                    ).catch((err) => console.error("Failed to read file", err));
-                    try {
-                        await fse.ensureDir(paths.targetPath, directoryOptions);
-                        logger.info(`${paths.targetPath} was created.`);
-                    } catch (error) {
-                        logger.error(`${error}: Couldn't create directory.`);
-                    }
-
-                    const extractor = await unrarer.createExtractorFromData({
-                        data: fileBuffer,
-                    });
-                    const files = extractor.extract({});
-                    const extractedFiles = [...files.files];
-
-                    for (const file of extractedFiles) {
-                        logger.info(`Attempting to write ${file.fileHeader.name}`);
-                        const fileBuffer = file.extraction;
-                        const fileName = explodePath(file.fileHeader.name).fileName;
-
-                        if (
-                            fileName !== "" &&
-                            file.fileHeader.flags.directory === false
-                        ) {
-                            await writeFile(
-                                paths.targetPath + "/" + fileName,
-                                fileBuffer
-                            );
-                        }
-                    }
-                    resolve({
-                        name: `${extractedFiles[0].fileHeader.name}`,
-                        path: paths.targetPath,
-                        extension: path.extname(extractedFiles[0].fileHeader.name),
-                        fileSize: extractedFiles[0].fileHeader.packSize,
-                        containedIn: walkedFolder.containedIn,
-                        calibreMetadata: {
-                            coverWriteResult: "",
-                        },
-                    });
-                } catch (error) {
-                    logger.error(`${error}: Couldn't write file.`);
-                    reject(error);
-                }
-            });
-        case "all":
-            break;
-        default:
-            break;
-    }
-};
+
+export const getPageCountFromRarArchive = async (filePath: string) => {
+    const pageCount = await list(filePath);
+    return pageCount.length;
+};
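A usage sketch tying the two new helpers together; the paths are hypothetical and the option shape follows IExtractionOptions. Note that unrarArchive returns undefined on the error path, hence the optional chaining.

// How many entries does the archive list?
const pageCount = await getPageCountFromRarArchive("./comics/example.cbr");

// Expand the archive, then receive the walked image files back.
const pages = await unrarArchive("./comics/example.cbr", {
    targetExtractionFolder: "./userdata/expanded/example", // hypothetical folder
} as IExtractionOptions);

console.log(`archive lists ${pageCount} entries; extracted ${pages?.length ?? 0} images`);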