🔧 Add a full archive-extraction endpoint

This commit is contained in:
Rishi Ghan
2022-04-26 14:57:51 -07:00
parent dc4767db27
commit 8c94db1956
3 changed files with 53 additions and 13 deletions

View File

@@ -47,6 +47,7 @@ import {
walkFolder,
getSizeOfDirectory,
} from "../utils/file.utils";
import { uncompressEntireArchive } from "../utils/uncompression.utils";
import { convertXMLToJSON } from "../utils/xml.utils";
import {
IExtractComicBookCoverErrorResponse,
@@ -88,6 +89,13 @@ export default class ImportService extends Service {
return convertXMLToJSON("lagos");
},
},
// Moleculer action: extracts every file from the archive at ctx.params.filePath
// by delegating to uncompressEntireArchive.
// NOTE(review): the params schema is empty, so filePath is not validated by
// Moleculer — confirm all callers supply it, or add a schema entry.
uncompressFullArchive : {
rest: "POST /uncompressFullArchive",
params: {},
handler: async (ctx: Context<{filePath: string}>) => {
return await uncompressEntireArchive(ctx.params.filePath);
}
},
newImport: {
rest: "POST /newImport",
params: {},
@@ -323,7 +331,7 @@ export default class ImportService extends Service {
) {
return await Comic.paginate(ctx.params.predicate, {
...ctx.params.paginationOptions,
allowDiskUse: true,
// allowDiskUse: true,
});
},
},
@@ -537,7 +545,7 @@ export default class ImportService extends Service {
{ $sort: { count: -1 } },
{ $limit: 1 },
],
mostPopulatCharacter: [],
// mostPopulatCharacter: [],
},
},
]);

View File

@@ -1,4 +1,5 @@
const Walk = require("@root/walk");
const fse = require("fs-extra");
import path from "path";
import fs from "fs";
@@ -11,6 +12,7 @@ import {
IFolderData,
} from "threetwo-ui-typings";
import { includes, remove, indexOf } from "lodash";
import { Errors } from "moleculer";
const ALLOWED_IMAGE_FILE_FORMATS = [".jpg", ".jpeg", ".png"];
@@ -70,11 +72,11 @@ export const explodePath = (filePath: string): IExplodedPathResponse => {
// returns a promise which resolves true if file exists:
export const checkFileExists = (filepath) => {
return new Promise((resolve, reject) => {
fs.access(filepath, fs.constants.F_OK, error => {
resolve(!error);
});
fs.access(filepath, fs.constants.F_OK, error => {
resolve(!error);
});
});
}
}
export const getSizeOfDirectory = async (
directoryPath: string,
@@ -120,5 +122,14 @@ export const getFileConstituents = (filePath: string) => {
};
};
export const createDirectory = async (options: any, directoryPath: string) => {
try {
await fse.ensureDir(directoryPath, options);
console.info(`Directory [ %s ] was created.`, directoryPath);
} catch (error) {
throw new Errors.MoleculerError("Failed to create directory", 500, "FileOpsError", error);
}
}
// Drops directory entries whose names begin with "." (hidden/dot files).
const filterOutDotFiles = (entities) => {
	return entities.filter((entry) => {
		const isHidden = entry.name.startsWith(".");
		return !isHidden;
	});
};

View File

@@ -38,7 +38,7 @@ import path from "path";
import sharp from "sharp";
import { IMPORT_IMAGE_FILE_FORMATS } from "../constants/allowedFileFormats";
import { USERDATA_DIRECTORY } from "../constants/directories";
import { checkFileExists, getFileConstituents } from "../utils/file.utils";
import { checkFileExists, getFileConstituents, createDirectory, walkFolder } from "../utils/file.utils";
import { convertXMLToJSON } from "./xml.utils";
const fse = require("fs-extra");
const Unrar = require("unrar");
@@ -55,7 +55,7 @@ interface RarFile {
compression: string;
}
const UNRAR_BIN_PATH = process.env.UNRAR_BIN_PATH || "/opt/homebrew/bin/unrar";
const UNRAR_BIN_PATH = process.env.UNRAR_BIN_PATH || "/usr/local/bin/unrar";
export const extractComicInfoXMLFromRar = async (
filePath: string
@@ -71,8 +71,8 @@ export const extractComicInfoXMLFromRar = async (
const { fileNameWithoutExtension, extension } =
getFileConstituents(filePath);
const targetDirectory = `${USERDATA_DIRECTORY}/covers/${fileNameWithoutExtension}`;
await fse.ensureDir(targetDirectory, directoryOptions);
console.info(`%s was created.`, targetDirectory);
await createDirectory(directoryOptions, targetDirectory);
const archive = new Unrar({
path: path.resolve(filePath),
bin: `${UNRAR_BIN_PATH}`, // this will change depending on Docker base OS
@@ -179,8 +179,8 @@ export const extractComicInfoXMLFromZip = async (
const { fileNameWithoutExtension, extension } =
getFileConstituents(filePath);
const targetDirectory = `${USERDATA_DIRECTORY}/covers/${fileNameWithoutExtension}`;
await fse.ensureDir(targetDirectory, directoryOptions);
console.info(`%s was created.`, targetDirectory);
await createDirectory(directoryOptions, targetDirectory);
let filesToWriteToDisk = { coverFile: null, comicInfoXML: null };
const extractionTargets = [];
@@ -321,6 +321,7 @@ export const extractFromArchive = async (filePath: string) => {
export const uncompressEntireArchive = async (filePath: string) => {
const { extension } = getFileConstituents(filePath);
console.log(extension);
switch (extension) {
case ".cbz":
case ".cb7":
@@ -330,5 +331,25 @@ export const uncompressEntireArchive = async (filePath: string) => {
}
};
/**
 * Extracts the entire contents of a zip-based comic archive into
 * USERDATA_DIRECTORY/expanded/<archive name without extension>.
 * @param filePath Path to the archive to uncompress.
 * @returns The walkFolder listing of image files found in the target directory.
 * @throws Errors via createDirectory (MoleculerError) when the target
 *         directory cannot be created; p7zip errors propagate unchanged.
 */
export const uncompressZipArchive = async (filePath: string) => {
	// setgid + rwxrwxr-x so group members share the extraction output.
	const directoryOptions = {
		mode: 0o2775,
	};
	// `extension` is not needed here — only the base name forms the target path.
	const { fileNameWithoutExtension } = getFileConstituents(filePath);
	const targetDirectory = `${USERDATA_DIRECTORY}/expanded/${fileNameWithoutExtension}`;
	await createDirectory(directoryOptions, targetDirectory);
	// p7zip.extract(archive, destination, files-to-extract, password, fullPaths)
	// — empty file list means "extract everything", empty string means no password.
	await p7zip.extract(filePath, targetDirectory, [], "", true);
	// Return only image entries from the extraction target.
	return await walkFolder(targetDirectory, [
		".jpg", ".jpeg", ".JPG", ".JPEG", ".png", ".bmp",
	]);
};
// TODO(review): RAR extraction is not implemented yet — this stub resolves to
// undefined for every input.
export const uncompressRarArchive = async (filePath: string) => { };