🔧 Refactored methods into utils

This commit is contained in:
2021-06-14 10:46:38 -07:00
parent 06d0c6aa72
commit 2bf74985a1
9 changed files with 244 additions and 116 deletions

View File

@@ -1,5 +1,5 @@
const mongoose = require("mongoose");
const paginate = require("mongoose-paginate");
const paginate = require("mongoose-paginate-v2");
const ComicSchema = mongoose.Schema({
importStatus: {

24
package-lock.json generated
View File

@@ -6236,20 +6236,10 @@
"resolved": "https://registry.npmjs.org/mongoose-legacy-pluralize/-/mongoose-legacy-pluralize-1.0.2.tgz",
"integrity": "sha512-Yo/7qQU4/EyIS8YDFSeenIvXxZN+ld7YdV9LqFVQJzTLye8unujAWPZ4NWKfFA+RNjh+wvTWKY9Z3E5XM6ZZiQ=="
},
"mongoose-paginate": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/mongoose-paginate/-/mongoose-paginate-5.0.3.tgz",
"integrity": "sha1-165J7Vv2Tx9692IOqGW2cFjFU3E=",
"requires": {
"bluebird": "3.0.5"
},
"dependencies": {
"bluebird": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.0.5.tgz",
"integrity": "sha1-L/nQfJs+2ynW0oD+B1KDZefs05I="
}
}
"mongoose-paginate-v2": {
"version": "1.3.18",
"resolved": "https://registry.npmjs.org/mongoose-paginate-v2/-/mongoose-paginate-v2-1.3.18.tgz",
"integrity": "sha512-MTEyXvQmUNlXyPGophxILHTYQQ80r3uMtgdnKVr+4qgWLM6JxHbWsFpCmaJJ7ofuV7bhkfBTGx3EDoJo+bIKFw=="
},
"mpath": {
"version": "0.8.3",
@@ -9435,9 +9425,9 @@
}
},
"ws": {
"version": "7.4.5",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.4.5.tgz",
"integrity": "sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g=="
"version": "7.4.6",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz",
"integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A=="
},
"xml-name-validator": {
"version": "3.0.0",

View File

@@ -45,7 +45,7 @@
"moleculer-db-adapter-mongoose": "^0.8.9",
"moleculer-web": "^0.9.0",
"mongoose": "^5.12.7",
"mongoose-paginate": "^5.0.3",
"mongoose-paginate-v2": "^1.3.18",
"nats": "^1.3.2",
"node-unrar-js": "^1.0.2",
"pino": "^6.11.3",

View File

@@ -100,7 +100,7 @@ export default class ApiService extends Service {
this.logger.info("Client connected via websocket!");
client.on(
"call",
"importComicsInDB",
async ({ action, params, opts }, done) => {
this.logger.info(
"Received request from client! Action:",
@@ -117,11 +117,27 @@ export default class ApiService extends Service {
extractionOptions,
folder
);
const dbImportResult =
await this.broker.call(
"import.rawImportToDB",
{
importStatus: {
isImported: true,
tagged: false,
matchedResult: {
score: "0",
},
},
rawFileDetails:
comicBookCoverMetadata,
},
{}
);
client.emit(
"comicBookCoverMetadata",
comicBookCoverMetadata
);
client.emit("comicBookCoverMetadata", {
comicBookCoverMetadata,
dbImportResult,
});
});
case "single":

View File

@@ -0,0 +1,62 @@
"use strict";
import {
Context,
Service,
ServiceBroker,
ServiceSchema,
Errors,
} from "moleculer";
import { resizeImage } from "../utils/imagetransformation.utils";
export default class ProductsService extends Service {
// @ts-ignore
public constructor(
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "imagetransformation" }
) {
super(broker);
this.parseServiceSchema(
Service.mergeSchemas(
{
name: "imagetransformation",
mixins: [],
settings: {
// Available fields in the responses
fields: ["_id", "name", "quantity", "price"],
// Validator for the `create` & `insert` actions.
entityValidator: {
name: "string|min:3",
price: "number|positive",
},
},
hooks: {},
actions: {
resize: {
rest: "POST /resizeImage",
params: {},
async handler(
ctx: Context<{
path: string;
newWidth: number;
newHeight: number;
outputPath: string;
}>
) {
const resizeResult = await resizeImage(
ctx.params.path,
ctx.params.outputPath,
ctx.params.newWidth,
ctx.params.newHeight
);
return { resizeOperationStatus: resizeResult };
},
},
},
methods: {},
},
schema
)
);
}
}

View File

@@ -1,8 +1,14 @@
"use strict";
import { Context, Service, ServiceBroker, ServiceSchema } from "moleculer";
import {
Context,
Service,
ServiceBroker,
ServiceSchema,
Errors,
} from "moleculer";
import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model";
import { walkFolder } from "../utils/uncompression.utils";
import { walkFolder } from "../utils/file.utils";
import { convertXMLToJSON } from "../utils/xml.utils";
export default class ProductsService extends Service {
@@ -51,22 +57,43 @@ export default class ProductsService extends Service {
},
rawImportToDB: {
rest: "POST /rawImportToDB",
params: { payload: "object" },
params: {},
async handler(ctx: Context<{ payload: object }>) {
return new Promise((resolve, reject) => {
Comic.create(
ctx.params.payload,
(error, data) => {
if (data) {
resolve(data);
} else if (error) {
reject(new Error(error));
}
Comic.create(ctx.params, (error, data) => {
if (data) {
resolve(data);
} else if (error) {
throw new Errors.MoleculerError(
"Failed to import comic book",
400,
"IMS_FAILED_COMIC_BOOK_IMPORT",
data
);
}
);
});
});
},
},
getRecentlyImportedComicBooks: {
  rest: "POST /getRecentlyImportedComicBooks",
  params: {},
  /**
   * Returns a paginated list of comics. Pagination options
   * (page, limit, sort, …) are forwarded verbatim to
   * mongoose-paginate-v2 from the request body.
   */
  async handler(ctx: Context<{ paginationOptions: object }>) {
    const { paginationOptions } = ctx.params;
    return Comic.paginate({}, paginationOptions);
  },
},
getComicBookById: {
  rest: "POST /getComicBookById",
  params: { id: "string" },
  // Looks up a single comic document by its Mongo _id.
  async handler(ctx: Context<{ id: string }>) {
    const { id } = ctx.params;
    return Comic.findById(id);
  },
},
},
methods: {},
},

74
utils/file.utils.ts Normal file
View File

@@ -0,0 +1,74 @@
const Walk = require("@root/walk");
import path from "path";
import {
IExplodedPathResponse,
IExtractComicBookCoverErrorResponse,
IExtractedComicBookCoverFile,
IExtractionOptions,
IFolderData,
} from "../interfaces/folder.interface";
import { logger } from "./logger.utils";
import { each, isEmpty, map, remove, indexOf } from "lodash";
/**
 * Recursively walks `folder` and collects metadata for every comic
 * archive (.cbz / .cbr) encountered. Dotfiles are dropped from each
 * directory listing by the walker's sort hook.
 */
export const walkFolder = async (folder: string): Promise<IFolderData[]> => {
  const matches: IFolderData[] = [];
  const walk = Walk.create({ sort: filterOutDotFiles });
  await walk(folder, async (err, pathname, dirent) => {
    if (err) {
      logger.error("Failed to lstat directory", { error: err });
      return false;
    }
    const extension = path.extname(dirent.name);
    if (extension === ".cbz" || extension === ".cbr") {
      const containedIn = path.dirname(pathname);
      logger.info(`Scanned ${dirent.name} contained in ${containedIn}`);
      matches.push({
        name: path.basename(dirent.name, extension),
        extension,
        containedIn,
        isFile: dirent.isFile(),
        isLink: dirent.isSymbolicLink(),
      });
    }
  });
  return matches;
};
/**
 * Splits a POSIX-style path into its directory segments and file name.
 * Mutates the split array by removing the last segment (matching the
 * previous lodash `remove` behavior) and returns both pieces.
 *
 * Fix: the old `remove(exploded, (item) => indexOf(exploded, item) ===
 * exploded.length - 1)` broke when the file name duplicated an earlier
 * segment (indexOf finds the first occurrence) and mutated the array
 * while iterating; `pop()` is both correct and simpler.
 */
export const explodePath = (filePath: string): IExplodedPathResponse => {
  const exploded = filePath.split("/");
  const fileName = exploded.pop() ?? "";
  return {
    exploded,
    fileName,
  };
};
/**
 * Builds the extraction target directory and the full path to the
 * source archive for a walked comic file.
 */
export const constructPaths = (
  extractionOptions: IExtractionOptions,
  walkedFolder: IFolderData
) => ({
  targetPath: `${extractionOptions.targetExtractionFolder}/${walkedFolder.name}`,
  inputFilePath: `${walkedFolder.containedIn}/${walkedFolder.name}${walkedFolder.extension}`,
});
// Walker "sort" hook: drops hidden entries (names beginning with ".")
// from each directory listing before traversal.
const filterOutDotFiles = (entries) => {
  return entries.filter((entry) => entry.name.charAt(0) !== ".");
};

View File

@@ -0,0 +1,36 @@
const sharp = require("sharp");
import { logger } from "./logger.utils";
import { explodePath } from "./file.utils";
import { isUndefined } from "lodash";
/**
 * Reads the embedded metadata (format, dimensions, …) of the image at
 * `imageFilePath` via sharp.
 *
 * Fix: `sharp(...).metadata()` already returns a promise — the previous
 * `.then((metadata) => metadata)` round-trip added nothing.
 */
export const extractMetadataFromImage = async (
  imageFilePath: string
): Promise<unknown> => {
  return await sharp(imageFilePath).metadata();
};
/**
 * Resizes the image at `imageFilePath` to `newWidth` x `newHeight` and
 * writes it to `outputPath`. When `newHeight` is omitted, sharp keeps
 * the aspect ratio.
 *
 * Fixes: `newHeight` was accepted but never passed to `resize`, and the
 * callback form of `toFile` mixed with `await` resolved before the write
 * finished, returning the Sharp instance and swallowing errors. The
 * promise form awaits the real OutputInfo and surfaces failures.
 */
export const resizeImage = async (
  imageFilePath: string,
  outputPath: string,
  newWidth: number,
  newHeight?: number
): Promise<unknown> => {
  try {
    const info = await sharp(imageFilePath)
      .resize(newWidth, newHeight)
      .toFile(outputPath);
    logger.info("Image file resized with the following parameters:");
    logger.info(info);
    return info;
  } catch (err) {
    logger.error("Failed to resize image:");
    logger.error(err);
    return err;
  }
};

View File

@@ -32,6 +32,7 @@ SOFTWARE.
*/
import { createReadStream, createWriteStream } from "fs";
const fse = require("fs-extra");
import path from "path";
import { default as unzipper } from "unzipper";
import _ from "lodash";
@@ -45,11 +46,9 @@ import {
} from "../interfaces/folder.interface";
import { logger } from "./logger.utils";
import { validateComicBookMetadata } from "../utils/validation.utils";
import { constructPaths, explodePath } from "../utils/file.utils";
const { writeFile, readFile } = require("fs").promises;
const sharp = require("sharp");
const unrarer = require("node-unrar-js");
const Walk = require("@root/walk");
const fse = require("fs-extra");
export const unrar = async (
extractionOptions: IExtractionOptions,
@@ -220,10 +219,7 @@ export const unzip = async (
return new Promise(async (resolve, reject) => {
logger.info("");
if (
extractedFiles.length === 1 &&
extractionOptions.extractTarget === "cover"
) {
if (extractionOptions.extractTarget === "cover") {
resolve(extractedFiles[0]);
} else {
resolve(extractedFiles);
@@ -287,76 +283,3 @@ export const getCovers = async (
};
}
};
// Recursively walks `folder` and returns metadata for every comic
// archive (.cbz/.cbr) found; dotfiles are filtered out by the walker.
// NOTE(review): this copy is removed in this commit — an identical
// implementation now lives in utils/file.utils.ts.
export const walkFolder = async (folder: string): Promise<IFolderData[]> => {
const result: IFolderData[] = [];
// Placeholder value; overwritten for every matching entry before push.
let walkResult: IFolderData = {
name: "",
extension: "",
containedIn: "",
isFile: false,
isLink: true,
};
const walk = Walk.create({ sort: filterOutDotFiles });
await walk(folder, async (err, pathname, dirent) => {
if (err) {
logger.error("Failed to lstat directory", { error: err });
return false;
}
// Only comic-book archives are of interest.
if ([".cbz", ".cbr"].includes(path.extname(dirent.name))) {
walkResult = {
name: path.basename(dirent.name, path.extname(dirent.name)),
extension: path.extname(dirent.name),
containedIn: path.dirname(pathname),
isFile: dirent.isFile(),
isLink: dirent.isSymbolicLink(),
};
logger.info(
`Scanned ${dirent.name} contained in ${path.dirname(pathname)}`
);
result.push(walkResult);
}
});
return result;
};
/**
 * Splits a POSIX-style path into its directory segments and file name,
 * removing the last segment from the split array in place.
 *
 * Fix: the lodash `remove`/`indexOf` approach failed whenever the file
 * name duplicated an earlier segment (indexOf returns the first match),
 * leaving fileName empty; `pop()` is correct and dependency-free.
 */
export const explodePath = (filePath: string): IExplodedPathResponse => {
  const exploded = filePath.split("/");
  const fileName = exploded.pop() ?? "";
  return {
    exploded,
    fileName,
  };
};
/**
 * Builds the extraction target directory and the full path to the
 * source archive for a walked comic file.
 */
const constructPaths = (
  extractionOptions: IExtractionOptions,
  walkedFolder: IFolderData
) => ({
  targetPath: `${extractionOptions.targetExtractionFolder}/${walkedFolder.name}`,
  inputFilePath: `${walkedFolder.containedIn}/${walkedFolder.name}${walkedFolder.extension}`,
});
/**
 * Reads the embedded metadata (format, dimensions, …) of the image at
 * `imageFilePath` via sharp.
 *
 * Fix: `metadata()` already yields a promise; the identity `.then`
 * callback was redundant.
 */
export const extractMetadataFromImage = async (
  imageFilePath: string
): Promise<unknown> => {
  return await sharp(imageFilePath).metadata();
};
// Walker "sort" hook: drops hidden entries (names beginning with ".")
// from each directory listing before traversal.
const filterOutDotFiles = (entries) => {
  return entries.filter((entry) => entry.name.charAt(0) !== ".");
};