👀 Filewatcher now correctly imports newly added comics

This commit is contained in:
2021-10-11 14:00:57 -07:00
parent e61268f9ae
commit 5c936a62ca
5 changed files with 8988 additions and 7027 deletions

15949
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -22,16 +22,16 @@
"devDependencies": { "devDependencies": {
"@types/lodash": "^4.14.168", "@types/lodash": "^4.14.168",
"@types/unzipper": "^0.10.3", "@types/unzipper": "^0.10.3",
"@typescript-eslint/eslint-plugin": "^2.26.0", "@typescript-eslint/eslint-plugin": "^4.33.0",
"@typescript-eslint/parser": "^2.26.0", "@typescript-eslint/parser": "^4.33.0",
"7zip-min": "^1.4.0", "7zip-min": "^1.4.0",
"chokidar": "^3.5.2", "chokidar": "^3.5.2",
"eslint": "^6.8.0", "eslint": "^7.32.0",
"eslint-plugin-import": "^2.20.2", "eslint-plugin-import": "^2.20.2",
"eslint-plugin-prefer-arrow": "^1.2.2", "eslint-plugin-prefer-arrow": "^1.2.2",
"jest": "^25.1.0", "jest": "^27.2.5",
"jest-cli": "^25.1.0", "jest-cli": "^27.2.5",
"moleculer-repl": "^0.6.2", "moleculer-repl": "^0.5.7",
"pino": "^6.13.2", "pino": "^6.13.2",
"pino-pretty": "^7.0.0", "pino-pretty": "^7.0.0",
"ts-jest": "^25.3.0", "ts-jest": "^25.3.0",
@@ -50,6 +50,7 @@
"amqplib": "^0.7.1", "amqplib": "^0.7.1",
"fs-extra": "^10.0.0", "fs-extra": "^10.0.0",
"imghash": "^0.0.9", "imghash": "^0.0.9",
"jsdom": "^15.2.1",
"leven": "^3.1.0", "leven": "^3.1.0",
"lodash": "^4.17.21", "lodash": "^4.17.21",
"mkdirp": "^0.5.5", "mkdirp": "^0.5.5",

View File

@@ -3,6 +3,7 @@ import ApiGateway from "moleculer-web";
import chokidar from "chokidar"; import chokidar from "chokidar";
import { logger } from "../utils/logger.utils"; import { logger } from "../utils/logger.utils";
import path from "path"; import path from "path";
import fs from "fs";
import { IExtractionOptions, IFolderData } from "threetwo-ui-typings"; import { IExtractionOptions, IFolderData } from "threetwo-ui-typings";
export default class ApiService extends Service { export default class ApiService extends Service {
public constructor(broker: ServiceBroker) { public constructor(broker: ServiceBroker) {
@@ -90,20 +91,42 @@ export default class ApiService extends Service {
pollInterval: 100, pollInterval: 100,
}, },
}); });
// Seconds to wait between mtime checks before declaring a file copy finished.
const fileCopyDelaySeconds = 10;
// Polls the watched file's mtime: if it is unchanged since the previous
// snapshot the copy is assumed complete and the import pipeline runs;
// otherwise the check is re-scheduled with the fresh stat snapshot.
const checkFileCopyComplete = (path, previousStat) => {
	fs.stat(path, async (err, stat) => {
		if (err) {
			// Log before rethrowing so the failure is visible in service logs
			// (consistent with the "add" handler; a bare throw inside this
			// callback would otherwise surface with no context).
			logger.error("Error watching file for copy completion. ERR: " + err.message);
			logger.error("Error file not processed. PATH: " + path);
			throw err;
		}
		if (stat.mtime.getTime() === previousStat.mtime.getTime()) {
			logger.info('File copy complete, starting import...');
			const walkedFolders: IFolderData = await broker.call("import.walkFolders", { basePathToWalk: path });
			const extractionOptions: IExtractionOptions = {
				extractTarget: "cover",
				targetExtractionFolder: "./userdata/covers",
				extractionMode: "single",
			};
			await this.broker.call("import.processAndImportToDB", { walkedFolders, extractionOptions });
		} else {
			// mtime still changing — the copy is in progress; poll again
			// after the delay, carrying the latest stat forward.
			setTimeout(checkFileCopyComplete, fileCopyDelaySeconds * 1000, path, stat);
		}
	});
};
fileWatcher fileWatcher
.on("add", async (path, stats) => { .on("add", async (path, stats) => {
logger.info("Watcher detected new files.")
logger.info( logger.info(
`File ${path} has been added with stats: ${JSON.stringify( `File ${path} has been added with stats: ${JSON.stringify(
stats stats
)}` )}`
); );
const walkedFolders:IFolderData = await broker.call("import.walkFolders", {basePathToWalk: path});
const extractionOptions: IExtractionOptions = { logger.info('File copy started...');
extractTarget: "cover", fs.stat(path, function (err, stat) {
targetExtractionFolder: "./userdata/covers", if (err) {
extractionMode: "single", logger.error('Error watching file for copy completion. ERR: ' + err.message);
}; logger.error('Error file not processed. PATH: ' + path);
this.broker.call("import.processAndImportToDB", {walkedFolders, extractionOptions }); throw err;
}
setTimeout(checkFileCopyComplete, fileCopyDelaySeconds * 1000, path, stat);
});
}) })
.on("change", (path, stats) => .on("change", (path, stats) =>
logger.info( logger.info(

View File

@@ -23,6 +23,7 @@ import {
extractCoverFromFile, extractCoverFromFile,
unrarArchive, unrarArchive,
} from "../utils/uncompression.utils"; } from "../utils/uncompression.utils";
import {scrapeIssuesFromDOM} from "../utils/scraping.utils";
const ObjectId = require("mongoose").Types.ObjectId; const ObjectId = require("mongoose").Types.ObjectId;
export default class ImportService extends Service { export default class ImportService extends Service {
@@ -369,6 +370,13 @@ export default class ImportService extends Service {
return Promise.all(volumesMetadata); return Promise.all(volumesMetadata);
}, },
}, },
scrapeIssueNamesFromDOM: {
rest: "POST /scrapeIssueNamesFromDOM",
params: {},
async handler(ctx: Context<{ html: string}>) {
return scrapeIssuesFromDOM(ctx.params.html);
}
},
unrarArchive: { unrarArchive: {
rest: "POST /unrarArchive", rest: "POST /unrarArchive",
params: {}, params: {},

8
utils/scraping.utils.ts Normal file
View File

@@ -0,0 +1,8 @@
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
/**
 * Extracts the text of the first <p> element from a raw HTML string.
 *
 * @param html - raw HTML markup to parse with jsdom
 * @returns the paragraph's text content, or null when the document
 *          contains no <p> element
 */
export const scrapeIssuesFromDOM = (html: string): string | null => {
	const dom = new JSDOM(html);
	// querySelector returns null when no <p> exists; the previous code
	// dereferenced .textContent unconditionally and threw a TypeError
	// on any HTML without a paragraph. Optional chaining makes the
	// "no match" case an explicit null instead of a crash.
	return dom.window.document.querySelector("p")?.textContent ?? null;
};