👀 Filewatcher now correctly imports newly added comics

This commit is contained in:
2021-10-11 14:00:57 -07:00
parent e61268f9ae
commit 5c936a62ca
5 changed files with 8988 additions and 7027 deletions

15949
package-lock.json generated

File diff suppressed because it is too large. Load Diff

View File

@@ -22,16 +22,16 @@
"devDependencies": {
"@types/lodash": "^4.14.168",
"@types/unzipper": "^0.10.3",
"@typescript-eslint/eslint-plugin": "^2.26.0",
"@typescript-eslint/parser": "^2.26.0",
"@typescript-eslint/eslint-plugin": "^4.33.0",
"@typescript-eslint/parser": "^4.33.0",
"7zip-min": "^1.4.0",
"chokidar": "^3.5.2",
"eslint": "^6.8.0",
"eslint": "^7.32.0",
"eslint-plugin-import": "^2.20.2",
"eslint-plugin-prefer-arrow": "^1.2.2",
"jest": "^25.1.0",
"jest-cli": "^25.1.0",
"moleculer-repl": "^0.6.2",
"jest": "^27.2.5",
"jest-cli": "^27.2.5",
"moleculer-repl": "^0.5.7",
"pino": "^6.13.2",
"pino-pretty": "^7.0.0",
"ts-jest": "^25.3.0",
@@ -50,6 +50,7 @@
"amqplib": "^0.7.1",
"fs-extra": "^10.0.0",
"imghash": "^0.0.9",
"jsdom": "^15.2.1",
"leven": "^3.1.0",
"lodash": "^4.17.21",
"mkdirp": "^0.5.5",

View File

@@ -3,6 +3,7 @@ import ApiGateway from "moleculer-web";
import chokidar from "chokidar";
import { logger } from "../utils/logger.utils";
import path from "path";
import fs from "fs";
import { IExtractionOptions, IFolderData } from "threetwo-ui-typings";
export default class ApiService extends Service {
public constructor(broker: ServiceBroker) {
@@ -90,20 +91,42 @@ export default class ApiService extends Service {
pollInterval: 100,
},
});
// How long to wait between mtime polls before deciding a copy has finished.
const fileCopyDelaySeconds = 10;
// Polls a newly-detected file until its mtime stops changing, then kicks off
// the import pipeline. NOTE(review): despite the name, `previousPath` is the
// fs.Stats snapshot from the previous poll, not a path — confirm and rename.
const checkFileCopyComplete = (path, previousPath) => {
fs.stat(path, async (err, stat) => {
// NOTE(review): throwing inside an async fs callback rejects an unhandled
// promise rather than propagating to the watcher — verify intended.
if (err) { throw err; }
// mtime unchanged since last poll => the copy is assumed complete.
if (stat.mtime.getTime() === previousPath.mtime.getTime()) {
logger.info('File copy complete, starting import...');
// Walk the new path, then extract a single cover per file and import to DB.
const walkedFolders: IFolderData = await broker.call("import.walkFolders", { basePathToWalk: path });
const extractionOptions: IExtractionOptions = {
extractTarget: "cover",
targetExtractionFolder: "./userdata/covers",
extractionMode: "single",
};
// NOTE(review): mixes `broker.call` above with `this.broker.call` here —
// presumably the same broker instance; confirm.
await this.broker.call("import.processAndImportToDB", { walkedFolders, extractionOptions });
} else {
// Still changing: re-check after the delay, passing the fresh stats
// as the next poll's baseline.
setTimeout(checkFileCopyComplete, fileCopyDelaySeconds * 1000, path, stat);
}
})
}
fileWatcher
.on("add", async (path, stats) => {
logger.info("Watcher detected new files.")
logger.info(
`File ${path} has been added with stats: ${JSON.stringify(
stats
)}`
);
const walkedFolders:IFolderData = await broker.call("import.walkFolders", {basePathToWalk: path});
const extractionOptions: IExtractionOptions = {
extractTarget: "cover",
targetExtractionFolder: "./userdata/covers",
extractionMode: "single",
};
this.broker.call("import.processAndImportToDB", {walkedFolders, extractionOptions });
logger.info('File copy started...');
fs.stat(path, function (err, stat) {
if (err) {
logger.error('Error watching file for copy completion. ERR: ' + err.message);
logger.error('Error file not processed. PATH: ' + path);
throw err;
}
setTimeout(checkFileCopyComplete, fileCopyDelaySeconds * 1000, path, stat);
});
})
.on("change", (path, stats) =>
logger.info(

View File

@@ -23,6 +23,7 @@ import {
extractCoverFromFile,
unrarArchive,
} from "../utils/uncompression.utils";
import {scrapeIssuesFromDOM} from "../utils/scraping.utils";
const ObjectId = require("mongoose").Types.ObjectId;
export default class ImportService extends Service {
@@ -369,6 +370,13 @@ export default class ImportService extends Service {
return Promise.all(volumesMetadata);
},
},
scrapeIssueNamesFromDOM: {
rest: "POST /scrapeIssueNamesFromDOM",
params: {},
async handler(ctx: Context<{ html: string}>) {
return scrapeIssuesFromDOM(ctx.params.html);
}
},
unrarArchive: {
rest: "POST /unrarArchive",
params: {},

8
utils/scraping.utils.ts Normal file
View File

@@ -0,0 +1,8 @@
const jsdom = require("jsdom");
const { JSDOM } = jsdom;

/**
 * Parses an HTML string and returns the text content of the first <p>
 * element in the resulting document.
 *
 * @param html - raw HTML markup to parse (e.g. a scraped page fragment)
 * @returns the first paragraph's text content, or null when the document
 *          contains no <p> element
 */
export const scrapeIssuesFromDOM = (html: string): string | null => {
	const dom = new JSDOM(html);
	// querySelector returns null when no <p> exists; optional chaining
	// prevents the original "Cannot read properties of null" crash and
	// yields null instead.
	return dom.window.document.querySelector("p")?.textContent ?? null;
};