🔧 Bumped dep versions and cleaned up console logs

Signed-off-by: Rishi Ghan <rishi.ghan@gmail.com>
This commit is contained in:
2023-03-01 21:51:37 -05:00
parent 6a4cf1d82f
commit 612a6ef344
6 changed files with 4362 additions and 1004 deletions

View File

@@ -1,8 +1,8 @@
+const mongoose = require("mongoose");
 const paginate = require("mongoose-paginate-v2");
 const { Client } = require("@elastic/elasticsearch");
 import ComicVineMetadataSchema from "./comicvine.metadata.model";
 import { mongoosastic } from "mongoosastic-ts";
-const mongoose = require("mongoose")
 import {
   MongoosasticDocument,
   MongoosasticModel,
@@ -10,6 +10,7 @@ import {
 } from "mongoosastic-ts/dist/types";
 const ELASTICSEARCH_HOST =
   process.env.ELASTICSEARCH_URI || "http://localhost:9200";
+console.log(`ELASTICSEARCH -> ${ELASTICSEARCH_HOST}`);
 export const eSClient = new Client({
   node: ELASTICSEARCH_HOST,
   auth: {
@@ -127,34 +128,11 @@ const ComicSchema = mongoose.Schema(
 );
 ComicSchema.plugin(mongoosastic, {
-  index: "comics",
-  type: "comic",
   esClient: eSClient,
-});
+} as MongoosasticPluginOpts);
 ComicSchema.plugin(paginate);
 const Comic = mongoose.model("Comic", ComicSchema);
-// Comic.createMapping({
-//   analysis: {
-//     analyzer: {
-//       content: {
-//         type: "custom",
-//         tokenizer: "whitespace",
-//       },
-//     },
-//   },
-// }).then((data) => {
-//   console.log("Mapping the index...")
-//   console.log(data)
-// })
-// const stream = Comic.synchronize();
-// let count = 0;
-// stream.on("data", function (err, doc) {
-//   count++;
-// });
-// stream.on("close", function () {
-//   console.log("indexed " + count + " documents!");
-// });
-// stream.on("error", function (err) {
-//   console.log(err);
-// });
 export default Comic;

package-lock.json (generated file) — 5302 lines changed

File diff suppressed because it is too large Load Diff

View File

@@ -37,7 +37,7 @@
   },
   "dependencies": {
     "@bluelovers/fast-glob": "https://github.com/rishighan/fast-glob-v2-api.git",
-    "@elastic/elasticsearch": "^7.16.0",
+    "@elastic/elasticsearch": "^8.6.0",
     "@jorgeferrero/stream-to-buffer": "^2.0.6",
     "@root/walk": "^1.1.0",
     "@types/jest": "^27.4.1",
@@ -56,20 +56,19 @@
     "http-response-stream": "^1.0.9",
     "image-js": "^0.34.0",
     "imghash": "^0.0.9",
-    "jsdom": "^15.2.1",
-    "klaw": "^4.0.1",
+    "jsdom": "^21.1.0",
+    "klaw": "^4.1.0",
     "leven": "^3.1.0",
     "lodash": "^4.17.21",
     "mkdirp": "^0.5.5",
-    "moleculer": "^0.14.23",
+    "moleculer": "^0.14.28",
     "moleculer-bull": "github:rishighan/moleculer-bull#1.0.0",
-    "moleculer-db": "^0.8.17",
-    "moleculer-db-adapter-mongo": "^0.4.7",
-    "moleculer-db-adapter-mongoose": "^0.8.12",
+    "moleculer-db": "^0.8.23",
+    "moleculer-db-adapter-mongo": "^0.4.17",
+    "moleculer-db-adapter-mongoose": "^0.9.2",
     "moleculer-io": "^2.2.0",
-    "moleculer-web": "^0.10.4",
-    "mongoosastic-ts": "^5.0.7",
-    "mongoose": "^5.12.7",
+    "moleculer-web": "^0.10.5",
+    "mongoosastic-ts": "^6.0.3",
     "mongoose-paginate-v2": "^1.3.18",
     "nats": "^1.3.2",
     "node-calibre": "^2.1.1",

View File

@@ -67,7 +67,7 @@ export default class QueueService extends Service {
       hooks: {},
       queues: {
         "process.import": {
-          concurrency: 20,
+          concurrency: 10,
           async process(job: SandboxedJob) {
             console.info("New job received!", job.data);
             console.info(`Processing queue...`);
@@ -98,7 +98,7 @@ export default class QueueService extends Service {
           if (!isNil(job.data.bundleId)) {
             bundleId = job.data.bundleId;
           }
-          // Orchestrate the payload
+          // Orchestrate the payload
           const payload = {
             importStatus: {

View File

@@ -57,7 +57,7 @@ import klaw from "klaw";
 import path from "path";
 import { COMICS_DIRECTORY, USERDATA_DIRECTORY } from "../constants/directories";
-console.log(process.env.MONGO_URI);
+console.log(`MONGO -> ${process.env.MONGO_URI}`);
 export default class ImportService extends Service {
   public constructor(public broker: ServiceBroker) {
     super(broker);
@@ -233,7 +233,6 @@ export default class ImportService extends Service {
       try {
         let volumeDetails;
         const comicMetadata = ctx.params.payload;
-
         // When an issue is added from the search CV feature
         // we solicit volume information and add that to mongo
         if (
@@ -260,7 +259,6 @@ export default class ImportService extends Service {
             console.log(
               `Import type: [${ctx.params.importType}]`
             );
-            console.log(JSON.stringify(comicMetadata, null, 4));
             switch (ctx.params.importType) {
               case "new":
                 return await Comic.create(comicMetadata);
@@ -280,6 +278,7 @@ export default class ImportService extends Service {
             return false;
           }
         } catch (error) {
+          console.log(error);
           throw new Errors.MoleculerError(
             "Import failed.",
             500

View File

@@ -88,7 +88,7 @@ export const extractComicInfoXMLFromRar = async (
     path: path.resolve(filePath),
     bin: `${UNRAR_BIN_PATH}`, // this will change depending on Docker base OS
   });
-  const filesInArchive: [RarFile] = await new Promise(
+  const filesInArchive: [RarFile] = await new Promise(
     (resolve, reject) => {
       return archive.list((err, entries) => {
@@ -353,8 +353,6 @@ export const extractFromArchive = async (filePath: string) => {
     case ".cbr":
       const cbrResult = await extractComicInfoXMLFromRar(filePath);
-      console.log("ASDASDASDASDas");
-      console.log(JSON.stringify(cbrResult, null, 4))
       return Object.assign({}, ...cbrResult);
     default: