🪢 Scaffold for Elasticsearch

This commit is contained in:
2021-12-17 19:41:21 -08:00
parent 32ad866c72
commit c316a1e0bc
7 changed files with 13137 additions and 623 deletions

View File

@@ -1,6 +1,17 @@
const mongoose = require("mongoose");
var mexp = require('mongoose-elasticsearch-xp').v7;
const paginate = require("mongoose-paginate-v2");
const { Client } = require("@elastic/elasticsearch");
const eSClient = new Client({
node: "http://ghost:9200",
auth: {
username: "elastic",
password: "password",
},
});
const ComicSchema = mongoose.Schema({
importStatus: {
isImported: Boolean,
@@ -34,7 +45,7 @@ const ComicSchema = mongoose.Schema({
gcd: {},
},
rawFileDetails: {
name: String,
name: { type: String, es_indexed: true },
path: String,
fileSize: Number,
extension: String,
@@ -63,7 +74,9 @@ const ComicSchema = mongoose.Schema({
},
},
}, { timestamps: true});
ComicSchema.plugin(mexp, {
client: eSClient,
});
ComicSchema.plugin(paginate);
const Comic = mongoose.model("Comic", ComicSchema);
export default Comic;

12552
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -35,13 +35,14 @@
"typescript": "^3.9.10"
},
"dependencies": {
"7zip-bin": "^5.1.1",
"7zip-min": "^1.4.0",
"@elastic/elasticsearch": "^7.15.0",
"@root/walk": "^1.1.0",
"@types/jest": "^25.1.4",
"@types/mkdirp": "^1.0.0",
"@types/node": "^13.9.8",
"@types/string-similarity": "^4.0.0",
"7zip-bin": "^5.1.1",
"7zip-min": "^1.4.0",
"chokidar": "^3.5.2",
"dotenv": "^10.0.0",
"fs-extra": "^10.0.0",
@@ -53,12 +54,14 @@
"lodash": "^4.17.21",
"mkdirp": "^0.5.5",
"moleculer": "^0.14.16",
"moleculer-addons": "github:rishighan/moleculer-addons#master",
"moleculer-bull": "^0.2.8",
"moleculer-db": "^0.8.13",
"moleculer-db-adapter-mongo": "^0.4.7",
"moleculer-db-adapter-mongoose": "^0.8.9",
"moleculer-web": "^0.10.3",
"mongoose": "^5.12.7",
"mongoose-elasticsearch-xp": "^5.8.0",
"mongoose-paginate-v2": "^1.3.18",
"nats": "^1.3.2",
"node-7z": "^3.0.0",

View File

@@ -72,8 +72,8 @@ export default class ApiService extends Service {
},
],
log4XXResponses: false,
logRequestParams: null,
logResponseData: null,
logRequestParams: true,
logResponseData: true,
assets: {
folder: "public",
// Options to `server-static` module

View File

@@ -28,26 +28,11 @@ import path from "path";
import { COMICS_DIRECTORY, USERDATA_DIRECTORY } from "../constants/directories";
export default class ImportService extends Service {
public constructor(
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "import" }
) {
public constructor(public broker: ServiceBroker) {
super(broker);
this.parseServiceSchema(
Service.mergeSchemas(
{
this.parseServiceSchema({
name: "import",
mixins: [DbMixin("comics", Comic)],
settings: {
// Available fields in the responses
fields: ["_id", "name", "quantity", "price"],
// Validator for the `create` & `insert` actions.
entityValidator: {
name: "string|min:3",
price: "number|positive",
},
},
hooks: {},
actions: {
walkFolders: {
@@ -55,13 +40,11 @@ export default class ImportService extends Service {
params: {
basePathToWalk: "string",
},
async handler(
ctx: Context<{ basePathToWalk: string }>
) {
return await walkFolder(
ctx.params.basePathToWalk,
[".cbz", ".cbr"]
);
async handler(ctx: Context<{ basePathToWalk: string }>) {
return await walkFolder(ctx.params.basePathToWalk, [
".cbz",
".cbr",
]);
},
},
convertXMLToJSON: {
@@ -84,20 +67,12 @@ export default class ImportService extends Service {
klaw(path.resolve(COMICS_DIRECTORY))
// 1.1 Filter on .cb* extensions
.pipe(
through2.obj(function (
item,
enc,
next
) {
let fileExtension = path.extname(
item.path
);
through2.obj(function (item, enc, next) {
let fileExtension = path.extname(item.path);
if (
[
".cbz",
".cbr",
".cb7",
].includes(fileExtension)
[".cbz", ".cbr", ".cb7"].includes(
fileExtension
)
) {
this.push(item);
}
@@ -119,15 +94,12 @@ export default class ImportService extends Service {
});
if (!comicExists) {
// 2. Send the extraction job to the queue
await broker.call(
"libraryqueue.enqueue",
{
await broker.call("libraryqueue.enqueue", {
fileObject: {
filePath: item.path,
size: item.stats.size,
},
}
);
});
} else {
console.log(
"Comic already exists in the library."
@@ -185,12 +157,11 @@ export default class ImportService extends Service {
| IExtractedComicBookCoverFile
| IExtractComicBookCoverErrorResponse
| IExtractedComicBookCoverFile[] = await extractCoverFromFile2(
extractionOptions,
extractionOptions
);
// 2. Add to mongo
const dbImportResult =
await this.broker.call(
const dbImportResult = await this.broker.call(
"import.rawImportToDB",
{
importStatus: {
@@ -200,8 +171,7 @@ export default class ImportService extends Service {
score: "0",
},
},
rawFileDetails:
comicBookCoverMetadata,
rawFileDetails: comicBookCoverMetadata,
sourcedMetadata: {
comicvine: {},
},
@@ -219,10 +189,7 @@ export default class ImportService extends Service {
);
}
} catch (error) {
console.error(
"Error importing comic books",
error
);
console.error("Error importing comic books", error);
}
},
},
@@ -247,14 +214,13 @@ export default class ImportService extends Service {
if (
comicMetadata.sourcedMetadata.comicvine &&
!isNil(
comicMetadata.sourcedMetadata.comicvine
.volume
comicMetadata.sourcedMetadata.comicvine.volume
)
) {
volumeDetails =
await this.getComicVineVolumeMetadata(
comicMetadata.sourcedMetadata
.comicvine.volume.api_detail_url
comicMetadata.sourcedMetadata.comicvine
.volume.api_detail_url
);
comicMetadata.sourcedMetadata.comicvine.volumeInformation =
volumeDetails;
@@ -301,10 +267,8 @@ export default class ImportService extends Service {
);
}
return new Promise(async (resolve, reject) => {
const volumeDetails =
await volumeDetailsPromise;
matchedResult.volumeInformation =
volumeDetails;
const volumeDetails = await volumeDetailsPromise;
matchedResult.volumeInformation = volumeDetails;
Comic.findByIdAndUpdate(
comicObjectId,
{
@@ -347,15 +311,12 @@ export default class ImportService extends Service {
{
$push: {
"acquisition.directconnect": {
resultId:
ctx.params.resultId,
bundleId:
ctx.params.bundleId,
resultId: ctx.params.resultId,
bundleId: ctx.params.bundleId,
directoryIds:
ctx.params.directoryIds,
searchInstanceId:
ctx.params
.searchInstanceId,
ctx.params.searchInstanceId,
},
},
},
@@ -375,9 +336,7 @@ export default class ImportService extends Service {
getComicBooks: {
rest: "POST /getComicBooks",
params: {},
async handler(
ctx: Context<{ paginationOptions: object }>
) {
async handler(ctx: Context<{ paginationOptions: object }>) {
return await Comic.paginate(
{},
ctx.params.paginationOptions
@@ -418,13 +377,9 @@ export default class ImportService extends Service {
]);
// 2. Map over the aggregation result and get volume metadata from CV
// 2a. Make a call to comicvine-service
volumesMetadata = map(
volumes,
async (volume) => {
volumesMetadata = map(volumes, async (volume) => {
if (!isNil(volume.volumeURI)) {
return await ctx.call(
"comicvine.getVolumes",
{
return await ctx.call("comicvine.getVolumes", {
volumeURI: volume.volumeURI,
data: {
format: "json",
@@ -433,11 +388,9 @@ export default class ImportService extends Service {
limit: "1",
offset: "0",
},
});
}
);
}
}
);
});
return Promise.all(volumesMetadata);
},
@@ -451,9 +404,7 @@ export default class ImportService extends Service {
.then((data) => {
console.info(data);
const foo = fsExtra.emptyDirSync(
path.resolve(
`${USERDATA_DIRECTORY}/covers`
)
path.resolve(`${USERDATA_DIRECTORY}/covers`)
);
const foo2 = fsExtra.emptyDirSync(
path.resolve(
@@ -508,15 +459,10 @@ export default class ImportService extends Service {
});
resp.on("end", () => {
console.info(
data,
"HERE, BITCHES< HERE"
);
console.log(`${apiDetailURL} returned data.`)
const volumeInformation =
JSON.parse(data);
resolve(
volumeInformation.results
);
resolve(volumeInformation.results);
});
}
)
@@ -526,9 +472,6 @@ export default class ImportService extends Service {
});
}),
},
},
schema
)
);
});
}
}

View File

@@ -15,14 +15,9 @@ import { io } from "./api.service";
const REDIS_URI = process.env.REDIS_URI || `redis://0.0.0.0:6379`;
export default class LibraryQueueService extends Service {
public constructor(
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "libraryqueue" }
) {
public constructor(public broker: ServiceBroker) {
super(broker);
this.parseServiceSchema(
Service.mergeSchemas(
{
this.parseServiceSchema({
name: "libraryqueue",
mixins: [BullMQMixin(REDIS_URI), DbMixin("comics", Comic)],
settings: {},
@@ -83,16 +78,17 @@ export default class LibraryQueueService extends Service {
methods: {},
async started(): Promise<any> {
io.on("connection", async (client) => {
await this.getQueue(
"process.import"
).on("failed", async (job, error) => {
await this.getQueue("process.import").on(
"failed",
async (job, error) => {
console.error(
`An error occured in 'process.import' queue on job id '${job.id}': ${error.message}`
);
});
await this.getQueue(
"process.import"
).on("completed", async (job, res) => {
}
);
await this.getQueue("process.import").on(
"completed",
async (job, res) => {
client.emit("action", {
type: "LS_COVER_EXTRACTED",
result: res,
@@ -100,19 +96,18 @@ export default class LibraryQueueService extends Service {
console.info(
`Job with the id '${job.id}' completed.`
);
});
await this.getQueue(
"process.import"
).on("stalled", async (job) => {
}
);
await this.getQueue("process.import").on(
"stalled",
async (job) => {
console.warn(
`The job with the id '${job} got stalled!`
);
});
});
},
},
schema
)
}
);
});
},
});
}
}

View File

@@ -0,0 +1,60 @@
"use strict";
import {
  Context,
  Service,
  ServiceBroker,
  ServiceSchema,
  Errors,
} from "moleculer";
const { Client } = require("@elastic/elasticsearch");
// NOTE(review): node URL and credentials are hard-coded; these should come
// from environment variables before this leaves scaffolding.
const client = new Client({
  node: "http://ghost:9200",
  auth: {
    username: "elastic",
    password: "password",
  },
});
import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model";

/**
 * Moleculer service exposing full-text search over the comics collection,
 * backed by the Elasticsearch index maintained by the
 * mongoose-elasticsearch-xp plugin on the Comic model.
 */
export default class SettingsService extends Service {
  // @ts-ignore
  public constructor(
    public broker: ServiceBroker,
    schema: ServiceSchema<{}> = { name: "search" }
  ) {
    super(broker);
    this.parseServiceSchema(
      Service.mergeSchemas(
        {
          name: "search",
          // NOTE(review): `client` is an @elastic/elasticsearch Client
          // instance, not a moleculer service schema — confirm it belongs
          // in `mixins` (the model already holds its own client via the
          // mexp plugin).
          mixins: [client, DbMixin("comics", Comic)],
          hooks: {},
          actions: {
            searchComic: {
              rest: "POST /searchComic",
              params: {},
              /**
               * Runs a query_string search against the comics index and
               * returns the matched documents' `_source` payloads.
               * Accepts an optional `queryString` param; defaults to
               * "batman" to preserve the original scaffold behavior.
               */
              async handler(ctx: Context<{ queryString?: string }>) {
                const query = ctx.params.queryString ?? "batman";
                // Fix: await and return the hits — the original dropped
                // the promise, so the REST action always responded empty.
                const results = await Comic.esSearch({
                  query_string: {
                    query,
                  },
                });
                return results.body.hits.hits.map(
                  (item) => item._source
                );
              },
            },
          },
          methods: {},
        },
        schema
      )
    );
  }
}