🪢 Scaffold for elasticsearch

This commit is contained in:
2021-12-17 19:41:21 -08:00
parent 32ad866c72
commit c316a1e0bc
7 changed files with 13137 additions and 623 deletions

View File

@@ -1,6 +1,17 @@
const mongoose = require("mongoose"); const mongoose = require("mongoose");
var mexp = require('mongoose-elasticsearch-xp').v7;
const paginate = require("mongoose-paginate-v2"); const paginate = require("mongoose-paginate-v2");
const { Client } = require("@elastic/elasticsearch");
const eSClient = new Client({
node: "http://ghost:9200",
auth: {
username: "elastic",
password: "password",
},
});
const ComicSchema = mongoose.Schema({ const ComicSchema = mongoose.Schema({
importStatus: { importStatus: {
isImported: Boolean, isImported: Boolean,
@@ -34,7 +45,7 @@ const ComicSchema = mongoose.Schema({
gcd: {}, gcd: {},
}, },
rawFileDetails: { rawFileDetails: {
name: String, name: { type: String, es_indexed: true },
path: String, path: String,
fileSize: Number, fileSize: Number,
extension: String, extension: String,
@@ -63,7 +74,9 @@ const ComicSchema = mongoose.Schema({
}, },
}, },
}, { timestamps: true}); }, { timestamps: true});
ComicSchema.plugin(mexp, {
client: eSClient,
});
ComicSchema.plugin(paginate); ComicSchema.plugin(paginate);
const Comic = mongoose.model("Comic", ComicSchema); const Comic = mongoose.model("Comic", ComicSchema);
export default Comic; export default Comic;

12552
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -35,13 +35,14 @@
"typescript": "^3.9.10" "typescript": "^3.9.10"
}, },
"dependencies": { "dependencies": {
"7zip-bin": "^5.1.1", "@elastic/elasticsearch": "^7.15.0",
"7zip-min": "^1.4.0",
"@root/walk": "^1.1.0", "@root/walk": "^1.1.0",
"@types/jest": "^25.1.4", "@types/jest": "^25.1.4",
"@types/mkdirp": "^1.0.0", "@types/mkdirp": "^1.0.0",
"@types/node": "^13.9.8", "@types/node": "^13.9.8",
"@types/string-similarity": "^4.0.0", "@types/string-similarity": "^4.0.0",
"7zip-bin": "^5.1.1",
"7zip-min": "^1.4.0",
"chokidar": "^3.5.2", "chokidar": "^3.5.2",
"dotenv": "^10.0.0", "dotenv": "^10.0.0",
"fs-extra": "^10.0.0", "fs-extra": "^10.0.0",
@@ -53,12 +54,14 @@
"lodash": "^4.17.21", "lodash": "^4.17.21",
"mkdirp": "^0.5.5", "mkdirp": "^0.5.5",
"moleculer": "^0.14.16", "moleculer": "^0.14.16",
"moleculer-addons": "github:rishighan/moleculer-addons#master",
"moleculer-bull": "^0.2.8", "moleculer-bull": "^0.2.8",
"moleculer-db": "^0.8.13", "moleculer-db": "^0.8.13",
"moleculer-db-adapter-mongo": "^0.4.7", "moleculer-db-adapter-mongo": "^0.4.7",
"moleculer-db-adapter-mongoose": "^0.8.9", "moleculer-db-adapter-mongoose": "^0.8.9",
"moleculer-web": "^0.10.3", "moleculer-web": "^0.10.3",
"mongoose": "^5.12.7", "mongoose": "^5.12.7",
"mongoose-elasticsearch-xp": "^5.8.0",
"mongoose-paginate-v2": "^1.3.18", "mongoose-paginate-v2": "^1.3.18",
"nats": "^1.3.2", "nats": "^1.3.2",
"node-7z": "^3.0.0", "node-7z": "^3.0.0",

View File

@@ -72,8 +72,8 @@ export default class ApiService extends Service {
}, },
], ],
log4XXResponses: false, log4XXResponses: false,
logRequestParams: null, logRequestParams: true,
logResponseData: null, logResponseData: true,
assets: { assets: {
folder: "public", folder: "public",
// Options to `server-static` module // Options to `server-static` module

View File

@@ -28,26 +28,11 @@ import path from "path";
import { COMICS_DIRECTORY, USERDATA_DIRECTORY } from "../constants/directories"; import { COMICS_DIRECTORY, USERDATA_DIRECTORY } from "../constants/directories";
export default class ImportService extends Service { export default class ImportService extends Service {
public constructor( public constructor(public broker: ServiceBroker) {
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "import" }
) {
super(broker); super(broker);
this.parseServiceSchema( this.parseServiceSchema({
Service.mergeSchemas(
{
name: "import", name: "import",
mixins: [DbMixin("comics", Comic)], mixins: [DbMixin("comics", Comic)],
settings: {
// Available fields in the responses
fields: ["_id", "name", "quantity", "price"],
// Validator for the `create` & `insert` actions.
entityValidator: {
name: "string|min:3",
price: "number|positive",
},
},
hooks: {}, hooks: {},
actions: { actions: {
walkFolders: { walkFolders: {
@@ -55,13 +40,11 @@ export default class ImportService extends Service {
params: { params: {
basePathToWalk: "string", basePathToWalk: "string",
}, },
async handler( async handler(ctx: Context<{ basePathToWalk: string }>) {
ctx: Context<{ basePathToWalk: string }> return await walkFolder(ctx.params.basePathToWalk, [
) { ".cbz",
return await walkFolder( ".cbr",
ctx.params.basePathToWalk, ]);
[".cbz", ".cbr"]
);
}, },
}, },
convertXMLToJSON: { convertXMLToJSON: {
@@ -84,20 +67,12 @@ export default class ImportService extends Service {
klaw(path.resolve(COMICS_DIRECTORY)) klaw(path.resolve(COMICS_DIRECTORY))
// 1.1 Filter on .cb* extensions // 1.1 Filter on .cb* extensions
.pipe( .pipe(
through2.obj(function ( through2.obj(function (item, enc, next) {
item, let fileExtension = path.extname(item.path);
enc,
next
) {
let fileExtension = path.extname(
item.path
);
if ( if (
[ [".cbz", ".cbr", ".cb7"].includes(
".cbz", fileExtension
".cbr", )
".cb7",
].includes(fileExtension)
) { ) {
this.push(item); this.push(item);
} }
@@ -119,15 +94,12 @@ export default class ImportService extends Service {
}); });
if (!comicExists) { if (!comicExists) {
// 2. Send the extraction job to the queue // 2. Send the extraction job to the queue
await broker.call( await broker.call("libraryqueue.enqueue", {
"libraryqueue.enqueue",
{
fileObject: { fileObject: {
filePath: item.path, filePath: item.path,
size: item.stats.size, size: item.stats.size,
}, },
} });
);
} else { } else {
console.log( console.log(
"Comic already exists in the library." "Comic already exists in the library."
@@ -185,12 +157,11 @@ export default class ImportService extends Service {
| IExtractedComicBookCoverFile | IExtractedComicBookCoverFile
| IExtractComicBookCoverErrorResponse | IExtractComicBookCoverErrorResponse
| IExtractedComicBookCoverFile[] = await extractCoverFromFile2( | IExtractedComicBookCoverFile[] = await extractCoverFromFile2(
extractionOptions, extractionOptions
); );
// 2. Add to mongo // 2. Add to mongo
const dbImportResult = const dbImportResult = await this.broker.call(
await this.broker.call(
"import.rawImportToDB", "import.rawImportToDB",
{ {
importStatus: { importStatus: {
@@ -200,8 +171,7 @@ export default class ImportService extends Service {
score: "0", score: "0",
}, },
}, },
rawFileDetails: rawFileDetails: comicBookCoverMetadata,
comicBookCoverMetadata,
sourcedMetadata: { sourcedMetadata: {
comicvine: {}, comicvine: {},
}, },
@@ -219,10 +189,7 @@ export default class ImportService extends Service {
); );
} }
} catch (error) { } catch (error) {
console.error( console.error("Error importing comic books", error);
"Error importing comic books",
error
);
} }
}, },
}, },
@@ -247,14 +214,13 @@ export default class ImportService extends Service {
if ( if (
comicMetadata.sourcedMetadata.comicvine && comicMetadata.sourcedMetadata.comicvine &&
!isNil( !isNil(
comicMetadata.sourcedMetadata.comicvine comicMetadata.sourcedMetadata.comicvine.volume
.volume
) )
) { ) {
volumeDetails = volumeDetails =
await this.getComicVineVolumeMetadata( await this.getComicVineVolumeMetadata(
comicMetadata.sourcedMetadata comicMetadata.sourcedMetadata.comicvine
.comicvine.volume.api_detail_url .volume.api_detail_url
); );
comicMetadata.sourcedMetadata.comicvine.volumeInformation = comicMetadata.sourcedMetadata.comicvine.volumeInformation =
volumeDetails; volumeDetails;
@@ -301,10 +267,8 @@ export default class ImportService extends Service {
); );
} }
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
const volumeDetails = const volumeDetails = await volumeDetailsPromise;
await volumeDetailsPromise; matchedResult.volumeInformation = volumeDetails;
matchedResult.volumeInformation =
volumeDetails;
Comic.findByIdAndUpdate( Comic.findByIdAndUpdate(
comicObjectId, comicObjectId,
{ {
@@ -347,15 +311,12 @@ export default class ImportService extends Service {
{ {
$push: { $push: {
"acquisition.directconnect": { "acquisition.directconnect": {
resultId: resultId: ctx.params.resultId,
ctx.params.resultId, bundleId: ctx.params.bundleId,
bundleId:
ctx.params.bundleId,
directoryIds: directoryIds:
ctx.params.directoryIds, ctx.params.directoryIds,
searchInstanceId: searchInstanceId:
ctx.params ctx.params.searchInstanceId,
.searchInstanceId,
}, },
}, },
}, },
@@ -375,9 +336,7 @@ export default class ImportService extends Service {
getComicBooks: { getComicBooks: {
rest: "POST /getComicBooks", rest: "POST /getComicBooks",
params: {}, params: {},
async handler( async handler(ctx: Context<{ paginationOptions: object }>) {
ctx: Context<{ paginationOptions: object }>
) {
return await Comic.paginate( return await Comic.paginate(
{}, {},
ctx.params.paginationOptions ctx.params.paginationOptions
@@ -418,13 +377,9 @@ export default class ImportService extends Service {
]); ]);
// 2. Map over the aggregation result and get volume metadata from CV // 2. Map over the aggregation result and get volume metadata from CV
// 2a. Make a call to comicvine-service // 2a. Make a call to comicvine-service
volumesMetadata = map( volumesMetadata = map(volumes, async (volume) => {
volumes,
async (volume) => {
if (!isNil(volume.volumeURI)) { if (!isNil(volume.volumeURI)) {
return await ctx.call( return await ctx.call("comicvine.getVolumes", {
"comicvine.getVolumes",
{
volumeURI: volume.volumeURI, volumeURI: volume.volumeURI,
data: { data: {
format: "json", format: "json",
@@ -433,11 +388,9 @@ export default class ImportService extends Service {
limit: "1", limit: "1",
offset: "0", offset: "0",
}, },
});
} }
); });
}
}
);
return Promise.all(volumesMetadata); return Promise.all(volumesMetadata);
}, },
@@ -451,9 +404,7 @@ export default class ImportService extends Service {
.then((data) => { .then((data) => {
console.info(data); console.info(data);
const foo = fsExtra.emptyDirSync( const foo = fsExtra.emptyDirSync(
path.resolve( path.resolve(`${USERDATA_DIRECTORY}/covers`)
`${USERDATA_DIRECTORY}/covers`
)
); );
const foo2 = fsExtra.emptyDirSync( const foo2 = fsExtra.emptyDirSync(
path.resolve( path.resolve(
@@ -508,15 +459,10 @@ export default class ImportService extends Service {
}); });
resp.on("end", () => { resp.on("end", () => {
console.info( console.log(`${apiDetailURL} returned data.`)
data,
"HERE, BITCHES< HERE"
);
const volumeInformation = const volumeInformation =
JSON.parse(data); JSON.parse(data);
resolve( resolve(volumeInformation.results);
volumeInformation.results
);
}); });
} }
) )
@@ -526,9 +472,6 @@ export default class ImportService extends Service {
}); });
}), }),
}, },
}, });
schema
)
);
} }
} }

View File

@@ -15,14 +15,9 @@ import { io } from "./api.service";
const REDIS_URI = process.env.REDIS_URI || `redis://0.0.0.0:6379`; const REDIS_URI = process.env.REDIS_URI || `redis://0.0.0.0:6379`;
export default class LibraryQueueService extends Service { export default class LibraryQueueService extends Service {
public constructor( public constructor(public broker: ServiceBroker) {
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "libraryqueue" }
) {
super(broker); super(broker);
this.parseServiceSchema( this.parseServiceSchema({
Service.mergeSchemas(
{
name: "libraryqueue", name: "libraryqueue",
mixins: [BullMQMixin(REDIS_URI), DbMixin("comics", Comic)], mixins: [BullMQMixin(REDIS_URI), DbMixin("comics", Comic)],
settings: {}, settings: {},
@@ -83,16 +78,17 @@ export default class LibraryQueueService extends Service {
methods: {}, methods: {},
async started(): Promise<any> { async started(): Promise<any> {
io.on("connection", async (client) => { io.on("connection", async (client) => {
await this.getQueue( await this.getQueue("process.import").on(
"process.import" "failed",
).on("failed", async (job, error) => { async (job, error) => {
console.error( console.error(
`An error occured in 'process.import' queue on job id '${job.id}': ${error.message}` `An error occured in 'process.import' queue on job id '${job.id}': ${error.message}`
); );
}); }
await this.getQueue( );
"process.import" await this.getQueue("process.import").on(
).on("completed", async (job, res) => { "completed",
async (job, res) => {
client.emit("action", { client.emit("action", {
type: "LS_COVER_EXTRACTED", type: "LS_COVER_EXTRACTED",
result: res, result: res,
@@ -100,19 +96,18 @@ export default class LibraryQueueService extends Service {
console.info( console.info(
`Job with the id '${job.id}' completed.` `Job with the id '${job.id}' completed.`
); );
}); }
await this.getQueue( );
"process.import" await this.getQueue("process.import").on(
).on("stalled", async (job) => { "stalled",
async (job) => {
console.warn( console.warn(
`The job with the id '${job} got stalled!` `The job with the id '${job} got stalled!`
); );
}); }
});
},
},
schema
)
); );
});
},
});
} }
} }

View File

@@ -0,0 +1,60 @@
"use strict";
import {
Context,
Service,
ServiceBroker,
ServiceSchema,
Errors,
} from "moleculer";
// Elasticsearch client used by the search service below.
// NOTE(review): connection details and credentials are hard-coded here
// (node URL, username, password) — move these to environment variables
// before this leaves scaffold status. The same client config is duplicated
// in comic.model.js; consider a single shared module.
const { Client } = require("@elastic/elasticsearch");
const client = new Client({
node: "http://ghost:9200",
auth: {
username: "elastic",
password: "password",
},
});
import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model";
// NOTE(review): leftover debug output — logs the entire client object at
// module load time; remove once the elasticsearch wiring is verified.
console.log(client);
/**
 * Moleculer service exposing full-text search over the Comic collection
 * via the mongoose-elasticsearch-xp plugin (`Comic.esSearch`).
 *
 * NOTE(review): the class is named SettingsService but registers as the
 * "search" service — presumably copied from a settings scaffold; rename
 * when convenient (kept here to avoid touching the default export).
 */
export default class SettingsService extends Service {
	// @ts-ignore
	public constructor(
		public broker: ServiceBroker,
		schema: ServiceSchema<{}> = { name: "search" }
	) {
		super(broker);
		this.parseServiceSchema(
			Service.mergeSchemas(
				{
					name: "search",
					// Bug fix: the raw elasticsearch Client was previously
					// listed as a mixin, but it is not a Moleculer schema —
					// mergeSchemas cannot meaningfully merge it. The model's
					// mexp plugin already holds the ES client reference.
					mixins: [DbMixin("comics", Comic)],
					hooks: {},
					actions: {
						searchComic: {
							rest: "POST /searchComic",
							params: {},
							/**
							 * Runs a query_string search against the
							 * elasticsearch index backing the Comic model and
							 * returns the raw hit documents.
							 *
							 * NOTE(review): query is hard-coded to "batman"
							 * scaffold value — should come from ctx.params.
							 *
							 * @returns the elasticsearch hits array
							 */
							async handler(ctx: Context<{}>) {
								// Bug fix: the original fired esSearch() with a
								// .then() that only console.logged the hits, so
								// the action always resolved to undefined and
								// rejections went unhandled. Await and return
								// the results instead.
								const results = await Comic.esSearch({
									query_string: {
										query: "batman",
									},
								});
								return results.body.hits.hits;
							},
						},
					},
					methods: {},
				},
				schema
			)
		);
	}
}