🔧 Fixes to search query builder
@@ -1,8 +1,9 @@
"use strict";
import { Service, ServiceBroker, ServiceSchema } from "moleculer";
import { Kafka, EachMessagePayload, logLevel } from "kafkajs";
import { isUndefined } from "lodash";
import io from "socket.io-client";
import { isUndefined } from "lodash";

interface SearchResult {
  result: {
    id: string;
@@ -17,12 +18,14 @@ interface SearchResultPayload {
  updatedResult: SearchResult;
  instanceId: string;
}

export default class ComicProcessorService extends Service {
  private kafkaConsumer: any;
  private socketIOInstance: any;
  private kafkaProducer: any;
  private prowlarrResultsMap: Map<string, any> = new Map();
  private airDCPPSearchResults: Array<any> = [];
  private airDCPPSearchResults: Map<string, any[]> = new Map();
  private issuesToSearch: any = [];

  // @ts-ignore
  public constructor(
@@ -42,71 +45,118 @@ export default class ComicProcessorService extends Service {
        };
      },
      processJob: async (job: any) => {
        this.logger.info("Processing job:", job);
        const { volumeName, issue } = job;
        const { year } = this.parseStringDate(issue.cover_date || issue.coverDate);
        const settings: any = await this.broker.call("settings.getSettings", {
          settingsKey: "directConnect",
        });
        const hubs = settings.client.hubs.map((hub: any) => hub.value);
        const dcppSearchQuery = {
          query: {
            pattern: `${volumeName.replace(/#/g, "")} ${
              issue.issue_number || issue.issueNumber
            } ${year}`,
            extensions: ["cbz", "cbr", "cb7"],
          },
          hub_urls: hubs,
          priority: 5,
        };
        this.logger.info(
          "DC++ search query:",
          JSON.stringify(dcppSearchQuery, null, 4),
        );
        try {
          this.logger.info("Processing job:", JSON.stringify(job, null, 2));
          const { comic } = job;
          const { volume, issues, markEntireVolumeWanted } = comic.wanted;

          await this.broker.call("socket.search", {
            query: dcppSearchQuery,
            config: {
              hostname: "localhost:5600",
              protocol: "http",
              username: "user",
              password: "pass",
            },
            namespace: "/automated",
          });
          // If entire volume is marked as wanted, get their details from CV
          if (markEntireVolumeWanted) {
            this.issuesToSearch = await this.broker.call(
              "comicvine.getIssuesForVolume",
              { volumeId: volume.id },
            );
            this.logger.info(
              `The entire volume with id: ${volume.id} was marked as wanted.`,
            );
            this.logger.info(`Fetched issues for ${volume.id}:`);
            this.logger.info(`${this.issuesToSearch.length} issues to search`);
          } else {
            // Or proceed with `issues` from the wanted object.
            this.issuesToSearch = issues;
          }

          const prowlarrResults = await this.broker.call("prowlarr.search", {
            prowlarrQuery: {
              port: "9696",
              apiKey: "c4f42e265fb044dc81f7e88bd41c3367",
              offset: 0,
              categories: [7030],
              query: `${volumeName} ${issue.issueNumber} ${year}`,
              host: "localhost",
              limit: 100,
              type: "search",
              indexerIds: [2],
            },
          });
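          // For each wanted issue below, infer its issue number and year, then
          // build the DC++ and Prowlarr queries and cache the Prowlarr results.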
          for (const issue of this.issuesToSearch) {
            // issue number
            const inferredIssueNumber = issue.issueNumber
              ? issue.issueNumber
              : issue.issue_number;
            // year
            const { year } = this.parseStringDate(issue.coverDate);
            const inferredYear = year ? issue?.coverDate : issue.year;

            this.logger.info(
              "Prowlarr search results:",
              JSON.stringify(prowlarrResults, null, 4),
            );
            // Store prowlarr results in map using unique key
            const key = `${volumeName}-${issue.issueNumber}-${year}`;
            this.prowlarrResultsMap.set(key, prowlarrResults);
            const settings: any = await this.broker.call("settings.getSettings", {
              settingsKey: "directConnect",
            });
            const hubs = settings.client.hubs.map((hub: any) => hub.value);
            const dcppSearchQuery = {
              query: {
                pattern: `${volume.name.replace(
                  /#/g,
                  "",
                )} ${inferredIssueNumber} ${inferredYear}`,
                extensions: ["cbz", "cbr", "cb7"],
              },
              hub_urls: hubs,
              priority: 5,
            };
            this.logger.info(
              "DC++ search query:",
              JSON.stringify(dcppSearchQuery, null, 4),
            );

            await this.broker.call("socket.search", {
              query: dcppSearchQuery,
              config: {
                hostname: "localhost:5600",
                protocol: "http",
                username: "user",
                password: "pass",
              },
              namespace: "/automated",
            });

            const prowlarrResults = await this.broker.call("prowlarr.search", {
              prowlarrQuery: {
                port: "9696",
                apiKey: "c4f42e265fb044dc81f7e88bd41c3367",
                offset: 0,
                categories: [7030],
                query: `${volume.name} ${issue.issueNumber} ${year}`,
                host: "localhost",
                limit: 100,
                type: "search",
                indexerIds: [2],
              },
            });

            this.logger.info(
              "Prowlarr search results:",
              JSON.stringify(prowlarrResults, null, 4),
            );

            // Store prowlarr results in map using unique key
            const key = `${volume.name}-${issue.issueNumber}-${year}`;
            this.prowlarrResultsMap.set(key, prowlarrResults);
          }
        } catch (error) {
          this.logger.error("Error processing job:", error);
        }
      },
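      // Publish the combined DC++ and Prowlarr results to Kafka and notify the UI over Socket.IO.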
      produceResultsToKafka: async (dcppResults: any, prowlarrResults: any) => {
        const results = { dcppResults, prowlarrResults };
        await this.kafkaProducer.send({
          topic: "comic-search-results",
          messages: [{ value: JSON.stringify(results) }],
        });
        this.logger.info(
          "Produced results to Kafka:",
          JSON.stringify(results, null, 4),
        );
        try {
          await this.kafkaProducer.send({
            topic: "comic-search-results",
            messages: [{ value: JSON.stringify(results) }],
          });
          this.logger.info(
            "Produced results to Kafka:",
            JSON.stringify(results, null, 4),
          );
          // socket event for UI
          await this.broker.call("socket.broadcast", {
            namespace: "/",
            event: "searchResultsAvailable",
            args: [
              {
                dcppResults,
              },
            ],
          });
        } catch (error) {
          this.logger.error("Error producing results to Kafka:", error);
        }
      },
    },
    async started() {
@@ -117,9 +167,22 @@ export default class ComicProcessorService extends Service {
      });
      this.kafkaConsumer = kafka.consumer({ groupId: "comic-processor-group" });
      this.kafkaProducer = kafka.producer();

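      // Log Kafka consumer lifecycle events (crash, connect, disconnect, network timeouts).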
      this.kafkaConsumer.on("consumer.crash", (event: any) => {
        this.logger.error("Consumer crash:", event);
      });
      this.kafkaConsumer.on("consumer.connect", () => {
        this.logger.info("Consumer connected");
      });
      this.kafkaConsumer.on("consumer.disconnect", () => {
        this.logger.info("Consumer disconnected");
      });
      this.kafkaConsumer.on("consumer.network.request_timeout", () => {
        this.logger.warn("Consumer network request timeout");
      });

      await this.kafkaConsumer.connect();
      await this.kafkaProducer.connect();
      this.logger.info("Kafka consumer and producer connected successfully.");

      await this.kafkaConsumer.subscribe({
        topic: "comic-search-jobs",
@@ -130,10 +193,6 @@ export default class ComicProcessorService extends Service {
      eachMessage: async ({ topic, partition, message }: EachMessagePayload) => {
        if (message.value) {
          const job = JSON.parse(message.value.toString());
          this.logger.info(
            "Consumed job from Kafka:",
            JSON.stringify(job, null, 4),
          );
          await this.processJob(job);
        } else {
          this.logger.warn("Received message with null value");
@@ -149,6 +208,7 @@ export default class ComicProcessorService extends Service {
        this.logger.info("Socket.IO connected successfully.");
      });

      // Handle searchResultAdded event
      this.socketIOInstance.on(
        "searchResultAdded",
        ({ groupedResult, instanceId }: SearchResultPayload) => {
@@ -156,13 +216,14 @@ export default class ComicProcessorService extends Service {
            "Received search result added:",
            JSON.stringify(groupedResult, null, 4),
          );
          this.airDCPPSearchResults.push({
            groupedResult: groupedResult.result,
            instanceId,
          });
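          // Group AirDC++ results by search instance id (one bucket per search).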
          if (!this.airDCPPSearchResults.has(instanceId)) {
            this.airDCPPSearchResults.set(instanceId, []);
          }
          this.airDCPPSearchResults.get(instanceId).push(groupedResult.result);
        },
      );

      // Handle searchResultUpdated event
      this.socketIOInstance.on(
        "searchResultUpdated",
        async ({ updatedResult, instanceId }: SearchResultPayload) => {
@@ -170,50 +231,30 @@ export default class ComicProcessorService extends Service {
            "Received search result update:",
            JSON.stringify(updatedResult, null, 4),
          );
          if (
            !isUndefined(updatedResult.result) &&
            !isUndefined(this.airDCPPSearchResults.result)
          ) {
            const toReplaceIndex = this.airDCPPSearchResults.findIndex(
              (element: any) => {
                return element?.result.id === updatedResult.result.id;
              },
          const resultsForInstance = this.airDCPPSearchResults.get(instanceId);
          if (resultsForInstance) {
            const toReplaceIndex = resultsForInstance.findIndex(
              (element: any) => element.id === updatedResult.result.id,
            );
            this.airDCPPSearchResults[toReplaceIndex] = {
              result: updatedResult.result,
              instanceId,
            };
            if (toReplaceIndex !== -1) {
              resultsForInstance[toReplaceIndex] = updatedResult.result;
            }
          }
        },
      );
      this.socketIOInstance.on("searchComplete", async () => {
        // Ensure results are not empty before producing to Kafka
        if (this.airDCPPSearchResults.length > 0) {
          const results = this.airDCPPSearchResults.reduce((acc: any, item: any) => {
            const key = item.instanceId;
            if (!acc[key]) {
              acc[key] = [];
            }
            acc[key].push(item);
            return acc;
          }, {});
          await this.produceResultsToKafka(results, []);
        } else {
          this.logger.warn(
            "AirDC++ search results are empty, not producing to Kafka.",
          );
        }

      // Handle searchComplete event
      this.socketIOInstance.on("searchComplete", async (instanceId: string) => {
        this.logger.info(`Search complete for instance ID ${instanceId}`);
        await this.produceResultsToKafka(instanceId);
      });
    },
    async stopped() {
      await this.kafkaConsumer.disconnect();
      await this.kafkaProducer.disconnect();
      this.logger.info("Kafka consumer and producer disconnected successfully.");

      // Close Socket.IO connection
      if (this.socketIOInstance) {
        this.socketIOInstance.close();
        this.logger.info("Socket.IO disconnected successfully.");
      }
    },
  });