🪳 kafka for handling dc++ download jobs
package-lock.json (generated, 17 lines changed)
@@ -10,6 +10,7 @@
     "dependencies": {
       "@robertklep/qbittorrent": "^1.0.1",
       "ioredis": "^5.0.0",
+      "kafkajs": "^2.2.4",
       "moleculer": "^0.14.27",
       "moleculer-web": "^0.10.5",
       "parse-torrent": "^9.1.5"
@@ -17,6 +18,7 @@
     "devDependencies": {
       "@jest/globals": "^29.3.1",
       "@types/jest": "^29.2.3",
+      "@types/lodash": "^4.17.4",
       "@types/node": "^18.11.9",
       "@types/parse-torrent": "^5.8.7",
       "@typescript-eslint/eslint-plugin": "^5.44.0",
@@ -31,6 +33,7 @@
       "eslint-plugin-import": "^2.26.0",
       "eslint-plugin-jest": "^27.1.6",
       "jest": "^29.3.1",
+      "lodash": "^4.17.21",
       "moleculer-repl": "^0.7.3",
       "prettier": "^2.8.0",
       "qbittorrent-api-v2": "^1.2.2",
@@ -1486,6 +1489,12 @@
       "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
       "dev": true
     },
+    "node_modules/@types/lodash": {
+      "version": "4.17.4",
+      "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.4.tgz",
+      "integrity": "sha512-wYCP26ZLxaT3R39kiN2+HcJ4kTd3U1waI/cY7ivWYqFP6pW3ZNpvi6Wd6PHZx7T/t8z0vlkXMg3QYLa7DZ/IJQ==",
+      "dev": true
+    },
     "node_modules/@types/magnet-uri": {
       "version": "5.1.5",
       "resolved": "https://registry.npmjs.org/@types/magnet-uri/-/magnet-uri-5.1.5.tgz",
@@ -5462,6 +5471,14 @@
         "node": ">=6"
       }
     },
+    "node_modules/kafkajs": {
+      "version": "2.2.4",
+      "resolved": "https://registry.npmjs.org/kafkajs/-/kafkajs-2.2.4.tgz",
+      "integrity": "sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==",
+      "engines": {
+        "node": ">=14.0.0"
+      }
+    },
     "node_modules/keyv": {
       "version": "4.5.3",
       "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.3.tgz",
package.json

@@ -23,6 +23,7 @@
   "devDependencies": {
     "@jest/globals": "^29.3.1",
     "@types/jest": "^29.2.3",
+    "@types/lodash": "^4.17.4",
     "@types/node": "^18.11.9",
     "@types/parse-torrent": "^5.8.7",
     "@typescript-eslint/eslint-plugin": "^5.44.0",
@@ -37,6 +38,7 @@
     "eslint-plugin-import": "^2.26.0",
     "eslint-plugin-jest": "^27.1.6",
     "jest": "^29.3.1",
+    "lodash": "^4.17.21",
     "moleculer-repl": "^0.7.3",
     "prettier": "^2.8.0",
     "qbittorrent-api-v2": "^1.2.2",
@@ -48,6 +50,7 @@
   "dependencies": {
     "@robertklep/qbittorrent": "^1.0.1",
     "ioredis": "^5.0.0",
+    "kafkajs": "^2.2.4",
     "moleculer": "^0.14.27",
     "moleculer-web": "^0.10.5",
     "parse-torrent": "^9.1.5"
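kafkajs is the new runtime dependency; lodash and @types/lodash back the new consumer service further down. A minimal sketch to check that kafkajs can reach a broker, assuming the same localhost:9092 address the services use (the file name and admin-client usage are illustrative, not part of the commit):

// smoke-test.ts: hypothetical connectivity check for the new kafkajs dependency.
import { Kafka } from "kafkajs";

const kafka = new Kafka({ clientId: "smoke-test", brokers: ["localhost:9092"] });

async function main() {
  const admin = kafka.admin();
  await admin.connect();
  // "comic-search-jobs" and "comic-search-results" should appear here once created.
  console.log("Topics:", await admin.listTopics());
  await admin.disconnect();
}

main().catch(console.error);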
services/autodownload.service.ts

@@ -1,8 +1,10 @@
 "use strict";
 import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
-import io from "socket.io-client";
+import { Kafka } from "kafkajs";
 
 export default class AutoDownloadService extends Service {
+  private kafkaProducer: any;
+
   // @ts-ignore
   public constructor(
     public broker: ServiceBroker,
@@ -11,141 +13,74 @@ export default class AutoDownloadService extends Service {
     super(broker);
     this.parseServiceSchema({
       name: "autodownload",
-      mixins: [],
-      hooks: {},
       actions: {
         searchWantedComics: {
           rest: "POST /searchWantedComics",
           handler: async (ctx: Context<{}>) => {
-            // 1. iterate through the wanted comic objects, and:
-            // 1a. Orchestrate all issues from ComicVine if the entire volume is wanted
-            // 1b. Just the issues in "wanted.issues[]"
-            const wantedComics: any = await this.broker.call(
-              "library.getComicsMarkedAsWanted",
-              {},
-            );
-
-            // 2a. Get the list of hubs from AirDC++
-            const data: any = await this.broker.call("settings.getSettings", {
-              settingsKey: "directConnect",
-            });
-            const { hubs } = data?.client;
-            console.log("HUBZZZZZ", hubs);
-            // Iterate through the list of wanted comics
-            wantedComics.forEach(async (comic: any) => {
-              let issuesToSearch: any = [];
-              if (comic.wanted.markEntireVolumeAsWanted) {
-                // Fetch all issues from ComicVine if the entire volume is wanted
-                issuesToSearch = await this.broker.call(
-                  "comicvine.getIssuesForVolume",
-                  {
-                    volumeId: comic.wanted.volume.id,
-                  },
-                );
-              } else if (comic.wanted.issues && comic.wanted.issues.length > 0) {
-                // 1b. Just the issues in "wanted.issues[]"
-                issuesToSearch = {
-                  issues: comic.wanted.issues,
-                  volumeName: comic.wanted.volume?.name,
-                };
-              }
-              for (const issue of issuesToSearch.issues) {
-                // 2. construct the search queries
-
-                // 2b. for AirDC++ search, with the volume name, issueId and cover_date
-                const { year } = this.parseStringDate(issue.coverDate);
-
-                const dcppSearchQuery = {
-                  query: {
-                    pattern: `${issuesToSearch.volumeName.replace(/#/g, "")} ${
-                      issue.issueNumber
-                    } ${year}`,
-                    extensions: ["cbz", "cbr", "cb7"],
-                  },
-                  hub_urls: hubs.map((hub: any) => hub.value),
-                  priority: 5,
-                };
-                // Perform the AirDC++ search
-                const dcppResults = await this.broker.call("socket.search", {
-                  query: dcppSearchQuery,
-                  config: {
-                    hostname: "localhost:5600",
-                    protocol: "http",
-                    username: "user",
-                    password: "pass",
-                  },
-                  namespace: "/automated",
-                });
-                this.socketIOInstance.on("searchResultUpdated", (data: any) => {
-                  console.log("Hyaar we go", data);
-                });
-                // const dcppResults = await ctx.call("airdcpp.search", {
-                //   dcppSearchQuery,
-                // });
-
-                // 2b. for Prowlarr search, with the volume name, issueId and cover_date
-                const prowlarrQuery = {
-                  port: "9696",
-                  apiKey: "c4f42e265fb044dc81f7e88bd41c3367",
-                  offset: 0,
-                  categories: [7030],
-                  query: `${issuesToSearch.volumeName} ${issue.issueNumber} ${year}`,
-                  host: "localhost",
-                  limit: 100,
-                  type: "search",
-                  indexerIds: [2],
-                };
-
-                // Perform the Prowlarr search
-                const prowlarrResults = await this.broker.call("prowlarr.search", {
-                  prowlarrQuery,
-                });
-
-                // Process results here or after the loop
-                console.log("DCPP Results: ", dcppResults);
-                console.log("Prowlarr Results: ", prowlarrResults);
-              }
-            });
-          },
-        },
-        determineDownloadChannel: {
-          rest: "POST /determineDownloadChannel",
-          handler: async (ctx: Context<{}>) => {
-            // 1. Parse the incoming search query
-            //    to make sure that it is well-formed
-            //    At the very least, it should have name, year, number
-            // 2. Choose between download mediums based on user-preference?
-            //    possible choices are: DC++, Torrent
-            // 3. Perform the search on those media with the aforementioned search query
-            // 4. Choose a subset of relevant search results,
-            //    and score them
-            // 5. Download the highest-scoring, relevant result
+            try {
+              const wantedComics: any = await this.broker.call(
+                "library.getComicsMarkedAsWanted",
+                {},
+              );
+              this.logger.info("Fetched wanted comics:", wantedComics.length);
+
+              for (const comic of wantedComics) {
+                if (comic.wanted.markEntireVolumeWanted) {
+                  const issues: any = await this.broker.call(
+                    "comicvine.getIssuesForVolume",
+                    {
+                      volumeId: comic.wanted.volume.id,
+                    },
+                  );
+                  for (const issue of issues) {
+                    await this.produceJobToKafka(
+                      comic.wanted.volume.name,
+                      issue,
+                    );
+                  }
+                } else if (comic.wanted.issues && comic.wanted.issues.length > 0) {
+                  for (const issue of comic.wanted.issues) {
+                    await this.produceJobToKafka(
+                      comic.wanted.volume?.name,
+                      issue,
+                    );
+                  }
+                }
+              }
+            } catch (error) {
+              this.logger.error("Error in searchWantedComics:", error);
+              throw new Errors.MoleculerError(
+                "Failed to search wanted comics.",
+                500,
+                "SEARCH_WANTED_COMICS_ERROR",
+                { error },
+              );
+            }
           },
         },
       },
       methods: {
-        parseStringDate: (dateString: string) => {
-          const date = new Date(dateString);
-
-          // Get the year, month, and day
-          const year = date.getFullYear(); // 2022
-          const month = date.getMonth() + 1; // December is 11 in Date object (0-indexed), so add 1 to make it human-readable
-          const day = date.getDate(); // 1
-          return { year, month, day };
+        produceJobToKafka: async (volumeName: string, issue: any) => {
+          const job = { volumeName, issue };
+          await this.kafkaProducer.send({
+            topic: "comic-search-jobs",
+            messages: [{ value: JSON.stringify(job) }],
+          });
+          this.logger.info("Produced job to Kafka:", job);
         },
       },
       async started() {
-        this.socketIOInstance = io("ws://localhost:3001/automated", {
-          transports: ["websocket"],
-          withCredentials: true,
-        });
-        this.socketIOInstance.on("connect", (data: any) => {
-          console.log("connected", data);
-        });
-
-        this.socketIOInstance.on("searchResultAdded", (data: any) => {
-          console.log("Received searchResultUpdated event:", data);
-        });
+        const kafka = new Kafka({
+          clientId: "comic-search-service",
+          brokers: ["localhost:9092"],
+        });
+        this.kafkaProducer = kafka.producer();
+        await this.kafkaProducer.connect();
+        this.logger.info("Kafka producer connected successfully.");
+      },
+      async stopped() {
+        await this.kafkaProducer.disconnect();
+        this.logger.info("Kafka producer disconnected successfully.");
       },
     });
   }
 }
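The handler now only enqueues work: the payload published to comic-search-jobs is { volumeName, issue } serialized as JSON, consumed by the new processor service below. A minimal standalone producer sketch under that assumption (the issue shape and sample values are illustrative, not part of the commit):

// enqueue-job.ts: hypothetical standalone producer mirroring produceJobToKafka.
import { Kafka } from "kafkajs";

const kafka = new Kafka({
  clientId: "comic-search-service",
  brokers: ["localhost:9092"],
});

async function enqueueJob(
  volumeName: string,
  issue: { issueNumber: string; coverDate: string },
) {
  const producer = kafka.producer();
  await producer.connect();
  await producer.send({
    topic: "comic-search-jobs",
    messages: [{ value: JSON.stringify({ volumeName, issue }) }],
  });
  await producer.disconnect();
}

// Sample volume and issue values are illustrative only.
enqueueJob("Saga", { issueNumber: "1", coverDate: "2012-03-14" }).catch(
  console.error,
);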
services/comicprocessor.service.ts (new file, 188 lines)

@@ -0,0 +1,188 @@
"use strict";
|
||||||
|
import { Service, ServiceBroker, ServiceSchema } from "moleculer";
|
||||||
|
import { Kafka, EachMessagePayload, logLevel } from "kafkajs";
|
||||||
|
import { isUndefined } from "lodash";
|
||||||
|
import io from "socket.io-client";
|
||||||
|
|
||||||
|
export default class ComicProcessorService extends Service {
|
||||||
|
private kafkaConsumer: any;
|
||||||
|
private socketIOInstance: any;
|
||||||
|
private kafkaProducer: any;
|
||||||
|
private prowlarrResultsMap: Map<string, any> = new Map();
|
||||||
|
private airDCPPSearchResults: Array<any> = [];
|
||||||
|
|
||||||
|
// @ts-ignore
|
||||||
|
public constructor(
|
||||||
|
public broker: ServiceBroker,
|
||||||
|
schema: ServiceSchema<{}> = { name: "comicProcessor" },
|
||||||
|
) {
|
||||||
|
super(broker);
|
||||||
|
this.parseServiceSchema({
|
||||||
|
name: "comicProcessor",
|
||||||
|
methods: {
|
||||||
|
parseStringDate: (dateString: string) => {
|
||||||
|
const date = new Date(dateString);
|
||||||
|
return {
|
||||||
|
year: date.getFullYear(),
|
||||||
|
month: date.getMonth() + 1,
|
||||||
|
day: date.getDate(),
|
||||||
|
};
|
||||||
|
},
|
||||||
|
processJob: async (job: any) => {
|
||||||
|
this.logger.info("Processing job:", job);
|
||||||
|
const { volumeName, issue } = job;
|
||||||
|
const { year } = this.parseStringDate(issue.cover_date || issue.coverDate);
|
||||||
|
const settings: any = await this.broker.call("settings.getSettings", {
|
||||||
|
settingsKey: "directConnect",
|
||||||
|
});
|
||||||
|
const hubs = settings.client.hubs.map((hub: any) => hub.value);
|
||||||
|
const dcppSearchQuery = {
|
||||||
|
query: {
|
||||||
|
pattern: `${volumeName.replace(/#/g, "")} ${
|
||||||
|
issue.issue_number || issue.issueNumber
|
||||||
|
} ${year}`,
|
||||||
|
extensions: ["cbz", "cbr", "cb7"],
|
||||||
|
},
|
||||||
|
hub_urls: hubs,
|
||||||
|
priority: 5,
|
||||||
|
};
|
||||||
|
this.logger.info(
|
||||||
|
"DC++ search query:",
|
||||||
|
JSON.stringify(dcppSearchQuery, null, 4),
|
||||||
|
);
|
||||||
|
|
||||||
|
await this.broker.call("socket.search", {
|
||||||
|
query: dcppSearchQuery,
|
||||||
|
config: {
|
||||||
|
hostname: "localhost:5600",
|
||||||
|
protocol: "http",
|
||||||
|
username: "user",
|
||||||
|
password: "pass",
|
||||||
|
},
|
||||||
|
namespace: "/automated",
|
||||||
|
});
|
||||||
|
|
||||||
|
const prowlarrResults = await this.broker.call("prowlarr.search", {
|
||||||
|
prowlarrQuery: {
|
||||||
|
port: "9696",
|
||||||
|
apiKey: "c4f42e265fb044dc81f7e88bd41c3367",
|
||||||
|
offset: 0,
|
||||||
|
categories: [7030],
|
||||||
|
query: `${volumeName} ${issue.issueNumber} ${year}`,
|
||||||
|
host: "localhost",
|
||||||
|
limit: 100,
|
||||||
|
type: "search",
|
||||||
|
indexerIds: [2],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.info(
|
||||||
|
"Prowlarr search results:",
|
||||||
|
JSON.stringify(prowlarrResults, null, 4),
|
||||||
|
);
|
||||||
|
// Store prowlarr results in map using unique key
|
||||||
|
const key = `${volumeName}-${issue.issueNumber}-${year}`;
|
||||||
|
this.prowlarrResultsMap.set(key, prowlarrResults);
|
||||||
|
},
|
||||||
|
produceResultsToKafka: async (dcppResults: any, prowlarrResults: any) => {
|
||||||
|
const results = { dcppResults, prowlarrResults };
|
||||||
|
await this.kafkaProducer.send({
|
||||||
|
topic: "comic-search-results",
|
||||||
|
messages: [{ value: JSON.stringify(results) }],
|
||||||
|
});
|
||||||
|
this.logger.info(
|
||||||
|
"Produced results to Kafka:",
|
||||||
|
JSON.stringify(results, null, 4),
|
||||||
|
);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
async started() {
|
||||||
|
const kafka = new Kafka({
|
||||||
|
clientId: "comic-processor-service",
|
||||||
|
brokers: ["localhost:9092"],
|
||||||
|
logLevel: logLevel.INFO,
|
||||||
|
});
|
||||||
|
this.kafkaConsumer = kafka.consumer({ groupId: "comic-processor-group" });
|
||||||
|
this.kafkaProducer = kafka.producer();
|
||||||
|
await this.kafkaConsumer.connect();
|
||||||
|
await this.kafkaProducer.connect();
|
||||||
|
this.logger.info("Kafka consumer and producer connected successfully.");
|
||||||
|
|
||||||
|
await this.kafkaConsumer.subscribe({
|
||||||
|
topic: "comic-search-jobs",
|
||||||
|
fromBeginning: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.kafkaConsumer.run({
|
||||||
|
eachMessage: async ({ topic, partition, message }: EachMessagePayload) => {
|
||||||
|
if (message.value) {
|
||||||
|
const job = JSON.parse(message.value.toString());
|
||||||
|
this.logger.info(
|
||||||
|
"Consumed job from Kafka:",
|
||||||
|
JSON.stringify(job, null, 4),
|
||||||
|
);
|
||||||
|
await this.processJob(job);
|
||||||
|
} else {
|
||||||
|
this.logger.warn("Received message with null value");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
this.socketIOInstance = io("ws://localhost:3001/automated", {
|
||||||
|
transports: ["websocket"],
|
||||||
|
withCredentials: true,
|
||||||
|
});
|
||||||
|
this.socketIOInstance.on("connect", () => {
|
||||||
|
this.logger.info("Socket.IO connected successfully.");
|
||||||
|
});
|
||||||
|
|
||||||
|
this.socketIOInstance.on("searchResultAdded", (data: any) => {
|
||||||
|
this.logger.info(
|
||||||
|
"Received search result added:",
|
||||||
|
JSON.stringify(data, null, 4),
|
||||||
|
);
|
||||||
|
this.airDCPPSearchResults.push(data);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.socketIOInstance.on("searchResultUpdated", async (data: any) => {
|
||||||
|
this.logger.info(
|
||||||
|
"Received search result update:",
|
||||||
|
JSON.stringify(data, null, 4),
|
||||||
|
);
|
||||||
|
+          if (!isUndefined(data.result)) {
+            const toReplaceIndex = this.airDCPPSearchResults.findIndex(
+              (element: any) => {
+                return element?.result?.id === data.result.id;
+              },
+            );
+            // Only replace when a matching result is already tracked.
+            if (toReplaceIndex !== -1) {
+              this.airDCPPSearchResults[toReplaceIndex] = data;
+            }
+          }
+        });
+        this.socketIOInstance.on("searchComplete", async () => {
+          // Ensure results are not empty before producing to Kafka
+          if (this.airDCPPSearchResults.length > 0) {
+            await this.produceResultsToKafka(this.airDCPPSearchResults, []);
+          } else {
+            this.logger.warn(
+              "AirDC++ search results are empty, not producing to Kafka.",
+            );
+          }
+        });
+      },
+      async stopped() {
+        await this.kafkaConsumer.disconnect();
+        await this.kafkaProducer.disconnect();
+        this.logger.info("Kafka consumer and producer disconnected successfully.");
+
+        // Close Socket.IO connection
+        if (this.socketIOInstance) {
+          this.socketIOInstance.close();
+          this.logger.info("Socket.IO disconnected successfully.");
+        }
+      },
+    });
+  }
+}
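To watch the far end of the pipeline, a minimal consumer sketch for the comic-search-results topic the processor publishes to (client id and group id are illustrative, not part of the commit):

// watch-results.ts: hypothetical observer for the comic-search-results topic.
import { Kafka } from "kafkajs";

const kafka = new Kafka({
  clientId: "results-watcher",
  brokers: ["localhost:9092"],
});

async function main() {
  const consumer = kafka.consumer({ groupId: "results-watcher-group" });
  await consumer.connect();
  await consumer.subscribe({ topic: "comic-search-results", fromBeginning: true });
  await consumer.run({
    eachMessage: async ({ message }) => {
      if (message.value) {
        // Payload shape mirrors produceResultsToKafka: { dcppResults, prowlarrResults }.
        const { dcppResults, prowlarrResults } = JSON.parse(message.value.toString());
        console.log("DC++:", dcppResults.length, "Prowlarr:", prowlarrResults.length);
      }
    },
  });
}

main().catch(console.error);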