🔧 kafka-powered autodownload loop
@@ -2,6 +2,25 @@
 import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
 import { Kafka } from "kafkajs";
 
+interface Comic {
+  wanted: {
+    markEntireVolumeWanted?: boolean;
+    issues?: Array<any>;
+    volume: {
+      id: string;
+      name: string;
+    };
+  };
+}
+
+interface PaginatedResult {
+  wantedComics: Comic[];
+  total: number;
+  page: number;
+  limit: number;
+  pages: number;
+}
+
 export default class AutoDownloadService extends Service {
   private kafkaProducer: any;
 
@@ -18,13 +37,29 @@ export default class AutoDownloadService extends Service {
       rest: "POST /searchWantedComics",
       handler: async (ctx: Context<{}>) => {
         try {
-          const wantedComics: any = await this.broker.call(
-            "library.getComicsMarkedAsWanted",
-            {},
-          );
-          this.logger.info("Fetched wanted comics:", wantedComics.length);
+          let page = 1;
+          const limit = this.BATCH_SIZE;
 
-          for (const comic of wantedComics) {
+          while (true) {
+            const result: PaginatedResult = await this.broker.call(
+              "library.getComicsMarkedAsWanted",
+              { page, limit },
+            );
+
+            if (!result || !result.wantedComics) {
+              this.logger.error("Invalid response structure", result);
+              throw new Errors.MoleculerError(
+                "Invalid response structure from getComicsMarkedAsWanted",
+                500,
+                "INVALID_RESPONSE_STRUCTURE",
+              );
+            }
+
+            this.logger.info(
+              `Fetched ${result.wantedComics.length} comics from page ${page} of ${result.pages}`,
+            );
+
+            for (const comic of result.wantedComics) {
               if (comic.wanted.markEntireVolumeWanted) {
                 const issues: any = await this.broker.call(
                   "comicvine.getIssuesForVolume",
@@ -38,7 +73,10 @@ export default class AutoDownloadService extends Service {
                     issue,
                   );
                 }
-              } else if (comic.wanted.issues && comic.wanted.issues.length > 0) {
+              } else if (
+                comic.wanted.issues &&
+                comic.wanted.issues.length > 0
+              ) {
                 for (const issue of comic.wanted.issues) {
                   await this.produceJobToKafka(
                     comic.wanted.volume?.name,
@@ -47,6 +85,12 @@ export default class AutoDownloadService extends Service {
                   }
                 }
+              }
+
+              if (page >= result.pages) break;
+              page += 1;
+            }
+
           return { success: true, message: "Processing started." };
         } catch (error) {
           this.logger.error("Error in searchWantedComics:", error);
           throw new Errors.MoleculerError(
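The hunks above lean on two members this diff leaves untouched: `this.BATCH_SIZE` and `this.produceJobToKafka(...)`. For orientation only, here is a minimal sketch of what that producer helper could look like with kafkajs; the broker address, clientId, topic name, and message shape are assumptions, not taken from this commit:

import { Kafka, Producer } from "kafkajs";

// Sketch only: broker address, clientId, and the "comic-download-jobs"
// topic are assumed, not part of this diff.
const kafka = new Kafka({ clientId: "auto-download", brokers: ["localhost:9092"] });
const producer: Producer = kafka.producer();

// Rough shape of produceJobToKafka, given the arguments the hunks above
// pass in (a volume name plus one issue).
async function produceJobToKafka(volumeName: string, issue: any): Promise<void> {
  await producer.connect();
  await producer.send({
    topic: "comic-download-jobs",
    messages: [
      {
        key: volumeName, // keying by volume keeps a volume's issues on one partition
        value: JSON.stringify({ volumeName, issue }),
      },
    ],
  });
}

In a real Moleculer service the producer would more likely connect once in the `started()` hook and disconnect in `stopped()`, rather than reconnecting per job as this sketch does. The remaining hunks below belong to the second file in this commit, the consumer-side ComicProcessorService.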
@@ -3,7 +3,20 @@ import { Service, ServiceBroker, ServiceSchema } from "moleculer";
 import { Kafka, EachMessagePayload, logLevel } from "kafkajs";
 import { isUndefined } from "lodash";
 import io from "socket.io-client";
-
+interface SearchResult {
+  result: {
+    id: string;
+    // Add other relevant fields
+  };
+  search_id: string;
+  // Add other relevant fields
+}
+
+interface SearchResultPayload {
+  groupedResult: SearchResult;
+  updatedResult: SearchResult;
+  instanceId: string;
+}
 export default class ComicProcessorService extends Service {
   private kafkaConsumer: any;
   private socketIOInstance: any;
@@ -136,35 +149,55 @@ export default class ComicProcessorService extends Service {
       this.logger.info("Socket.IO connected successfully.");
     });
 
-    this.socketIOInstance.on("searchResultAdded", (data: any) => {
-      this.logger.info(
-        "Received search result added:",
-        JSON.stringify(data, null, 4),
-      );
-      this.airDCPPSearchResults.push(data);
-    });
+    this.socketIOInstance.on(
+      "searchResultAdded",
+      ({ groupedResult, instanceId }: SearchResultPayload) => {
+        this.logger.info(
+          "Received search result added:",
+          JSON.stringify(groupedResult, null, 4),
+        );
+        this.airDCPPSearchResults.push({
+          groupedResult: groupedResult.result,
+          instanceId,
+        });
+      },
+    );
 
-    this.socketIOInstance.on("searchResultUpdated", async (data: any) => {
-      this.logger.info(
-        "Received search result update:",
-        JSON.stringify(data, null, 4),
-      );
-      if (
-        !isUndefined(data.result) &&
-        !isUndefined(this.airDCPPSearchResults.result)
-      ) {
-        const toReplaceIndex = this.airDCPPSearchResults.findIndex(
-          (element: any) => {
-            return element?.result.id === data.result.id;
-          },
-        );
-        this.airDCPPSearchResults[toReplaceIndex] = data.result;
-      }
-    });
+    this.socketIOInstance.on(
+      "searchResultUpdated",
+      async ({ updatedResult, instanceId }: SearchResultPayload) => {
+        this.logger.info(
+          "Received search result update:",
+          JSON.stringify(updatedResult, null, 4),
+        );
+        if (
+          !isUndefined(updatedResult.result) &&
+          !isUndefined(this.airDCPPSearchResults.result)
+        ) {
+          const toReplaceIndex = this.airDCPPSearchResults.findIndex(
+            (element: any) => {
+              return element?.result.id === updatedResult.result.id;
+            },
+          );
+          this.airDCPPSearchResults[toReplaceIndex] = {
+            result: updatedResult.result,
+            instanceId,
+          };
+        }
+      },
+    );
     this.socketIOInstance.on("searchComplete", async () => {
       // Ensure results are not empty before producing to Kafka
       if (this.airDCPPSearchResults.length > 0) {
-        await this.produceResultsToKafka(this.airDCPPSearchResults, []);
+        const results = this.airDCPPSearchResults.reduce((acc: any, item: any) => {
+          const key = item.instanceId;
+          if (!acc[key]) {
+            acc[key] = [];
+          }
+          acc[key].push(item);
+          return acc;
+        }, {});
+        await this.produceResultsToKafka(results, []);
       } else {
         this.logger.warn(
          "AirDC++ search results are empty, not producing to Kafka.",
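The Kafka side of this service is elided from the diff; only the `kafkaConsumer` field and the kafkajs imports are visible. As a rough sketch of what consuming the grouped results could look like — the broker address, clientId, groupId, and "search-results" topic are assumptions, none of them appear in this commit:

import { Kafka, EachMessagePayload, logLevel } from "kafkajs";

// Sketch only: every name below is assumed, not taken from the diff.
const kafka = new Kafka({
  clientId: "comic-processor",
  brokers: ["localhost:9092"],
  logLevel: logLevel.ERROR,
});
const consumer = kafka.consumer({ groupId: "comic-processor-group" });

async function run(): Promise<void> {
  await consumer.connect();
  await consumer.subscribe({ topic: "search-results", fromBeginning: false });
  await consumer.run({
    eachMessage: async ({ message }: EachMessagePayload) => {
      // Each message would carry one instanceId-keyed map of AirDC++ results,
      // matching the reduce() grouping produced in the hunk above.
      const grouped = JSON.parse(message.value?.toString() ?? "{}");
      console.log("received result groups:", Object.keys(grouped));
    },
  });
}

run().catch(console.error);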