2 Commits

Author SHA1 Message Date
011293376c 📝 Fixed the endpoint 2023-09-11 16:04:06 -04:00
9c10347d38 🐯 Added a prowlarr api test endpoint 2023-09-07 10:04:29 -04:00
8 changed files with 284 additions and 1282 deletions

2
.gitignore vendored
View File

@@ -62,5 +62,3 @@ jspm_packages/
# Don't track transpiled files
dist/
.DS_Store
*.torrent

635
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
{
"name": "threetwo-acquisition-service",
"version": "1.0.0",
"description": "My Moleculer-based microservices project",
"description": "",
"scripts": {
"build": "tsc --project tsconfig.build.json",
"dev": "ts-node ./node_modules/moleculer/bin/moleculer-runner.js --config moleculer.config.ts --hot --repl services/**/*.service.ts",
@@ -19,16 +19,13 @@
"microservices",
"moleculer"
],
"author": "",
"author": "Rishi Ghan",
"devDependencies": {
"@jest/globals": "^29.3.1",
"@types/jest": "^29.2.3",
"@types/lodash": "^4.17.4",
"@types/node": "^18.11.9",
"@types/parse-torrent": "^5.8.7",
"@typescript-eslint/eslint-plugin": "^5.44.0",
"@typescript-eslint/parser": "^5.44.0",
"axios": "^1.5.0",
"concurrently": "^7.6.0",
"cross-env": "^7.0.3",
"eslint": "^8.28.0",
@@ -40,23 +37,18 @@
"jest": "^29.3.1",
"moleculer-repl": "^0.7.3",
"prettier": "^2.8.0",
"qbittorrent-api-v2": "^1.2.2",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1",
"typescript": "^4.9.3"
},
"dependencies": {
"lodash": "^4.17.21",
"@robertklep/qbittorrent": "^1.0.1",
"axios": "^1.5.0",
"ioredis": "^5.0.0",
"kafkajs": "^2.2.4",
"socket.io-client": "^4.7.5",
"moleculer": "^0.14.34",
"moleculer-web": "^0.10.7",
"parse-torrent": "^9.1.5",
"string-similarity-alg": "^1.3.2"
"moleculer": "^0.14.27",
"moleculer-web": "^0.10.5"
},
"engines": {
"node": ">= 16.x.x"
"node": ">= 20.x.x"
}
}

View File

@@ -1,33 +1,43 @@
import fs from "fs";
import { Service, ServiceBroker } from "moleculer";
import { IncomingMessage } from "http";
import { Service, ServiceBroker, Context } from "moleculer";
import ApiGateway from "moleculer-web";
export default class ApiService extends Service {
public constructor(broker: ServiceBroker) {
super(broker);
// @ts-ignore
this.parseServiceSchema({
name: "api",
mixins: [ApiGateway],
// More info about settings: https://moleculer.services/docs/0.14/moleculer-web.html
settings: {
port: process.env.PORT || 3060,
routes: [
{
path: "/api",
whitelist: ["**"],
use: [],
mergeParams: true,
cors: {
origin: "*",
methods: ["GET", "OPTIONS", "POST", "PUT", "DELETE"],
methods: [
"GET",
"OPTIONS",
"POST",
"PUT",
"DELETE",
],
allowedHeaders: ["*"],
exposedHeaders: [],
credentials: false,
maxAge: 3600,
},
use: [],
mergeParams: true,
authentication: false,
authorization: false,
autoAliases: true,
aliases: {},
callingOptions: {},
@@ -42,27 +52,90 @@ export default class ApiService extends Service {
},
},
mappingPolicy: "all", // Available values: "all", "restrict"
// Enable/disable logging
logging: true,
},
{
path: "/logs",
use: [ApiGateway.serveStatic("logs")],
},
],
// Do not log client side errors (does not log an error response when the error.code is 400<=X<500)
log4XXResponses: false,
logRequestParams: true,
logResponseData: true,
// Logging the request parameters. Set to any log level to enable it. E.g. "info"
logRequestParams: null,
logResponseData: null,
assets: {
folder: "public",
// Options to `server-static` module
options: {},
},
},
events: {},
methods: {},
started(): any {},
methods: {
/**
* Authenticate the request. It checks the `Authorization` token value in the request header.
* Check the token value & resolve the user by the token.
* The resolved user will be available in `ctx.meta.user`
*
* PLEASE NOTE, IT'S JUST AN EXAMPLE IMPLEMENTATION. DO NOT USE IN PRODUCTION!
*
* @param {Context} ctx
* @param {any} route
* @param {IncomingMessage} req
* @returns {Promise}
async authenticate (ctx: Context, route: any, req: IncomingMessage): Promise < any > => {
// Read the token from header
const auth = req.headers.authorization;
if (auth && auth.startsWith("Bearer")) {
const token = auth.slice(7);
// Check the token. Tip: call a service which verify the token. E.g. `accounts.resolveToken`
if (token === "123456") {
// Returns the resolved user. It will be set to the `ctx.meta.user`
return {
id: 1,
name: "John Doe",
};
} else {
// Invalid token
throw new ApiGateway.Errors.UnAuthorizedError(ApiGateway.Errors.ERR_INVALID_TOKEN, {
error: "Invalid Token",
});
}
} else {
// No token. Throw an error or do nothing if anonymous access is allowed.
// Throw new E.UnAuthorizedError(E.ERR_NO_TOKEN);
return null;
}
},
*/
/**
* Authorize the request. Check that the authenticated user has right to access the resource.
*
* PLEASE NOTE, IT'S JUST AN EXAMPLE IMPLEMENTATION. DO NOT USE IN PRODUCTION!
*
* @param {Context} ctx
* @param {Object} route
* @param {IncomingMessage} req
* @returns {Promise}
async authorize (ctx: Context < any, {
user: string;
} > , route: Record<string, undefined>, req: IncomingMessage): Promise < any > => {
// Get the authenticated user.
const user = ctx.meta.user;
// It check the `auth` property in action schema.
// @ts-ignore
if (req.$action.auth === "required" && !user) {
throw new ApiGateway.Errors.UnAuthorizedError("NO_RIGHTS", {
error: "Unauthorized",
});
}
},
*/
},
});
}
}

View File

@@ -1,114 +0,0 @@
"use strict";
import { Kafka } from "kafkajs";
import type { Context, ServiceBroker, ServiceSchema } from "moleculer";
import { Errors, Service } from "moleculer";
// Shape of one "wanted" comic entry as returned by
// library.getComicsMarkedAsWanted (see searchWantedComics below).
interface Comic {
	wanted: {
		// When true, every issue of the volume is wanted, not just
		// the entries listed in `issues`.
		markEntireVolumeWanted?: boolean;
		// Explicit list of wanted issues; consulted when
		// markEntireVolumeWanted is falsy.
		issues?: any[];
		volume: {
			id: string;
			name: string;
		};
	};
}
/**
 * Moleculer service that pages through all comics marked as "wanted"
 * in the library service and enqueues one search job per comic onto
 * the Kafka "comic-search-jobs" topic for background processing
 * (consumed elsewhere, presumably by the comicProcessor service —
 * TODO confirm).
 */
export default class AutoDownloadService extends Service {
	// KafkaJS producer; created and connected in started().
	private kafkaProducer: any;
	// Page size used when paging through library.getComicsMarkedAsWanted.
	private readonly BATCH_SIZE = 100; // Adjust based on your system capacity
	// @ts-ignore
	constructor(
		public broker: ServiceBroker,
		schema: ServiceSchema<{}> = { name: "autodownload" },
	) {
		super(broker);
		this.parseServiceSchema({
			name: "autodownload",
			actions: {
				searchWantedComics: {
					rest: "POST /searchWantedComics",
					/**
					 * Fetches wanted comics page by page and produces a Kafka
					 * job for each one. Returns a summary object once every
					 * job has been enqueued.
					 *
					 * @throws MoleculerError (500) when the library call
					 * returns a non-array or when any step fails.
					 */
					handler: async (ctx: Context<{}>) => {
						try {
							/* eslint-disable no-await-in-loop */
							let page = 1;
							const limit = this.BATCH_SIZE;
							let comics: Comic[];
							do {
								comics = await this.broker.call(
									"library.getComicsMarkedAsWanted",
									{ page, limit },
								);
								// Log debugging info
								this.logger.info(
									"Received comics from getComicsMarkedAsWanted:",
									JSON.stringify(comics, null, 2),
								);
								if (!Array.isArray(comics)) {
									this.logger.error(
										"Invalid response structure",
										JSON.stringify(comics, null, 2),
									);
									throw new Errors.MoleculerError(
										"Invalid response structure from getComicsMarkedAsWanted",
										500,
										"INVALID_RESPONSE_STRUCTURE",
									);
								}
								this.logger.info(
									`Fetched ${comics.length} comics from page ${page}`,
								);
								// Jobs are produced sequentially on purpose to
								// avoid flooding the broker.
								for (const comic of comics) {
									await this.produceJobToKafka(comic);
								}
								page += 1;
								// A short (non-full) page means we reached the end.
							} while (comics.length === limit);
							return {
								success: true,
								message: "Jobs enqueued for background processing.",
							};
						} catch (error) {
							this.logger.error("Error in searchWantedComics:", error);
							throw new Errors.MoleculerError(
								"Failed to search wanted comics.",
								500,
								"SEARCH_WANTED_COMICS_ERROR",
								{ error },
							);
						}
					},
				},
			},
			methods: {
				/**
				 * Serializes one comic into a Kafka message on the
				 * "comic-search-jobs" topic. Failures are logged and
				 * swallowed (best-effort) so one bad job does not abort
				 * the whole paging loop.
				 */
				produceJobToKafka: async (comic: Comic) => {
					const job = { comic };
					try {
						await this.kafkaProducer.send({
							topic: "comic-search-jobs",
							messages: [{ value: JSON.stringify(job) }],
						});
						this.logger.info("Produced job to Kafka:", job);
					} catch (error) {
						this.logger.error("Error producing job to Kafka:", error);
					}
				},
			},
			// Lifecycle: connect the producer when the service starts.
			// NOTE(review): broker address is hard-coded — consider config.
			async started() {
				const kafka = new Kafka({
					clientId: "comic-search-service",
					brokers: ["localhost:9092"],
				});
				this.kafkaProducer = kafka.producer();
				await this.kafkaProducer.connect();
				this.logger.info("Kafka producer connected successfully.");
			},
			// Lifecycle: disconnect cleanly on service stop.
			async stopped() {
				await this.kafkaProducer.disconnect();
				this.logger.info("Kafka producer disconnected successfully.");
			},
		});
	}
}

View File

@@ -1,310 +0,0 @@
import type { EachMessagePayload } from "kafkajs";
import { Kafka, logLevel } from "kafkajs";
import { isNil, isUndefined } from "lodash";
import type { ServiceBroker, ServiceSchema } from "moleculer";
import { Service } from "moleculer";
import io from "socket.io-client";
import stringSimilarity from "string-similarity-alg";
// Payload envelope for AirDC++ Socket.IO events: "searchResultAdded"
// delivers `groupedResult`, "searchResultUpdated" delivers `updatedResult`.
// entityId identifies the search instance; payload is the raw result.
interface SearchResult {
	groupedResult: { entityId: number; payload: any };
	updatedResult: { entityId: number; payload: any };
}
/**
 * Moleculer service that consumes comic search jobs from the Kafka
 * "comic-search-jobs" topic, fans each job out as an AirDC++ search via
 * the "socket" service, collects incoming results over a Socket.IO
 * connection, ranks them by Jaro-Winkler similarity to the query, and
 * publishes the best match to the "comic-search-results" Kafka topic
 * and to the UI via a broadcast event.
 *
 * Fixes in this revision:
 *  - removed a stray junk token ("rty6j") that broke compilation;
 *  - `produceResultsToKafka`'s unused `result` parameter is now optional
 *    (the only caller passes just the query, which previously did not
 *    type-check);
 *  - corrected a comment that mislabeled the "searchesSent" handler.
 */
export default class ComicProcessorService extends Service {
	// KafkaJS consumer/producer; created and connected in started().
	private kafkaConsumer: any;
	private socketIOInstance: any;
	private kafkaProducer: any;
	// Keyed by "<volume>-<issue>-<year>"; only written by the currently
	// commented-out Prowlarr branch in processJob.
	private prowlarrResultsMap: Map<string, any> = new Map();
	// AirDC++ results grouped by search entityId, filled by socket events.
	private airDCPPSearchResults: Map<number, any[]> = new Map();
	private issuesToSearch: any = [];
	// @ts-ignore: schema parameter is required by Service constructor
	constructor(
		public broker: ServiceBroker,
		schema: ServiceSchema<object> = { name: "comicProcessor" },
	) {
		super(broker, schema);
		this.parseServiceSchema({
			name: "comicProcessor",
			methods: {
				// Split a date string into numeric year/month/day parts.
				// Unparseable input yields NaN fields; callers rely on that
				// being falsy (`year || issue.year || ""`).
				parseStringDate: (dateString: string) => {
					const date = new Date(dateString);
					return {
						year: date.getFullYear(),
						month: date.getMonth() + 1,
						day: date.getDate(),
					};
				},
				/**
				 * Flattens all collected result arrays and returns the single
				 * result whose `name` is most similar to `query`
				 * (Jaro-Winkler), annotated with its similarity score.
				 * Returns null when `results` is empty.
				 */
				rankSearchResults: async (results: Map<number, any[]>, query: string) => {
					// Find the highest-ranked response based on similarity to the search string
					let highestRankedResult = null;
					let highestSimilarity = -1;
					results.forEach((resultArray) => {
						resultArray.forEach((result) => {
							const similarity = stringSimilarity("jaro-winkler").compare(
								result.name,
								query,
							);
							if (similarity > highestSimilarity) {
								highestSimilarity = similarity;
								highestRankedResult = { ...result, similarity };
							}
						});
					});
					return highestRankedResult;
				},
				/**
				 * Handles one Kafka job: resolves the list of issues to
				 * search (whole volume via ComicVine, or the explicit
				 * `issues` list), then fires one AirDC++ search per issue.
				 * Errors are logged and swallowed so the consumer keeps
				 * running.
				 */
				processJob: async (job: any) => {
					try {
						this.logger.info("Processing job:", JSON.stringify(job, null, 2));
						// Get the hub to search on
						const settings: any = await this.broker.call("settings.getSettings", {
							settingsKey: "directConnect",
						});
						const hubs = settings.client.hubs.map((hub: any) => hub.value);
						const { comic } = job;
						const { volume, issues, markEntireVolumeWanted } = comic.wanted;
						// If entire volume is marked as wanted, get their details from CV
						if (markEntireVolumeWanted) {
							this.issuesToSearch = await this.broker.call(
								"comicvine.getIssuesForVolume",
								{ volumeId: volume.id },
							);
							this.logger.info(
								`The entire volume with id: ${volume.id} was marked as wanted.`,
							);
							this.logger.info(`Fetched issues for ${volume.id}:`);
							this.logger.info(`${this.issuesToSearch.length} issues to search`);
						} else {
							// Or proceed with `issues` from the wanted object.
							this.issuesToSearch = issues;
						}
						for (const issue of this.issuesToSearch) {
							// Query builder for DC++
							// 1. issue number
							const inferredIssueNumber =
								issue.issueNumber || issue.issue_number || "";
							// 2. year
							const { year } = this.parseStringDate(issue.coverDate);
							const inferredYear = year || issue.year || "";
							// 3. Orchestrate the query
							// NOTE(review): inferredIssueNumber/inferredYear are
							// computed but not part of the pattern — confirm
							// whether they should narrow the search.
							const dcppSearchQuery = {
								query: {
									pattern: `${volume.name
										.replace(/[^\w\s]/g, "")
										.replace(/\s+/g, " ")
										.trim()}`,
									extensions: ["cbz", "cbr", "cb7"],
								},
								hub_urls: hubs,
								priority: 5,
							};
							this.logger.info(
								"DC++ search query:",
								JSON.stringify(dcppSearchQuery, null, 4),
							);
							// NOTE(review): hard-coded host and credentials —
							// these should come from settings.
							await this.broker.call("socket.search", {
								query: dcppSearchQuery,
								config: {
									hostname: "192.168.1.119:5600",
									protocol: "http",
									username: "admin",
									password: "password",
								},
								namespace: "/automated",
							});
							// const prowlarrResults = await this.broker.call("prowlarr.search", {
							// 	prowlarrQuery: {
							// 		port: "9696",
							// 		apiKey: "c4f42e265fb044dc81f7e88bd41c3367",
							// 		offset: 0,
							// 		categories: [7030],
							// 		query: `${volume.name} ${issue.issueNumber} ${year}`,
							// 		host: "localhost",
							// 		limit: 100,
							// 		type: "search",
							// 		indexerIds: [2],
							// 	},
							// });
							//
							// this.logger.info(
							// 	"Prowlarr search results:",
							// 	JSON.stringify(prowlarrResults, null, 4),
							// );
							// Store prowlarr results in map using unique key
							// const key = `${volume.name}-${issue.issueNumber}-${year}`;
							// this.prowlarrResultsMap.set(key, prowlarrResults);
						}
					} catch (error) {
						this.logger.error("Error processing job:", error);
					}
				},
				/**
				 * Ranks the accumulated AirDC++ results for `query` and
				 * publishes the winner to Kafka and to the UI socket.
				 *
				 * @param query   the original search pattern
				 * @param _result unused; kept optional for backward
				 *                compatibility (previously required but
				 *                never passed by the only caller)
				 */
				produceResultsToKafka: async (query: string, _result?: any[]): Promise<void> => {
					try {
						/*
						Match and rank
						*/
						const finalResult = await this.rankSearchResults(
							this.airDCPPSearchResults,
							query,
						);
						/*
						Kafka messages need to be in a format that can be serialized to JSON,
						and a Map is not directly serializable in a way that retains its structure,
						hence why we use Object.fromEntries
						*/
						await this.kafkaProducer.send({
							topic: "comic-search-results",
							messages: [
								{
									value: JSON.stringify(finalResult),
								},
							],
						});
						this.logger.info(`Produced results to Kafka.`);
						// socket event for UI
						await this.broker.call("socket.broadcast", {
							namespace: "/",
							event: "searchResultsAvailable",
							args: [
								{
									query,
									finalResult,
								},
							],
						});
					} catch (error) {
						this.logger.error("Error producing results to Kafka:", error);
					}
				},
			},
			/**
			 * Lifecycle: wire up Kafka (consumer + producer) and the
			 * Socket.IO connection that streams AirDC++ search results.
			 * NOTE(review): broker and socket addresses are hard-coded.
			 */
			async started() {
				const kafka = new Kafka({
					clientId: "comic-processor-service",
					brokers: ["localhost:9092"],
					logLevel: logLevel.INFO,
				});
				this.kafkaConsumer = kafka.consumer({ groupId: "comic-processor-group" });
				this.kafkaProducer = kafka.producer();
				this.kafkaConsumer.on("consumer.crash", (event: any) => {
					this.logger.error("Consumer crash:", event);
				});
				this.kafkaConsumer.on("consumer.connect", () => {
					this.logger.info("Consumer connected");
				});
				this.kafkaConsumer.on("consumer.disconnect", () => {
					this.logger.info("Consumer disconnected");
				});
				this.kafkaConsumer.on("consumer.network.request_timeout", () => {
					this.logger.warn("Consumer network request timeout");
				});
				await this.kafkaConsumer.connect();
				await this.kafkaProducer.connect();
				await this.kafkaConsumer.subscribe({
					topic: "comic-search-jobs",
					fromBeginning: true,
				});
				await this.kafkaConsumer.run({
					eachMessage: async ({ topic, partition, message }: EachMessagePayload) => {
						if (message.value) {
							const job = JSON.parse(message.value.toString());
							await this.processJob(job);
						} else {
							this.logger.warn("Received message with null value");
						}
					},
				});
				this.socketIOInstance = io("ws://localhost:3001/automated", {
					transports: ["websocket"],
					withCredentials: true,
				});
				this.socketIOInstance.on("connect", () => {
					this.logger.info("Socket.IO connected successfully.");
				});
				// Handle searchResultAdded event: accumulate results per entityId.
				this.socketIOInstance.on("searchResultAdded", (result: SearchResult) => {
					const {
						groupedResult: { entityId, payload },
					} = result;
					this.logger.info(
						`AirDC++ Search result added for entityId: ${entityId} - ${payload?.name}`,
					);
					if (!this.airDCPPSearchResults.has(entityId)) {
						this.airDCPPSearchResults.set(entityId, []);
					}
					if (!isNil(payload)) {
						this.airDCPPSearchResults.get(entityId).push(payload);
					}
					console.log(
						"Updated airDCPPSearchResults:",
						JSON.stringify(Array.from(this.airDCPPSearchResults.entries()), null, 4),
					);
					console.log(JSON.stringify(payload, null, 4));
				});
				// Handle searchResultUpdated event: replace the matching
				// stored result (by id) with the updated payload.
				this.socketIOInstance.on("searchResultUpdated", (result: SearchResult) => {
					const {
						updatedResult: { entityId, payload },
					} = result;
					const resultsForInstance = this.airDCPPSearchResults.get(entityId);
					if (resultsForInstance) {
						const toReplaceIndex = resultsForInstance.findIndex((element: any) => {
							this.logger.info("search result updated!");
							this.logger.info(JSON.stringify(element, null, 4));
							return element.id === payload.id;
						});
						if (toReplaceIndex !== -1) {
							// Replace the existing result with the updated result
							resultsForInstance[toReplaceIndex] = payload;
							// Optionally, update the map with the modified array
							this.airDCPPSearchResults.set(entityId, resultsForInstance);
						}
					}
				});
				// Handle searchesSent event: all searches for the query were
				// dispatched, so rank and publish what we collected.
				this.socketIOInstance.on("searchesSent", async (data: any) => {
					this.logger.info(
						`Search complete for query: "${data.searchInfo.query.pattern}"`,
					);
					await this.produceResultsToKafka(data.searchInfo.query.pattern);
				});
			},
			// Lifecycle: tear down Kafka clients and the socket connection.
			async stopped() {
				await this.kafkaConsumer.disconnect();
				await this.kafkaProducer.disconnect();
				if (this.socketIOInstance) {
					this.socketIOInstance.close();
				}
			},
		});
	}
}

View File

@@ -1,123 +1,39 @@
"use strict";
import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
import axios from "axios";
import { qBittorrentClient } from "@robertklep/qbittorrent";
const { MoleculerError } = require("moleculer").Errors;
export default class ProwlarrService extends Service {
export default class QBittorrentService extends Service {
// @ts-ignore
public constructor(
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "prowlarr" },
schema: ServiceSchema<{}> = { name: "qbittorrent" },
) {
super(broker);
this.parseServiceSchema({
name: "prowlarr",
name: "qbittorrent",
mixins: [],
hooks: {},
actions: {
connect: {
rest: "POST /connect",
handler: async (
ctx: Context<{
host: string;
port: string;
apiKey: string;
}>,
) => {
const { host, port, apiKey } = ctx.params;
const result = await axios.request({
url: `http://${host}:${port}/api`,
method: "GET",
headers: {
"X-Api-Key": apiKey,
},
});
console.log(result.data);
},
},
getIndexers: {
rest: "GET /indexers",
handler: async (
ctx: Context<{ host: string; port: string; apiKey: string }>,
) => {
const { host, port, apiKey } = ctx.params;
const result = await axios.request({
url: `http://${host}:${port}/api/v1/indexer`,
method: "GET",
headers: {
"X-Api-Key": apiKey,
},
});
return result.data;
},
},
search: {
rest: "GET /search",
handler: async (
ctx: Context<{
prowlarrQuery: {
host: string;
port: string;
apiKey: string;
query: string;
type: string;
indexerIds: [number];
categories: [number];
limit: number;
offset: number;
};
}>,
) => {
const {
prowlarrQuery: {
indexerIds,
categories,
host,
port,
apiKey,
query,
type,
limit,
offset,
},
} = ctx.params;
const indexer = indexerIds[0] ? indexerIds.length === 1 : indexerIds;
const category = categories[0] ? categories.length === 1 : categories;
const result = await axios({
url: `http://${host}:${port}/api/v1/search`,
method: "GET",
params: {
query,
type,
indexer,
category,
limit,
offset,
},
headers: {
Accept: "application/json",
"X-Api-Key": `${apiKey}`,
},
});
return result.data;
},
},
ping: {
rest: "GET /ping",
getList: {
rest: "GET /getTorrents",
handler: async (ctx: Context<{}>) => {
const foo = await axios.request({
url: "http://192.168.1.183:9696/ping",
method: "GET",
headers: {
Accept: "application/json",
"X-Api-Key": "163ef9a683874f65b53c7be87354b38b",
},
});
console.log(foo.data);
return true;
},
},
},
methods: {},
return await this.torrentClient.torrents.info()
}
}
}, methods: {},
async started(): Promise<any> {
try {
this.torrentClient = new qBittorrentClient("http://192.168.1.183:8089", "admin", "adminadmin");
} catch (err) {
throw new MoleculerError(err, 500, "QBITTORRENT_CONNECTION_ERROR", {
data: err,
});
}
}
});
}
}

View File

@@ -1,237 +1,45 @@
import { readFileSync, writeFileSync } from "fs";
import { qBittorrentClient } from "@robertklep/qbittorrent";
import type { Context, ServiceBroker, ServiceSchema } from "moleculer";
import { Errors, Service } from "moleculer";
import parseTorrent from "parse-torrent";
"use strict";
import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
const { MoleculerError } = require("moleculer").Errors;
import axios from "axios";
export default class QBittorrentService extends Service {
export default class ProwlarrService extends Service {
// @ts-ignore
constructor(public broker: ServiceBroker, schema: ServiceSchema<{}> = { name: "qbittorrent" }) {
public constructor(
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "prowlarr" },
) {
super(broker);
this.parseServiceSchema({
name: "qbittorrent",
name: "prowlarr",
mixins: [],
hooks: {},
settings: {},
actions: {
fetchQbittorrentCredentials: {
rest: "GET /fetchQbittorrentCredentials",
handler: async (ctx: Context<{}>) => {
return await this.broker.call("settings.getSettings", {
settingsKey: "bittorrent",
});
},
},
connect: {
rest: "POST /connect",
handler: async (
ctx: Context<{
username: string;
password: string;
hostname: string;
port: string;
protocol: string;
name?: string;
}>,
) => {
const { username, password, hostname, port, protocol } = ctx.params;
this.meta = new qBittorrentClient(
`${protocol}://${hostname}:${port}`,
`${username}`,
`${password}`,
);
console.log(this.meta);
if (this.meta) {
return { success: true, message: "Logged in successfully" };
}
},
},
loginWithStoredCredentials: {
rest: "POST /loginWithStoredCredentials",
testConnection: {
rest: "GET /testConnection",
handler: async (ctx: Context<{}>) => {
try {
const result: any = await this.broker.call(
"qbittorrent.fetchQbittorrentCredentials",
{},
);
if (result !== undefined) {
const {
client: {
host: { username, password, hostname, port, protocol },
},
} = result;
const connection = await this.broker.call("qbittorrent.connect", {
username,
password,
hostname,
port,
protocol,
});
console.log("qbittorrent connection details:");
console.log(JSON.stringify(connection, null, 4));
return connection;
}
} catch (err) {
return {
error: err,
message:
"Qbittorrent credentials not found, please configure them in Settings.",
};
}
},
},
getClientInfo: {
rest: "GET /getClientInfo",
handler: async (ctx: Context<{}>) => {
await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
return {
buildInfo: await this.meta.app.buildInfo(),
version: await this.meta.app.version(),
webAPIVersion: await this.meta.app.webapiVersion(),
};
},
},
addTorrent: {
rest: "POST /addTorrent",
handler: async (
ctx: Context<{
torrentToDownload: any;
comicObjectId: string;
}>,
) => {
try {
await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
const { torrentToDownload, comicObjectId } = ctx.params;
console.log(torrentToDownload);
const response = await fetch(torrentToDownload, {
method: "GET",
});
// Read the buffer to a file
const buffer = await response.arrayBuffer();
writeFileSync(`mithrandir.torrent`, Buffer.from(buffer));
// Add the torrent to qbittorrent's queue, paused.
const result = await this.meta.torrents.add({
torrents: {
buffer: readFileSync("mithrandir.torrent"),
},
// start this torrent in a paused state (see Torrent type for options)
paused: true,
});
const { name, infoHash, announce } = parseTorrent(
readFileSync("mithrandir.torrent"),
);
await this.broker.call("library.applyTorrentDownloadMetadata", {
name,
torrentToDownload,
comicObjectId,
announce,
infoHash,
});
return {
result,
};
} catch (err) {
console.error(err);
}
},
},
getTorrents: {
rest: "POST /getTorrents",
handler: async (ctx: Context<{}>) => {
await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
return await this.meta.torrents.info();
},
},
getTorrentProperties: {
rest: "POST /getTorrentProperties",
handler: async (ctx: Context<{ infoHashes: string[] }>) => {
try {
const { infoHashes } = ctx.params;
await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
return await this.meta.torrents.info({
hashes: infoHashes,
});
} catch (err) {
console.error("An error occurred:", err);
// Consider handling the error more gracefully here, possibly returning an error response
throw err; // or return a specific error object/message
}
},
},
getTorrentRealTimeStats: {
rest: "POST /getTorrentRealTimeStats",
handler: async (
ctx: Context<{ infoHashes: { _id: string; infoHashes: string[] }[] }>,
) => {
const { infoHashes } = ctx.params;
await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
try {
// Increment rid for each call
this.rid = typeof this.rid === "number" ? this.rid + 1 : 0;
const data = await this.meta.sync.maindata(this.rid);
const torrentDetails: any = [];
infoHashes.forEach(({ _id, infoHashes }) => {
// Initialize an object to hold details for this _id
const details: any = [];
infoHashes.forEach((hash) => {
// Assuming 'data.torrents[hash]' retrieves the details for the hash
const torrent = data.torrents[hash];
if (torrent) {
details.push({
torrent,
});
}
});
// If you have details for this _id, add them to the main array
if (details.length > 0) {
torrentDetails.push({
_id,
details,
});
const result = await axios.request({
url: `http://192.168.1.183:9696/api/v1/history`,
method: `GET`,
headers: { Accept: "application/json" },
params: {
apikey: "163ef9a683874f65b53c7be87354b38b",
}
});
// Update rid with the latest value if needed based on the response
// Assuming `data.rid` contains the latest rid from the server
if (data.rid !== undefined) {
this.rid = data.rid;
console.log(`rid is ${this.rid}`);
}
console.log(JSON.stringify(torrentDetails, null, 4));
return torrentDetails;
} catch (err) {
this.logger.error(err);
throw err;
console.log(result);
return result.data;
} catch(err) {
console.log(err);
}
},
}
},
determineDownloadApps: {
rest: "",
handler: async () => {
// 1. Parse the incoming search query
// to make sure that it is well-formed
// At the very least, it should have name, year, number
// 2. Choose between download mediums based on user-preference?
// possible choices are: DC++, Torrent
// 3. Perform the search on those media with the aforementioned search query
// 4. Choose a subset of relevant search results,
// and score them
// 5. Download the highest-scoring, relevant result
},
},
},
methods: {},
async started() {
console.log(`Initializing rid...`);
this.rid = 0;
console.log(`rid is ${this.rid}`);
},
}, methods: {},
async started(): Promise<any> {
}
});
}
}