diff --git a/models/comic.model.ts b/models/comic.model.ts index 8778cd6..7dbd06f 100644 --- a/models/comic.model.ts +++ b/models/comic.model.ts @@ -115,13 +115,13 @@ const ComicSchema = mongoose.Schema( default: [], }, }, - torrent: { - downloads: [], - sourceApplication: String, - magnet: String, - tracker: String, - status: String, - }, + torrent: [ + { + infoHash: String, + name: String, + announce: [String], + }, + ], usenet: { sourceApplication: String, }, diff --git a/models/settings.model.ts b/models/settings.model.ts index a60f75d..73a4422 100644 --- a/models/settings.model.ts +++ b/models/settings.model.ts @@ -23,6 +23,12 @@ const SettingsScehma = mongoose.Schema({ host: HostSchema, }, }, + prowlarr: { + client: { + host: HostSchema, + apiKey: String, + }, + }, }); const Settings = mongoose.model("Settings", SettingsScehma); diff --git a/services/jobqueue.service.ts b/services/jobqueue.service.ts index 5e1cc08..830e18c 100644 --- a/services/jobqueue.service.ts +++ b/services/jobqueue.service.ts @@ -12,7 +12,6 @@ import { import { isNil, isUndefined } from "lodash"; import { pubClient } from "../config/redis.config"; import path from "path"; - const { MoleculerError } = require("moleculer").Errors; console.log(process.env.REDIS_URI); @@ -51,17 +50,18 @@ export default class JobQueueService extends Service { } }, }, + enqueue: { queue: true, - rest: "/GET enqueue", + rest: "GET /enqueue", handler: async ( - ctx: Context<{ queueName: string; description: string }> + ctx: Context<{ action: string; description: string }> ) => { - const { queueName, description } = ctx.params; + const { action, description } = ctx.params; // Enqueue the job const job = await this.localQueue( ctx, - queueName, + action, ctx.params, { priority: 10, @@ -73,6 +73,7 @@ export default class JobQueueService extends Service { return job.id; }, }, + // Comic Book Import Job Queue "enqueue.async": { handler: async ( @@ -437,6 +438,7 @@ export default class JobQueueService extends Service 
{ }); }, }, + methods: {}, }); } } diff --git a/services/library.service.ts b/services/library.service.ts index ce198d8..904186d 100644 --- a/services/library.service.ts +++ b/services/library.service.ts @@ -222,7 +222,7 @@ export default class ImportService extends Service { }, sessionId, importType: "new", - queueName: "enqueue.async", + action: "enqueue.async", }); } else { console.log( @@ -303,6 +303,7 @@ export default class ImportService extends Service { ); switch (ctx.params.importType) { case "new": + console.log(comicMetadata); return await Comic.create(comicMetadata); case "update": return await Comic.findOneAndUpdate( @@ -424,6 +425,66 @@ export default class ImportService extends Service { }); }, }, + applyTorrentDownloadMetadata: { + rest: "POST /applyTorrentDownloadMetadata", + handler: async ( + ctx: Context<{ + torrentToDownload: any; + comicObjectId: String; + infoHash: String; + name: String; + announce: [String]; + }> + ) => { + const { + name, + torrentToDownload, + comicObjectId, + announce, + infoHash, + } = ctx.params; + console.log(JSON.stringify(ctx.params, null, 4)); + try { + return await Comic.findByIdAndUpdate( + new ObjectId(comicObjectId), + { + $push: { + "acquisition.torrent": { + infoHash, + name, + announce, + }, + }, + }, + { new: true, safe: true, upsert: true } + ); + } catch (err) { + console.log(err); + } + }, + }, + getInfoHashes: { + rest: "GET /getInfoHashes", + handler: async (ctx: Context<{}>) => { + try { + return await Comic.aggregate([ + { + $unwind: "$acquisition.torrent", + }, + { + $group: { + _id: "$_id", + infoHashes: { + $push: "$acquisition.torrent.infoHash", + }, + }, + }, + ]); + } catch (err) { + return err; + } + }, + }, getComicBooks: { rest: "POST /getComicBooks", params: {}, diff --git a/services/settings.service.ts b/services/settings.service.ts index 7159a00..361ca1f 100644 --- a/services/settings.service.ts +++ b/services/settings.service.ts @@ -28,12 +28,31 @@ export default class SettingsService 
extends Service { rest: "GET /getAllSettings", params: {}, async handler(ctx: Context<{ settingsKey: string }>) { - const settings = await Settings.find({}); - if (isEmpty(settings)) { + const { settingsKey } = ctx.params; + + // Initialize a projection object. Include everything by default. + let projection = settingsKey + ? { _id: 0, [settingsKey]: 1 } + : {}; + + // Find the settings with the dynamic projection + const settings = await Settings.find({}, projection); + + if (settings.length === 0) { return {}; } - console.log(settings[0]); - return settings[0]; + + // If settingsKey is provided, return the specific part of the settings. + // Otherwise, return the entire settings document. + if (settingsKey) { + // Check if the specific key exists in the settings document. + // Since `settings` is an array, we access the first element. + // Then, we use the settingsKey to return only that part of the document. + return settings[0][settingsKey] || {}; + } else { + // Return the entire settings document + return settings[0]; + } }, }, diff --git a/services/torrentjobs.service.ts b/services/torrentjobs.service.ts new file mode 100644 index 0000000..c6e5f51 --- /dev/null +++ b/services/torrentjobs.service.ts @@ -0,0 +1,101 @@ +"use strict"; +import axios from "axios"; +import { + Context, + Service, + ServiceBroker, + ServiceSchema, + Errors, +} from "moleculer"; +import { DbMixin } from "../mixins/db.mixin"; +import Comic from "../models/comic.model"; +const ObjectId = require("mongoose").Types.ObjectId; +import { isNil, isUndefined } from "lodash"; +import BullMqMixin from "moleculer-bullmq"; +const { MoleculerError } = require("moleculer").Errors; + +export default class ImageTransformation extends Service { + // @ts-ignore + public constructor( + public broker: ServiceBroker, + schema: ServiceSchema<{}> = { name: "imagetransformation" } + ) { + super(broker); + this.parseServiceSchema({ + name: "torrentjobs", + mixins: [DbMixin("comics", Comic), BullMqMixin], + 
settings: { + bullmq: { + client: process.env.REDIS_URI, + }, + }, + hooks: {}, + actions: { + getTorrentData: { + queue: true, + rest: "GET /getTorrentData", + handler: async (ctx: Context<{ trigger: string }>) => { + const { trigger } = ctx.params; + console.log(`Received ${trigger} as the trigger...`); + + const jobOptions = { + jobId: "retrieveTorrentData", + name: "bossy", + repeat: { + every: 10000, // Repeat every 10000 ms + limit: 100, // Limit to 100 repeats + }, + }; + + const job = await this.localQueue( + ctx, + "fetchTorrentData", + ctx.params, + jobOptions + ); + return job; + }, + }, + fetchTorrentData: { + rest: "GET /fetchTorrentData", + handler: async ( + ctx: Context<{ + birdName: String; + }> + ) => { + const repeatableJob = await this.$resolve( + "torrentjobs" + ).getRepeatableJobs(); + console.info(repeatableJob); + console.info( + `Scheduled job for fetching torrent data fired.` + ); + // 1. query mongo for infohashes + const infoHashes = await this.broker.call( + "library.getInfoHashes", + {} + ); + // 2. query qbittorrent to see if they exist + const torrents: any = await this.broker.call( + "qbittorrent.getTorrentRealTimeStats", + { infoHashes } + ); + // 3. Broadcast the real-time torrent stats to clients via the AS_TORRENT_DATA event + await this.broker.call("socket.broadcast", { + namespace: "/", + event: "AS_TORRENT_DATA", + args: [ + { + torrents, + }, + ], + }); + // 4. If they do, don't do anything + // 5. If they don't, purge them from mongo + }, + }, + }, + methods: {}, + }); + } +}