44 Commits

Author SHA1 Message Date
Rishi Ghan
afead56a74 Fixed eslint errors
Some checks failed
Docker Image CI / build (push) Has been cancelled
2026-04-15 11:35:11 -04:00
f65a24da25 Added graphql deps and models
Some checks failed
Docker Image CI / build (push) Has been cancelled
2026-03-26 21:05:13 -04:00
9a4569040f ✏️ Formatting changes 2025-07-14 12:09:15 -04:00
de2a6abf68 ∗ Typo in moleculer config 2025-02-25 16:26:11 -05:00
c05d16a91a 🪳 Updated kafka broker uri to be dynamic 2025-02-24 14:02:18 -05:00
fc857baa4b 🪓 Added axios to npm deps 2025-02-21 16:38:14 -05:00
4516d7892f 🔧 Fixes to enable docker image build 2025-02-21 13:03:25 -05:00
193fffeb8d 🔧 added base tsconfig 2025-02-21 12:32:34 -05:00
fa3e8ebcfe 🔧 Fixed the incorrect tsconfig filename 2025-02-21 11:11:09 -05:00
2399431fde 🔧 Added bash 2025-02-21 11:08:49 -05:00
cb500f0fe4 🔧 Bumped node image version in Dockerfile 2025-02-21 11:05:57 -05:00
54ee896e15 🐳 Added copy config files step 2025-02-20 22:13:13 -05:00
40a0edadbb 🔧 Added npm i step 2025-02-20 22:04:01 -05:00
481878c19f Merge branch 'main' of https://github.com/rishighan/threetwo-acquisition-service 2025-02-20 21:56:56 -05:00
8d588e9542 🐳 Removed npm ci step 2025-02-20 21:56:49 -05:00
ed06459450 Update docker-image.yml 2025-02-20 21:55:27 -05:00
56547034ef 🐳 Updated Dockerfile 2025-02-20 21:52:14 -05:00
703f1e76fa Merge branch 'main' of https://github.com/rishighan/threetwo-acquisition-service 2025-02-20 21:48:41 -05:00
175a67c5b2 🐳 Added Dockerfile 2025-02-20 21:48:31 -05:00
d97b69c92d Merge pull request #6 from rishighan/rishighan-patch-2
Update docker-image.yml
2025-02-20 21:38:09 -05:00
c65eb2c6ec Update docker-image.yml 2025-02-20 21:37:53 -05:00
5b2555aa61 Merge pull request #5 from rishighan/rishighan-patch-1
Update docker-image.yml
2025-02-20 21:26:42 -05:00
6b732f1518 Update docker-image.yml 2025-02-20 21:26:30 -05:00
1900a3ddb8 🔧 Removed workflow file from wrong path 2025-02-20 21:24:13 -05:00
63130b4e82 Create docker-image.yml 2025-02-20 21:23:18 -05:00
f5a2e6505b 🔧 yeeting a docker workflow 2025-02-20 21:18:40 -05:00
6f11e84c1d 🐛 Removed some random letters 2025-02-20 12:36:53 -05:00
cb8e6bb3d6 Merge pull request #4 from rishighan/autodownload-loop
Autodownload loop
2025-02-17 15:42:22 -05:00
c0946e2ce4 🔧 Fixed awaits inside loops 2025-02-17 15:41:38 -05:00
36f08212a0 🔧 Wired the kafka producer back up 2025-01-27 19:28:40 -05:00
cd9ea85b80 🔧 Fixing autodownload functionality 2024-12-22 21:59:49 -05:00
e0954eb3e8 🔧 Fixing the response from searchResultAdded event 2024-11-22 21:12:25 -05:00
651e3ac7bb 🔧 Added an undefined check for the results 2024-11-18 11:07:38 -05:00
4045097eb0 🏗️ Added string-similarity lib 2024-08-14 12:19:55 -04:00
a710211a2c 🔧 Commented code 2024-07-10 14:36:03 -04:00
0430670a01 🏗️ Added structure for the UI notification 2024-07-02 22:09:33 -04:00
ecdc3845cb 🪳 Serialized Map to be compatible with kafka 2024-06-18 00:06:25 -04:00
e6a85e6a39 🏗️ Commented code 2024-06-13 14:09:00 -04:00
55494abdc0 🔧 Fixes to search query builder 2024-06-12 21:41:29 -05:00
60e5b6f61b 🔧 kafka-powered autodownload loop 2024-06-03 17:18:22 -04:00
12e46334da 🪳 kafka for handling dc++ download jobs 2024-05-28 08:39:46 -04:00
6fb1374ce9 🏗️ WIP for automatic downloads endpoint 2024-05-09 13:45:18 -04:00
17ed663823 Added an AutoDownloadService 2024-04-23 22:46:49 -05:00
91199bcf0c Merge pull request #3 from rishighan/qbittorrent-endpoints
Qbittorrent endpoints
2024-03-30 21:38:17 -04:00
15 changed files with 1594 additions and 621 deletions

19
.github/workflows/docker-image.yml vendored Normal file
View File

@@ -0,0 +1,19 @@
# CI workflow: build and publish the service's Docker image on pushes and
# pull requests targeting main.
name: Docker Image CI
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      # Pin checkout to a released major version instead of the mutable
      # `master` ref, which can change or break without warning.
      - uses: actions/checkout@v4
      - name: Publish to Registry
        uses: elgohr/Publish-Docker-Github-Action@v5
        with:
          name: frishi/threetwo-acquisition-service
          # Registry credentials come from repository secrets; note they are
          # not available to pull_request runs from forks.
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

29
Dockerfile Normal file
View File

@@ -0,0 +1,29 @@
# Build image for the ThreeTwo acquisition service (Moleculer + TypeScript).
FROM node:22.1.0
LABEL maintainer="Rishi Ghan <rishi.ghan@gmail.com>"
# Set working directory
WORKDIR /acquisition-service
# Copy package files first so the npm install layer is cached until
# dependencies actually change.
COPY package.json package-lock.json ./
# Install all dependencies (including devDependencies — needed for the
# TypeScript build below; pruned after the build).
RUN npm install
# Copy necessary config files
# NOTE(review): this step is redundant unless .dockerignore excludes these
# files from the `COPY . .` below — confirm, or drop the step.
COPY moleculer.config.ts tsconfig.json tsconfig.build.json ./
# Copy the rest of the source code
COPY . .
# Build the application (compiles TypeScript)
RUN npm run build
# Now remove devDependencies to keep the final image small
RUN npm prune --omit=dev
# Expose the port the gateway listens on
EXPOSE 3080
# Start the application
CMD ["npm", "start"]

View File

@@ -0,0 +1,23 @@
/** AST node shape used when parsing GraphQL literals into plain JSON values. */
interface JSONValueNode {
  kind: string;
  value?: unknown;
  values?: JSONValueNode[];
  fields?: { name: { value: string }; value: JSONValueNode }[];
}

/**
 * Recursively converts a GraphQL literal AST node into the corresponding
 * plain JSON value.
 *
 * Fix: the previous implementation returned `ast.value`, which is
 * `undefined` for ObjectValue/ListValue nodes, so inline JSON object/list
 * literals in queries were silently dropped.
 */
const parseJSONLiteral = (ast: JSONValueNode): unknown => {
  switch (ast.kind) {
    case "ObjectValue":
      return Object.fromEntries(
        (ast.fields ?? []).map((field) => [
          field.name.value,
          parseJSONLiteral(field.value),
        ]),
      );
    case "ListValue":
      return (ast.values ?? []).map(parseJSONLiteral);
    case "IntValue":
    case "FloatValue":
      // graphql-js carries numeric literals as strings; convert to numbers.
      return Number(ast.value);
    case "NullValue":
      return null;
    default:
      // StringValue, BooleanValue, EnumValue — value is already usable.
      return ast.value;
  }
};

/** Arguments of the addTorrent mutation (mirrors AddTorrentInput in the SDL). */
interface AddTorrentInput {
  torrentToDownload: string;
  comicObjectId: string;
}

/** GraphQL execution context: carries the Moleculer broker injected by the gateway. */
interface ResolverContext {
  broker?: {
    call: (action: string, params: Record<string, unknown>) => Promise<unknown>;
  };
}

/**
 * Resolver map for the acquisition service schema: a placeholder Query,
 * the addTorrent mutation (delegated to the qbittorrent Moleculer service),
 * and a pass-through JSON scalar.
 */
export const resolvers = {
  Query: {
    // Placeholder: GraphQL requires Query to declare at least one field.
    _empty: (): null => null,
  },
  Mutation: {
    /**
     * Forwards the mutation to the qbittorrent Moleculer service.
     * @throws Error when no broker is present on the context.
     */
    addTorrent: async (
      _: unknown,
      { input }: { input: AddTorrentInput },
      context: ResolverContext,
    ) => {
      const { broker } = context;
      if (!broker) throw new Error("Broker not available in context");
      return broker.call("qbittorrent.addTorrent", {
        torrentToDownload: input.torrentToDownload,
        comicObjectId: input.comicObjectId,
      });
    },
  },
  // JSON scalar: runtime values pass through untouched; query literals are
  // recursively converted from the AST.
  JSON: {
    __parseValue: (value: unknown) => value,
    __serialize: (value: unknown) => value,
    __parseLiteral: (ast: JSONValueNode) => parseJSONLiteral(ast),
  },
};

25
models/graphql/typedef.ts Normal file
View File

@@ -0,0 +1,25 @@
import { gql } from "graphql-tag";

/**
 * GraphQL schema (SDL) for the acquisition service: a pass-through JSON
 * scalar, the addTorrent mutation with its input/result types, and a
 * placeholder Query field (GraphQL requires Query to declare at least one).
 */
export const typeDefs = gql`
  scalar JSON

  input AddTorrentInput {
    torrentToDownload: String!
    comicObjectId: ID!
  }

  type AddTorrentResult {
    result: JSON
  }

  type Query {
    _empty: String
  }

  type Mutation {
    """
    Add a torrent to qBittorrent
    """
    addTorrent(input: AddTorrentInput!): AddTorrentResult
  }
`;

View File

@@ -1,5 +1,10 @@
import type { BrokerOptions, MetricRegistry, ServiceBroker } from "moleculer"; "use strict";
import { Errors } from "moleculer"; import {
BrokerOptions,
Errors,
MetricRegistry,
ServiceBroker,
} from "moleculer";
/** /**
* Moleculer ServiceBroker configuration file * Moleculer ServiceBroker configuration file
@@ -30,7 +35,7 @@ const brokerConfig: BrokerOptions = {
// Namespace of nodes to segment your nodes on the same network. // Namespace of nodes to segment your nodes on the same network.
namespace: "", namespace: "",
// Unique node identifier. Must be unique in a namespace. // Unique node identifier. Must be unique in a namespace.
nodeID: "threetwo-acquistion-service", nodeID: "threetwo-acquisition-service",
// Custom metadata store. Store here what you want. Accessing: `this.broker.metadata` // Custom metadata store. Store here what you want. Accessing: `this.broker.metadata`
metadata: {}, metadata: {},
@@ -42,7 +47,7 @@ const brokerConfig: BrokerOptions = {
// Using colors on the output // Using colors on the output
colors: true, colors: true,
// Print module names with different colors (like docker-compose for containers) // Print module names with different colors (like docker-compose for containers)
moduleColors: false, moduleColors: true,
// Line formatter. It can be "json", "short", "simple", "full", a `Function` or a template string like "{timestamp} {level} {nodeID}/{mod}: {msg}" // Line formatter. It can be "json", "short", "simple", "full", a `Function` or a template string like "{timestamp} {level} {nodeID}/{mod}: {msg}"
formatter: "full", formatter: "full",
// Custom object printer. If not defined, it uses the `util.inspect` method. // Custom object printer. If not defined, it uses the `util.inspect` method.
@@ -59,11 +64,11 @@ const brokerConfig: BrokerOptions = {
// More info: https://moleculer.services/docs/0.14/networking.html // More info: https://moleculer.services/docs/0.14/networking.html
// Note: During the development, you don't need to define it because all services will be loaded locally. // Note: During the development, you don't need to define it because all services will be loaded locally.
// In production you can set it via `TRANSPORTER=nats://localhost:4222` environment variable. // In production you can set it via `TRANSPORTER=nats://localhost:4222` environment variable.
transporter: process.env.REDIS_URI || "redis://localhost:6379", transporter: process.env.REDIS_URI || "redis://localhost:6379", // "NATS"
// Define a cacher. // Define a cacher.
// More info: https://moleculer.services/docs/0.14/caching.html // More info: https://moleculer.services/docs/0.14/caching.html
cacher: "Redis", cacher: "Memory",
// Define a serializer. // Define a serializer.
// Available values: "JSON", "Avro", "ProtoBuf", "MsgPack", "Notepack", "Thrift". // Available values: "JSON", "Avro", "ProtoBuf", "MsgPack", "Notepack", "Thrift".
@@ -86,8 +91,7 @@ const brokerConfig: BrokerOptions = {
// Backoff factor for delay. 2 means exponential backoff. // Backoff factor for delay. 2 means exponential backoff.
factor: 2, factor: 2,
// A function to check failed requests. // A function to check failed requests.
check: (err: Error) => check: (err: Errors.MoleculerError) => err && !!err.retryable,
err && err instanceof Errors.MoleculerRetryableError && !!err.retryable,
}, },
// Limit of calling level. If it reaches the limit, broker will throw an MaxCallLevelError error. (Infinite loop protection) // Limit of calling level. If it reaches the limit, broker will throw an MaxCallLevelError error. (Infinite loop protection)
@@ -134,7 +138,7 @@ const brokerConfig: BrokerOptions = {
// Number of milliseconds to switch from open to half-open state // Number of milliseconds to switch from open to half-open state
halfOpenTime: 10 * 1000, halfOpenTime: 10 * 1000,
// A function to check failed requests. // A function to check failed requests.
check: (err: Error) => err && err instanceof Errors.MoleculerError && err.code >= 500, check: (err: Errors.MoleculerError) => err && err.code >= 500,
}, },
// Settings of bulkhead feature. More info: https://moleculer.services/docs/0.14/fault-tolerance.html#Bulkhead // Settings of bulkhead feature. More info: https://moleculer.services/docs/0.14/fault-tolerance.html#Bulkhead
@@ -195,16 +199,13 @@ const brokerConfig: BrokerOptions = {
middlewares: [], middlewares: [],
// Register custom REPL commands. // Register custom REPL commands.
replCommands: null, /*
// Called after broker created. // Called after broker created.
// created(broker: ServiceBroker): void {}, created : (broker: ServiceBroker): void => {},
// Called after broker started. // Called after broker started.
// async started(broker: ServiceBroker): Promise<void> {}, started: async (broker: ServiceBroker): Promise<void> => {},
stopped: async (broker: ServiceBroker): Promise<void> => {},
// Called after broker stopped. */
// async stopped(broker: ServiceBroker): Promise<void> {},
}; };
export = brokerConfig; export = brokerConfig;

1069
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -23,11 +23,12 @@
"devDependencies": { "devDependencies": {
"@jest/globals": "^29.3.1", "@jest/globals": "^29.3.1",
"@types/jest": "^29.2.3", "@types/jest": "^29.2.3",
"@types/lodash": "^4.17.4",
"@types/node": "^18.11.9", "@types/node": "^18.11.9",
"@types/parse-torrent": "^5.8.7", "@types/parse-torrent": "^5.8.7",
"@types/qs": "^6.9.18",
"@typescript-eslint/eslint-plugin": "^5.44.0", "@typescript-eslint/eslint-plugin": "^5.44.0",
"@typescript-eslint/parser": "^5.44.0", "@typescript-eslint/parser": "^5.44.0",
"axios": "^1.5.0",
"concurrently": "^7.6.0", "concurrently": "^7.6.0",
"cross-env": "^7.0.3", "cross-env": "^7.0.3",
"eslint": "^8.28.0", "eslint": "^8.28.0",
@@ -45,11 +46,19 @@
"typescript": "^4.9.3" "typescript": "^4.9.3"
}, },
"dependencies": { "dependencies": {
"@graphql-tools/schema": "^10.0.31",
"@robertklep/qbittorrent": "^1.0.1", "@robertklep/qbittorrent": "^1.0.1",
"axios": "^1.7.9",
"graphql": "^16.13.1",
"graphql-tag": "^2.12.6",
"ioredis": "^5.0.0", "ioredis": "^5.0.0",
"moleculer": "^0.14.27", "kafkajs": "^2.2.4",
"moleculer-web": "^0.10.5", "lodash": "^4.17.21",
"parse-torrent": "^9.1.5" "moleculer": "^0.14.34",
"moleculer-web": "^0.10.7",
"parse-torrent": "^9.1.5",
"socket.io-client": "^4.7.5",
"string-similarity-alg": "^1.3.2"
}, },
"engines": { "engines": {
"node": ">= 16.x.x" "node": ">= 16.x.x"

View File

@@ -1,9 +1,30 @@
import fs from "fs"; import type { IncomingMessage, ServerResponse } from "http";
import { Service, ServiceBroker } from "moleculer"; import type { ServiceBroker } from "moleculer";
import { Service } from "moleculer";
import ApiGateway from "moleculer-web"; import ApiGateway from "moleculer-web";
interface GraphQLRequest extends IncomingMessage {
body: {
query: string;
variables?: Record<string, unknown>;
operationName?: string;
};
$params: {
query?: string;
variables?: string;
operationName?: string;
};
$ctx: {
broker: ServiceBroker;
};
}
interface GraphQLError {
message: string;
}
export default class ApiService extends Service { export default class ApiService extends Service {
public constructor(broker: ServiceBroker) { constructor(broker: ServiceBroker) {
super(broker); super(broker);
this.parseServiceSchema({ this.parseServiceSchema({
name: "api", name: "api",
@@ -46,9 +67,61 @@ export default class ApiService extends Service {
}, },
{ {
path: "/logs", path: "/acquisition-graphql",
use: [ApiGateway.serveStatic("logs")], whitelist: ["acquisition-graphql.query"],
cors: {
origin: "*",
methods: ["GET", "POST", "OPTIONS"],
allowedHeaders: ["*"],
credentials: false,
maxAge: 3600,
}, },
aliases: {
// eslint-disable-next-line @typescript-eslint/naming-convention
"POST /": async (req: GraphQLRequest, res: ServerResponse) => {
try {
const { query, variables, operationName } = req.body;
const result = await req.$ctx.broker.call("acquisition-graphql.query", {
query,
variables,
operationName,
});
res.setHeader("Content-Type", "application/json");
res.end(JSON.stringify(result));
} catch (error: unknown) {
res.statusCode = 500;
res.setHeader("Content-Type", "application/json");
res.end(JSON.stringify({ errors: [{ message: (error as GraphQLError).message }] }));
}
},
// eslint-disable-next-line @typescript-eslint/naming-convention
"GET /": async (req: GraphQLRequest, res: ServerResponse) => {
try {
const { query, variables: variablesStr, operationName } = req.$params;
const variables = variablesStr ? JSON.parse(variablesStr) : undefined;
const result = await req.$ctx.broker.call("acquisition-graphql.query", {
query,
variables,
operationName,
});
res.setHeader("Content-Type", "application/json");
res.end(JSON.stringify(result));
} catch (error: unknown) {
res.statusCode = 500;
res.setHeader("Content-Type", "application/json");
res.end(JSON.stringify({ errors: [{ message: (error as GraphQLError).message }] }));
}
},
},
bodyParsers: { json: { strict: false, limit: "1MB" } },
mappingPolicy: "restrict",
logging: true,
},
{
path: "/logs",
use: [ApiGateway.serveStatic("logs")],
},
], ],
log4XXResponses: false, log4XXResponses: false,
logRequestParams: true, logRequestParams: true,
@@ -62,7 +135,7 @@ export default class ApiService extends Service {
events: {}, events: {},
methods: {}, methods: {},
started(): any {}, started(): void {},
}); });
} }
} }

View File

@@ -0,0 +1,117 @@
import type { Producer } from "kafkajs";
import { Kafka } from "kafkajs";
import type { Context, ServiceBroker } from "moleculer";
import { Errors, Service } from "moleculer";
// Minimal shape of a wanted issue as returned by the library service.
interface Issue {
  id: string;
  number: number;
}

// A library entry marked as wanted: either the whole volume or a specific
// set of issues within it.
interface Comic {
  wanted: {
    markEntireVolumeWanted?: boolean;
    issues?: Issue[];
    volume: {
      id: string;
      name: string;
    };
  };
}
/**
 * Moleculer service that walks the library's "wanted" comics page by page
 * and enqueues one Kafka job per comic for background search processing.
 */
export default class AutoDownloadService extends Service {
  // Kafka producer; assigned in started(), which Moleculer runs before any
  // action handler, so the definite-assignment assertion is safe.
  private kafkaProducer!: Producer;

  // Page size used when paginating library.getComicsMarkedAsWanted.
  private readonly BATCH_SIZE = 100;

  // @ts-ignore -- Moleculer requires this constructor signature for service instantiation
  constructor(broker: ServiceBroker) {
    super(broker);
    this.parseServiceSchema({
      name: "autodownload",
      actions: {
        searchWantedComics: {
          rest: "POST /searchWantedComics",
          // eslint-disable-next-line @typescript-eslint/no-unused-vars
          handler: async (ctx: Context<Record<string, never>>) => {
            try {
              /* eslint-disable no-await-in-loop */
              // Fetch wanted comics one page at a time; a short page
              // (length < limit) signals the last page and ends the loop.
              let page = 1;
              const limit = this.BATCH_SIZE;
              let comics: Comic[];
              do {
                comics = await this.broker.call(
                  "library.getComicsMarkedAsWanted",
                  { page, limit },
                );
                // Log debugging info
                this.logger.info(
                  "Received comics from getComicsMarkedAsWanted:",
                  JSON.stringify(comics, null, 2),
                );
                if (!Array.isArray(comics)) {
                  this.logger.error(
                    "Invalid response structure",
                    JSON.stringify(comics, null, 2),
                  );
                  throw new Errors.MoleculerError(
                    "Invalid response structure from getComicsMarkedAsWanted",
                    500,
                    "INVALID_RESPONSE_STRUCTURE",
                  );
                }
                this.logger.info(
                  `Fetched ${comics.length} comics from page ${page}`,
                );
                // Enqueue sequentially to keep publish order predictable.
                for (const comic of comics) {
                  await this.produceJobToKafka(comic);
                }
                page += 1;
              } while (comics.length === limit);
              return {
                success: true,
                message: "Jobs enqueued for background processing.",
              };
            } catch (error) {
              this.logger.error("Error in searchWantedComics:", error);
              throw new Errors.MoleculerError(
                "Failed to search wanted comics.",
                500,
                "SEARCH_WANTED_COMICS_ERROR",
                { error },
              );
            }
          },
        },
      },
      methods: {
        // Publishes one comic as a search job on "comic-search-jobs".
        // Errors are logged and swallowed so a single failed publish does
        // not abort the whole pagination loop.
        produceJobToKafka: async (comic: Comic) => {
          const job = { comic };
          try {
            await this.kafkaProducer.send({
              topic: "comic-search-jobs",
              messages: [{ value: JSON.stringify(job) }],
            });
            this.logger.info("Produced job to Kafka:", job);
          } catch (error) {
            this.logger.error("Error producing job to Kafka:", error);
          }
        },
      },
      // Lifecycle: connect the Kafka producer before the service takes traffic.
      async started() {
        const kafka = new Kafka({
          clientId: "comic-search-service",
          brokers: [process.env.KAFKA_BROKER_URI || "localhost:9092"],
        });
        this.kafkaProducer = kafka.producer();
        await this.kafkaProducer.connect();
        this.logger.info("Kafka producer connected successfully.");
      },
      // Lifecycle: release the Kafka connection on shutdown.
      async stopped() {
        await this.kafkaProducer.disconnect();
        this.logger.info("Kafka producer disconnected successfully.");
      },
    });
  }
}

View File

@@ -0,0 +1,348 @@
import type { Consumer, EachMessagePayload, Producer } from "kafkajs";
import { Kafka, logLevel } from "kafkajs";
import { isNil } from "lodash";
import type { ServiceBroker, ServiceSchema } from "moleculer";
import { Service } from "moleculer";
import type { Socket } from "socket.io-client";
import io from "socket.io-client";
import stringSimilarity from "string-similarity-alg";
// Minimal AirDC++ search-result payload this service tracks.
interface SearchPayload {
  id: string;
  name: string;
}

// Socket event envelope: searchResultAdded carries groupedResult,
// searchResultUpdated carries updatedResult.
interface SearchResult {
  groupedResult: { entityId: number; payload: SearchPayload };
  updatedResult: { entityId: number; payload: SearchPayload };
}

// Issue metadata; both camelCase and snake_case issue numbers appear
// upstream, so both are modeled as optional.
interface Issue {
  issueNumber?: string;
  issue_number?: string;
  coverDate?: string;
  year?: number;
}

interface Volume {
  id: string;
  name: string;
}

// A comic marked as wanted: a volume plus either an explicit issue list or
// the markEntireVolumeWanted flag.
interface Comic {
  wanted: {
    volume: Volume;
    issues?: Issue[];
    markEntireVolumeWanted?: boolean;
  };
}

// Kafka message body consumed from the "comic-search-jobs" topic.
interface Job {
  comic: Comic;
}

// A DC++ hub entry as stored in the directConnect settings.
interface Hub {
  value: string;
}

interface DirectConnectSettings {
  client: {
    hubs: Hub[];
  };
}

// Payload of the "searchesSent" socket event.
interface SearchInfo {
  query: {
    pattern: string;
  };
}

interface SearchesSentData {
  searchInfo: SearchInfo;
}

// Search payload annotated with its similarity score against the query.
interface RankedResult extends SearchPayload {
  similarity: number;
}
/**
 * Moleculer service that consumes comic search jobs from Kafka, fires an
 * AirDC++ search per wanted issue, collects the streamed results over
 * Socket.IO, ranks them by name similarity, and publishes the best match
 * back to Kafka and to the UI.
 */
export default class ComicProcessorService extends Service {
  // Kafka consumer for "comic-search-jobs"; assigned in started().
  private kafkaConsumer!: Consumer;

  // Socket.IO client connected to the AirDC++ bridge; assigned in started().
  private socketIOInstance!: Socket;

  // Kafka producer for "comic-search-results"; assigned in started().
  private kafkaProducer!: Producer;

  // NOTE(review): never read or written in this class — candidate for removal.
  private prowlarrResultsMap: Map<string, unknown> = new Map();

  // AirDC++ results keyed by search-instance entityId, filled as
  // searchResultAdded/searchResultUpdated events stream in.
  private airDCPPSearchResults: Map<number, SearchPayload[]> = new Map();

  // Issues for the job currently being processed.
  // NOTE(review): instance-level state shared across jobs; appears safe only
  // because eachMessage awaits jobs sequentially — confirm before raising
  // consumer concurrency.
  private issuesToSearch: Issue[] = [];

  // @ts-ignore -- Moleculer requires this constructor signature for service instantiation
  constructor(
    broker: ServiceBroker,
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    schema: ServiceSchema<object> = { name: "comicProcessor" },
  ) {
    super(broker);
    this.parseServiceSchema({
      name: "comicProcessor",
      methods: {
        // Splits a date string into numeric year/month/day parts. An
        // unparsable string yields NaN fields (new Date("") is Invalid Date).
        parseStringDate: (dateString: string) => {
          const date = new Date(dateString);
          return {
            year: date.getFullYear(),
            month: date.getMonth() + 1,
            day: date.getDate(),
          };
        },
        // Returns the single result (across all entity buckets) whose name
        // is most similar to `query`, or null when there are no results.
        rankSearchResults: (results: Map<number, SearchPayload[]>, query: string): RankedResult | null => {
          // Find the highest-ranked response based on similarity to the search string
          let highestRankedResult: RankedResult | null = null;
          let highestSimilarity = -1;
          results.forEach((resultArray) => {
            resultArray.forEach((result) => {
              const similarity = stringSimilarity("jaro-winkler").compare(
                result.name,
                query,
              );
              if (similarity > highestSimilarity) {
                highestSimilarity = similarity;
                highestRankedResult = { ...result, similarity };
              }
            });
          });
          return highestRankedResult;
        },
        // Runs one Kafka job: resolves the wanted issues (whole volume via
        // ComicVine, or the job's explicit list) and fires one AirDC++
        // search per issue. All errors are logged and swallowed.
        processJob: async (job: Job) => {
          try {
            this.logger.info("Processing job:", JSON.stringify(job, null, 2));
            // Get the hub to search on
            const settings: DirectConnectSettings = await this.broker.call("settings.getSettings", {
              settingsKey: "directConnect",
            });
            const hubs = settings.client.hubs.map((hub: Hub) => hub.value);
            const { comic } = job;
            const { volume, issues, markEntireVolumeWanted } = comic.wanted;
            // If entire volume is marked as wanted, get their details from CV
            if (markEntireVolumeWanted) {
              const fetchedIssues: Issue[] = await this.broker.call(
                "comicvine.getIssuesForVolume",
                { volumeId: volume.id },
              );
              this.issuesToSearch = fetchedIssues;
              this.logger.info(
                `The entire volume with id: ${volume.id} was marked as wanted.`,
              );
              this.logger.info(`Fetched issues for ${volume.id}:`);
              this.logger.info(`${this.issuesToSearch.length} issues to search`);
            } else {
              // Or proceed with `issues` from the wanted object.
              this.issuesToSearch = issues || [];
            }
            /* eslint-disable no-await-in-loop */
            for (const issue of this.issuesToSearch) {
              // Query builder for DC++
              // 1. issue number
              const issueNumber =
                issue.issueNumber || issue.issue_number || "";
              // 2. year
              const { year } = this.parseStringDate(issue.coverDate || "");
              const issueYear = year || issue.year || "";
              // 3. Orchestrate the query
              // NOTE(review): issueNumber/issueYear are only logged below;
              // the search pattern uses the sanitized volume name alone —
              // confirm whether they were meant to be part of the pattern.
              const dcppSearchQuery = {
                query: {
                  pattern: `${volume.name
                    .replace(/[^\w\s]/g, "")
                    .replace(/\s+/g, " ")
                    .trim()}`,
                  extensions: ["cbz", "cbr", "cb7"],
                },
                hub_urls: hubs,
                priority: 5,
              };
              this.logger.info(
                "DC++ search query:",
                JSON.stringify(dcppSearchQuery, null, 4),
              );
              this.logger.debug(`Issue number: ${issueNumber}, Year: ${issueYear}`);
              // NOTE(review): hard-coded AirDC++ host and credentials —
              // these should come from settings or environment variables.
              await this.broker.call("socket.search", {
                query: dcppSearchQuery,
                config: {
                  hostname: "192.168.1.119:5600",
                  protocol: "http",
                  username: "admin",
                  password: "password",
                },
                namespace: "/automated",
              });
            }
            /* eslint-enable no-await-in-loop */
          } catch (error) {
            this.logger.error("Error processing job:", error);
          }
        },
        // Ranks the collected AirDC++ results for `query`, publishes the
        // winner to the "comic-search-results" topic, and notifies the UI
        // through the socket gateway.
        produceResultsToKafka: async (query: string): Promise<void> => {
          try {
            /*
            Match and rank
            */
            const finalResult = this.rankSearchResults(
              this.airDCPPSearchResults,
              query,
            );
            /*
            Kafka messages need to be in a format that can be serialized to JSON,
            and a Map is not directly serializable in a way that retains its structure,
            hence why we use Object.fromEntries
            */
            await this.kafkaProducer.send({
              topic: "comic-search-results",
              messages: [
                {
                  value: JSON.stringify(finalResult),
                },
              ],
            });
            this.logger.info(`Produced results to Kafka.`);
            // socket event for UI
            await this.broker.call("socket.broadcast", {
              namespace: "/",
              event: "searchResultsAvailable",
              args: [
                {
                  query,
                  finalResult,
                },
              ],
            });
          } catch (error) {
            this.logger.error("Error producing results to Kafka:", error);
          }
        },
      },
      // Lifecycle: wire up Kafka (consumer + producer) and the Socket.IO
      // client, then start consuming jobs.
      async started() {
        const kafka = new Kafka({
          clientId: "comic-processor-service",
          brokers: [process.env.KAFKA_BROKER_URI || "localhost:9092"],
          logLevel: logLevel.INFO,
        });
        this.kafkaConsumer = kafka.consumer({ groupId: "comic-processor-group" });
        this.kafkaProducer = kafka.producer();
        // Instrumentation for consumer lifecycle events.
        this.kafkaConsumer.on("consumer.crash", (event: { payload: Error }) => {
          this.logger.error("Consumer crash:", event);
        });
        this.kafkaConsumer.on("consumer.connect", () => {
          this.logger.info("Consumer connected");
        });
        this.kafkaConsumer.on("consumer.disconnect", () => {
          this.logger.info("Consumer disconnected");
        });
        this.kafkaConsumer.on("consumer.network.request_timeout", () => {
          this.logger.warn("Consumer network request timeout");
        });
        await this.kafkaConsumer.connect();
        await this.kafkaProducer.connect();
        await this.kafkaConsumer.subscribe({
          topic: "comic-search-jobs",
          fromBeginning: true,
        });
        await this.kafkaConsumer.run({
          eachMessage: async ({ message }: EachMessagePayload) => {
            if (message.value) {
              const job = JSON.parse(message.value.toString()) as Job;
              await this.processJob(job);
            } else {
              this.logger.warn("Received message with null value");
            }
          },
        });
        // NOTE(review): Socket.IO endpoint is hard-coded — consider env config.
        this.socketIOInstance = io("ws://localhost:3001/automated", {
          transports: ["websocket"],
          withCredentials: true,
        });
        this.socketIOInstance.on("connect", () => {
          this.logger.info("Socket.IO connected successfully.");
        });
        // Handle searchResultAdded event
        this.socketIOInstance.on("searchResultAdded", (result: SearchResult) => {
          const {
            groupedResult: { entityId, payload },
          } = result;
          this.logger.info(
            `AirDC++ Search result added for entityId: ${entityId} - ${payload?.name}`,
          );
          // Lazily create the bucket for this search instance.
          if (!this.airDCPPSearchResults.has(entityId)) {
            this.airDCPPSearchResults.set(entityId, []);
          }
          if (!isNil(payload)) {
            const results = this.airDCPPSearchResults.get(entityId);
            if (results) {
              results.push(payload);
            }
          }
          this.logger.info(
            "Updated airDCPPSearchResults:",
            JSON.stringify(Array.from(this.airDCPPSearchResults.entries()), null, 4),
          );
          this.logger.info(JSON.stringify(payload, null, 4));
        });
        // Handle searchResultUpdated event
        this.socketIOInstance.on("searchResultUpdated", (result: SearchResult) => {
          const {
            updatedResult: { entityId, payload },
          } = result;
          const resultsForInstance = this.airDCPPSearchResults.get(entityId);
          if (resultsForInstance) {
            const toReplaceIndex = resultsForInstance.findIndex((element: SearchPayload) => {
              this.logger.info("search result updated!");
              this.logger.info(JSON.stringify(element, null, 4));
              return element.id === payload.id;
            });
            if (toReplaceIndex !== -1) {
              // Replace the existing result with the updated result
              resultsForInstance[toReplaceIndex] = payload;
              // Optionally, update the map with the modified array
              this.airDCPPSearchResults.set(entityId, resultsForInstance);
            }
          }
        });
        // Handle searchComplete event
        this.socketIOInstance.on("searchesSent", async (data: SearchesSentData) => {
          this.logger.info(
            `Search complete for query: "${data.searchInfo.query.pattern}"`,
          );
          await this.produceResultsToKafka(data.searchInfo.query.pattern);
        });
      },
      // Lifecycle: tear down Kafka clients and the socket connection.
      async stopped() {
        await this.kafkaConsumer.disconnect();
        await this.kafkaProducer.disconnect();
        if (this.socketIOInstance) {
          this.socketIOInstance.close();
        }
      },
    });
  }
}

View File

@@ -0,0 +1,49 @@
import { makeExecutableSchema } from "@graphql-tools/schema";
import type { GraphQLSchema } from "graphql";
import { graphql } from "graphql";
import type { Context, ServiceBroker } from "moleculer";
import { Service } from "moleculer";
import { resolvers } from "../models/graphql/resolvers";
import { typeDefs } from "../models/graphql/typedef";
// Parameters accepted by the acquisition-graphql.query action: a GraphQL
// document plus optional variables and operation name.
interface GraphQLParams {
  query: string;
  variables?: Record<string, unknown>;
  operationName?: string;
}
/**
 * Moleculer service exposing a single "query" action that executes GraphQL
 * operations against the schema built from the local typeDefs/resolvers.
 */
export default class GraphQLService extends Service {
  // Built once in started(); the definite-assignment assertion is safe
  // because Moleculer runs started() before any action handler.
  private graphqlSchema!: GraphQLSchema;

  constructor(broker: ServiceBroker) {
    super(broker);
    this.parseServiceSchema({
      name: "acquisition-graphql",
      actions: {
        query: {
          // Moleculer parameter validation for incoming GraphQL requests.
          params: {
            query: "string",
            variables: { type: "object", optional: true },
            operationName: { type: "string", optional: true },
          },
          // Executes the operation; the broker and Moleculer context are
          // passed as the GraphQL context so resolvers can call services.
          async handler(ctx: Context<GraphQLParams>) {
            const { query, variables, operationName } = ctx.params;
            return graphql({
              schema: this.graphqlSchema,
              source: query,
              variableValues: variables,
              operationName,
              contextValue: { broker: this.broker, ctx },
            });
          },
        },
      },
      started() {
        // Build the executable schema once at startup.
        this.graphqlSchema = makeExecutableSchema({ typeDefs, resolvers });
        this.logger.info("Acquisition GraphQL service started");
      },
    });
  }
}

View File

@@ -1,13 +1,32 @@
"use strict";
import { Context, Service, ServiceBroker, ServiceSchema, Errors } from "moleculer";
import axios from "axios"; import axios from "axios";
import type { Context, ServiceBroker } from "moleculer";
import { Service } from "moleculer";
interface ConnectParams {
host: string;
port: string;
apiKey: string;
}
interface ProwlarrQuery {
host: string;
port: string;
apiKey: string;
query: string;
type: string;
indexerIds: number[];
categories: number[];
limit: number;
offset: number;
}
interface SearchParams {
prowlarrQuery: ProwlarrQuery;
}
export default class ProwlarrService extends Service { export default class ProwlarrService extends Service {
// @ts-ignore // @ts-ignore -- Moleculer requires this constructor signature for service instantiation
public constructor( constructor(broker: ServiceBroker) {
public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "prowlarr" },
) {
super(broker); super(broker);
this.parseServiceSchema({ this.parseServiceSchema({
name: "prowlarr", name: "prowlarr",
@@ -16,13 +35,7 @@ export default class ProwlarrService extends Service {
actions: { actions: {
connect: { connect: {
rest: "POST /connect", rest: "POST /connect",
handler: async ( handler: async (ctx: Context<ConnectParams>) => {
ctx: Context<{
host: string;
port: string;
apiKey: string;
}>,
) => {
const { host, port, apiKey } = ctx.params; const { host, port, apiKey } = ctx.params;
const result = await axios.request({ const result = await axios.request({
url: `http://${host}:${port}/api`, url: `http://${host}:${port}/api`,
@@ -31,14 +44,12 @@ export default class ProwlarrService extends Service {
"X-Api-Key": apiKey, "X-Api-Key": apiKey,
}, },
}); });
console.log(result.data); this.logger.info(result.data);
}, },
}, },
getIndexers: { getIndexers: {
rest: "GET /indexers", rest: "GET /indexers",
handler: async ( handler: async (ctx: Context<ConnectParams>) => {
ctx: Context<{ host: string; port: string; apiKey: string }>,
) => {
const { host, port, apiKey } = ctx.params; const { host, port, apiKey } = ctx.params;
const result = await axios.request({ const result = await axios.request({
url: `http://${host}:${port}/api/v1/indexer`, url: `http://${host}:${port}/api/v1/indexer`,
@@ -52,39 +63,30 @@ export default class ProwlarrService extends Service {
}, },
search: { search: {
rest: "GET /search", rest: "GET /search",
handler: async ( handler: async (ctx: Context<SearchParams>) => {
ctx: Context<{
host: string;
port: string;
apiKey: string;
query: string;
type: string;
indexerIds: [number];
categories: [number];
limit: number;
offset: number;
}>,
) => {
const { const {
indexerIds, prowlarrQuery: {
categories, indexerIds,
host, categories,
port, host,
apiKey, port,
query, apiKey,
type, query,
limit, type,
offset, limit,
offset,
},
} = ctx.params; } = ctx.params;
const indexer = indexerIds[0] ? indexerIds.length === 1 : indexerIds;
const category = categories[0] ? categories.length === 1 : categories;
const result = await axios({ const result = await axios({
url: `http://${host}:${port}/api/v1/search`, url: `http://${host}:${port}/api/v1/search`,
method: "GET", method: "GET",
params: { params: {
query, query,
type, type,
indexerIds, indexer,
categories, category,
limit, limit,
offset, offset,
}, },
@@ -98,7 +100,8 @@ export default class ProwlarrService extends Service {
}, },
ping: { ping: {
rest: "GET /ping", rest: "GET /ping",
handler: async (ctx: Context<{}>) => { // eslint-disable-next-line @typescript-eslint/no-unused-vars
handler: async (ctx: Context<Record<string, never>>) => {
const foo = await axios.request({ const foo = await axios.request({
url: "http://192.168.1.183:9696/ping", url: "http://192.168.1.183:9696/ping",
method: "GET", method: "GET",
@@ -107,7 +110,7 @@ export default class ProwlarrService extends Service {
"X-Api-Key": "163ef9a683874f65b53c7be87354b38b", "X-Api-Key": "163ef9a683874f65b53c7be87354b38b",
}, },
}); });
console.log(foo.data); this.logger.info(foo.data);
return true; return true;
}, },
}, },

View File

@@ -1,12 +1,67 @@
import { readFileSync, writeFileSync } from "fs"; import { readFileSync, writeFileSync } from "fs";
import { qBittorrentClient } from "@robertklep/qbittorrent"; import { qBittorrentClient } from "@robertklep/qbittorrent";
import type { Context, ServiceBroker, ServiceSchema } from "moleculer"; import type { Context, ServiceBroker } from "moleculer";
import { Errors, Service } from "moleculer"; import { Service } from "moleculer";
import parseTorrent from "parse-torrent"; import parseTorrent from "parse-torrent";
interface QBittorrentCredentials {
client: {
host: {
username: string;
password: string;
hostname: string;
port: string;
protocol: string;
};
};
}
interface ConnectParams {
username: string;
password: string;
hostname: string;
port: string;
protocol: string;
name?: string;
}
interface AddTorrentParams {
torrentToDownload: string;
comicObjectId: string;
}
interface InfoHashesParams {
infoHashes: string[];
}
interface TorrentRealTimeStatsParams {
infoHashes: { _id: string; infoHashes: string[] }[];
}
interface TorrentDetail {
torrent: Record<string, unknown>;
}
interface TorrentDetailsGroup {
_id: string;
details: TorrentDetail[];
}
interface SyncMaindata {
torrents: Record<string, Record<string, unknown>>;
rid?: number;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type QBittorrentMeta = any;
export default class QBittorrentService extends Service { export default class QBittorrentService extends Service {
// @ts-ignore private meta!: QBittorrentMeta;
constructor(public broker: ServiceBroker, schema: ServiceSchema<{}> = { name: "qbittorrent" }) {
private rid = 0;
// @ts-ignore -- Moleculer requires this constructor signature for service instantiation
constructor(broker: ServiceBroker) {
super(broker); super(broker);
this.parseServiceSchema({ this.parseServiceSchema({
name: "qbittorrent", name: "qbittorrent",
@@ -16,42 +71,35 @@ export default class QBittorrentService extends Service {
actions: { actions: {
fetchQbittorrentCredentials: { fetchQbittorrentCredentials: {
rest: "GET /fetchQbittorrentCredentials", rest: "GET /fetchQbittorrentCredentials",
handler: async (ctx: Context<{}>) => { // eslint-disable-next-line @typescript-eslint/no-unused-vars
return await this.broker.call("settings.getSettings", { handler: async (ctx: Context<Record<string, never>>) => this.broker.call("settings.getSettings", {
settingsKey: "bittorrent", settingsKey: "bittorrent",
}); }),
},
}, },
connect: { connect: {
rest: "POST /connect", rest: "POST /connect",
handler: async ( handler: (ctx: Context<ConnectParams>) => {
ctx: Context<{
username: string;
password: string;
hostname: string;
port: string;
protocol: string;
name?: string;
}>,
) => {
const { username, password, hostname, port, protocol } = ctx.params; const { username, password, hostname, port, protocol } = ctx.params;
// eslint-disable-next-line new-cap
this.meta = new qBittorrentClient( this.meta = new qBittorrentClient(
`${protocol}://${hostname}:${port}`, `${protocol}://${hostname}:${port}`,
`${username}`, `${username}`,
`${password}`, `${password}`,
); );
console.log(this.meta); this.logger.info(this.meta);
if (this.meta) { if (this.meta) {
return { success: true, message: "Logged in successfully" }; return { success: true, message: "Logged in successfully" };
} }
return { success: false, message: "Failed to connect" };
}, },
}, },
loginWithStoredCredentials: { loginWithStoredCredentials: {
rest: "POST /loginWithStoredCredentials", rest: "POST /loginWithStoredCredentials",
handler: async (ctx: Context<{}>) => { // eslint-disable-next-line @typescript-eslint/no-unused-vars
handler: async (ctx: Context<Record<string, never>>) => {
try { try {
const result: any = await this.broker.call( const result: QBittorrentCredentials | undefined = await this.broker.call(
"qbittorrent.fetchQbittorrentCredentials", "qbittorrent.fetchQbittorrentCredentials",
{}, {},
); );
@@ -69,10 +117,14 @@ export default class QBittorrentService extends Service {
port, port,
protocol, protocol,
}); });
console.log("qbittorrent connection details:"); this.logger.info("qbittorrent connection details:");
console.log(JSON.stringify(connection, null, 4)); this.logger.info(JSON.stringify(connection, null, 4));
return connection; return connection;
} }
return {
error: null,
message: "Qbittorrent credentials not found.",
};
} catch (err) { } catch (err) {
return { return {
error: err, error: err,
@@ -85,8 +137,8 @@ export default class QBittorrentService extends Service {
getClientInfo: { getClientInfo: {
rest: "GET /getClientInfo", rest: "GET /getClientInfo",
handler: async (ctx: Context<{}>) => { // eslint-disable-next-line @typescript-eslint/no-unused-vars
console.log(this.meta.app); handler: async (ctx: Context<Record<string, never>>) => {
await this.broker.call("qbittorrent.loginWithStoredCredentials", {}); await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
return { return {
buildInfo: await this.meta.app.buildInfo(), buildInfo: await this.meta.app.buildInfo(),
@@ -97,22 +149,17 @@ export default class QBittorrentService extends Service {
}, },
addTorrent: { addTorrent: {
rest: "POST /addTorrent", rest: "POST /addTorrent",
handler: async ( handler: async (ctx: Context<AddTorrentParams>) => {
ctx: Context<{
torrentToDownload: any;
comicObjectId: string;
}>,
) => {
try { try {
await this.broker.call("qbittorrent.loginWithStoredCredentials", {}); await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
const { torrentToDownload, comicObjectId } = ctx.params; const { torrentToDownload, comicObjectId } = ctx.params;
console.log(torrentToDownload); this.logger.info(torrentToDownload);
const response = await fetch(torrentToDownload, { const response = await fetch(torrentToDownload, {
method: "GET", method: "GET",
}); });
// Read the buffer to a file // Read the buffer to a file
const buffer = await response.arrayBuffer(); const buffer = await response.arrayBuffer();
writeFileSync(`mithrandir.torrent`, Buffer.from(buffer)); writeFileSync(`mithrandir.torrent`, new Uint8Array(buffer));
// Add the torrent to qbittorrent's queue, paused. // Add the torrent to qbittorrent's queue, paused.
const result = await this.meta.torrents.add({ const result = await this.meta.torrents.add({
torrents: { torrents: {
@@ -135,28 +182,33 @@ export default class QBittorrentService extends Service {
result, result,
}; };
} catch (err) { } catch (err) {
console.error(err); this.logger.error(err);
return {
error: err,
message: "Failed to add torrent",
};
} }
}, },
}, },
getTorrents: { getTorrents: {
rest: "POST /getTorrents", rest: "POST /getTorrents",
handler: async (ctx: Context<{}>) => { // eslint-disable-next-line @typescript-eslint/no-unused-vars
handler: async (ctx: Context<Record<string, never>>) => {
await this.broker.call("qbittorrent.loginWithStoredCredentials", {}); await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
return await this.meta.torrents.info(); return this.meta.torrents.info();
}, },
}, },
getTorrentProperties: { getTorrentProperties: {
rest: "POST /getTorrentProperties", rest: "POST /getTorrentProperties",
handler: async (ctx: Context<{ infoHashes: string[] }>) => { handler: async (ctx: Context<InfoHashesParams>) => {
try { try {
const { infoHashes } = ctx.params; const { infoHashes } = ctx.params;
await this.broker.call("qbittorrent.loginWithStoredCredentials", {}); await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
return await this.meta.torrents.info({ return this.meta.torrents.info({
hashes: infoHashes, hashes: infoHashes,
}); });
} catch (err) { } catch (err) {
console.error("An error occurred:", err); this.logger.error("An error occurred:", err);
// Consider handling the error more gracefully here, possibly returning an error response // Consider handling the error more gracefully here, possibly returning an error response
throw err; // or return a specific error object/message throw err; // or return a specific error object/message
} }
@@ -164,23 +216,21 @@ export default class QBittorrentService extends Service {
}, },
getTorrentRealTimeStats: { getTorrentRealTimeStats: {
rest: "POST /getTorrentRealTimeStats", rest: "POST /getTorrentRealTimeStats",
handler: async ( handler: async (ctx: Context<TorrentRealTimeStatsParams>) => {
ctx: Context<{ infoHashes: { _id: string; infoHashes: string[] }[] }>,
) => {
const { infoHashes } = ctx.params; const { infoHashes } = ctx.params;
await this.broker.call("qbittorrent.loginWithStoredCredentials", {}); await this.broker.call("qbittorrent.loginWithStoredCredentials", {});
try { try {
// Increment rid for each call // Increment rid for each call
this.rid = typeof this.rid === "number" ? this.rid + 1 : 0; this.rid = typeof this.rid === "number" ? this.rid + 1 : 0;
const data = await this.meta.sync.maindata(this.rid); const data: SyncMaindata = await this.meta.sync.maindata(this.rid);
const torrentDetails: any = []; const torrentDetails: TorrentDetailsGroup[] = [];
infoHashes.forEach(({ _id, infoHashes }) => { infoHashes.forEach(({ _id, infoHashes: hashes }) => {
// Initialize an object to hold details for this _id // Initialize an object to hold details for this _id
const details: any = []; const details: TorrentDetail[] = [];
infoHashes.forEach((hash) => { hashes.forEach((hash) => {
// Assuming 'data.torrents[hash]' retrieves the details for the hash // Assuming 'data.torrents[hash]' retrieves the details for the hash
const torrent = data.torrents[hash]; const torrent = data.torrents[hash];
if (torrent) { if (torrent) {
@@ -202,9 +252,9 @@ export default class QBittorrentService extends Service {
// Assuming `data.rid` contains the latest rid from the server // Assuming `data.rid` contains the latest rid from the server
if (data.rid !== undefined) { if (data.rid !== undefined) {
this.rid = data.rid; this.rid = data.rid;
console.log(`rid is ${this.rid}`); this.logger.info(`rid is ${this.rid}`);
} }
console.log(JSON.stringify(torrentDetails, null, 4)); this.logger.info(JSON.stringify(torrentDetails, null, 4));
return torrentDetails; return torrentDetails;
} catch (err) { } catch (err) {
this.logger.error(err); this.logger.error(err);
@@ -212,12 +262,25 @@ export default class QBittorrentService extends Service {
} }
}, },
}, },
determineDownloadApps: {
rest: "",
// 1. Parse the incoming search query
// to make sure that it is well-formed
// At the very least, it should have name, year, number
// 2. Choose between download mediums based on user-preference?
// possible choices are: DC++, Torrent
// 3. Perform the search on those media with the aforementioned search query
// 4. Choose a subset of relevant search results,
// and score them
// 5. Download the highest-scoring, relevant result
handler: () => undefined,
},
}, },
methods: {}, methods: {},
async started() { started() {
console.log(`Initializing rid...`); this.logger.info(`Initializing rid...`);
this.rid = 0; this.rid = 0;
console.log(`rid is ${this.rid}`); this.logger.info(`rid is ${this.rid}`);
}, },
}); });
} }

View File

@@ -1,12 +1,14 @@
{ {
"extends": "./tsconfig.json", "extends": "./tsconfig.json",
"compilerOptions": { "compilerOptions": {
"allowJs": true "noEmit": true
}, },
"include": [ "include": [
"./.*.cjs", // root commonjs files "./**/*.ts",
"./.*.js", // root javascript config files "./**/*.js"
"**/*.js", // javascript files ],
"**/*.ts" // typescript files "exclude": [
] "node_modules",
"dist"
]
} }

View File

@@ -1,103 +1,18 @@
{ {
"compilerOptions": { "compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */ "module": "commonjs",
"esModuleInterop": true,
/* Projects */ "noImplicitAny": true,
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ "removeComments": true,
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ "preserveConstEnums": true,
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ "sourceMap": true,
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ "pretty": true,
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ "target": "es6",
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ "outDir": "dist"
},
/* Language and Environment */ "include": ["./**/*"],
"target": "es2021", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ "exclude": [
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ "node_modules/**/*",
// "jsx": "preserve", /* Specify what JSX code is generated. */ "test"
// "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ ]
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
/* Modules */
"module": "commonjs", /* Specify what module code is generated. */
// "rootDir": "./", /* Specify the root folder within your source files. */
"moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
// "resolveJsonModule": true, /* Enable importing .json files. */
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
"outDir": "./dist", /* Specify an output folder for all emitted files. */
// "removeComments": true, /* Disable emitting comments. */
// "noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
/* Type Checking */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
"useUnknownInCatchVariables": false, /* Default catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true, /* Skip type checking all .d.ts files. */
}
} }