17 Commits

Author SHA1 Message Date
0f47750bc2 Merge branch 'master' into automated-download-loop 2024-10-23 14:27:44 -04:00
2247411ac8 🪳 Added kafka to the docker-compose deps 2024-05-28 08:40:33 -04:00
e61ecb1143 🔧 Refactor for docker-compose 2024-05-24 14:22:59 -04:00
e01421f17b 🐳 Added all other deps 2024-05-23 23:15:03 -04:00
cc271021e0 🐳 Created a deps docker-compose stack 2024-05-19 21:19:15 -04:00
cc772504ae 🔧 Fixed the Redis disconnection issue 2024-05-17 01:26:16 -04:00
8dcb17a6a0 🔧 Reverted 2024-05-16 14:15:09 -04:00
a06896ffcc 🔧 Reverting to nats for transporter needs 2024-05-16 14:03:07 -04:00
03f6623ed0 🔧 Fixes 2024-05-15 21:27:38 -05:00
66f9d63b44 🔧 Debuggin Redis connectivity issue 2024-05-15 16:58:49 -05:00
a936df3144 🪲 Added a console.log 2024-05-15 12:13:50 -05:00
dc9dabca48 🔧 Fixed REDIS_URI 2024-05-15 12:00:51 -05:00
4680fd0875 ⬅️ Reverted changes 2024-05-15 11:47:08 -05:00
323548c0ff 🔧 WIP Dockerfile fixes 2024-05-15 11:32:11 -05:00
f4563c12c6 🔧 Added startup scripts fixing MongoDB timeouts 2024-05-12 23:35:01 -04:00
1b0cada848 🏗️ Added validation to db mixin 2024-05-11 13:31:10 -04:00
750a74cd9f Merge pull request #10 from rishighan/automated-download-loop
Automated download loop
2024-05-10 22:59:08 -04:00
14 changed files with 634 additions and 254 deletions

View File

@@ -42,6 +42,8 @@ RUN node -v && npm -v
COPY package.json package-lock.json ./ COPY package.json package-lock.json ./
COPY moleculer.config.ts ./ COPY moleculer.config.ts ./
COPY tsconfig.json ./ COPY tsconfig.json ./
COPY scripts ./scripts
RUN chmod +x ./scripts/*
# Install application dependencies # Install application dependencies
RUN npm install RUN npm install
@@ -50,9 +52,8 @@ RUN npm install -g typescript ts-node
# Copy the rest of the application files # Copy the rest of the application files
COPY . . COPY . .
# Build and clean up # clean up
RUN npm run build \ RUN npm prune
&& npm prune
# Expose the application's port # Expose the application's port
EXPOSE 3000 EXPOSE 3000

View File

@@ -1,10 +1,30 @@
import { createClient } from "redis"; // Import the Redis library
const redisURL = new URL(process.env.REDIS_URI); import IORedis from "ioredis";
const pubClient = createClient({ url: `redis://${redisURL.hostname}:6379` }); // Environment variable for Redis URI
(async () => { const redisURI = process.env.REDIS_URI || "redis://localhost:6379";
await pubClient.connect(); console.log(`process.env.REDIS_URI is ${process.env.REDIS_URI}`);
})(); // Creating the publisher client
const subClient = pubClient.duplicate(); const pubClient = new IORedis(redisURI);
export { subClient, pubClient }; // Creating the subscriber client
const subClient = new IORedis(redisURI);
// Handle connection events for the publisher
pubClient.on("connect", () => {
console.log("Publisher client connected to Redis.");
});
pubClient.on("error", (err) => {
console.error("Publisher client failed to connect to Redis:", err);
});
// Handle connection events for the subscriber
subClient.on("connect", () => {
console.log("Subscriber client connected to Redis.");
});
subClient.on("error", (err) => {
console.error("Subscriber client failed to connect to Redis:", err);
});
// Export the clients for use in other parts of the application
export { pubClient, subClient };

View File

@@ -3,7 +3,15 @@ LOGGER=true
LOGLEVEL=info LOGLEVEL=info
SERVICEDIR=dist/services SERVICEDIR=dist/services
TRANSPORTER=nats://nats:4222 VITE_UNDERLYING_HOST=localhost
COMICS_DIRECTORY=/Users/rishi/work/threetwo-core-service/comics
USERDATA_DIRECTORY=/Users/rishi/work/threetwo-core-service/userdata
REDIS_URI=redis://redis:6379
KAFKA_BROKER=kafka1:9092
ELASTICSEARCH_URI=http://elasticsearch:9200
MONGO_URI=mongodb://db:27017/threetwo
UNRAR_BIN_PATH=/opt/homebrew/bin/unrar
SEVENZ_BINARY_PATH=/opt/homebrew/bin/7za
CACHER=Memory CACHER=Memory

View File

@@ -1,58 +1,125 @@
version: "3.3" x-userdata-volume: &userdata-volume
type: bind
source: ${USERDATA_DIRECTORY}
target: /userdata
x-comics-volume: &comics-volume
type: bind
source: ${COMICS_DIRECTORY}
target: /comics
services: services:
core-services:
api:
build: build:
context: . # context: https://github.com/rishighan/threetwo-core-service.git
image: threetwo-library-service context: ./
env_file: docker-compose.env dockerfile: Dockerfile
environment: image: frishi/threetwo-core-service
SERVICES: api container_name: core-services
PORT: 3000
depends_on:
- nats
labels:
- "traefik.enable=true"
- "traefik.http.routers.api-gw.rule=PathPrefix(`/`)"
- "traefik.http.services.api-gw.loadbalancer.server.port=3000"
networks:
- internal
greeter:
build:
context: .
image: threetwo-library-service
env_file: docker-compose.env
environment:
SERVICES: greeter
depends_on:
- nats
networks:
- internal
nats:
image: nats:2
networks:
- internal
traefik:
image: traefik:v2.1
command:
- "--api.insecure=true" # Don't do that in production!
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
ports: ports:
- 3000:80 - "3000:3000"
- 3001:8080 - "3001:3001"
depends_on:
- db
- redis
- elasticsearch
- kafka1
- zoo1
environment:
name: core-services
SERVICES: api,library,imagetransformation,opds,search,settings,jobqueue,socket,torrentjobs
env_file: docker-compose.env
volumes: volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro - *comics-volume
- *userdata-volume
networks: networks:
- internal - proxy
- default
zoo1:
image: confluentinc/cp-zookeeper:7.3.2
hostname: zoo1
container_name: zoo1
ports:
- "2181:2181"
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_SERVER_ID: 1
ZOOKEEPER_SERVERS: zoo1:2888:3888
networks:
- proxy
kafka1:
image: confluentinc/cp-kafka:7.3.2
hostname: kafka1
container_name: kafka1
ports:
- "9092:9092"
- "29092:29092"
- "9999:9999"
environment:
KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka1:19092,EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092,DOCKER://host.docker.internal:29092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,DOCKER:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
KAFKA_BROKER_ID: 1
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_JMX_PORT: 9999
KAFKA_JMX_HOSTNAME: ${DOCKER_HOST_IP:-127.0.0.1}
KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
depends_on:
- zoo1
networks:
- proxy
db:
image: "mongo:latest"
container_name: database
networks:
- proxy
ports:
- "27017:27017"
volumes:
- "mongodb_data:/bitnami/mongodb"
redis:
image: "bitnami/redis:latest"
container_name: redis
hostname: redis
environment:
ALLOW_EMPTY_PASSWORD: "yes"
networks:
- proxy
ports:
- "6379:6379"
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:7.16.2
container_name: elasticsearch
environment:
- "discovery.type=single-node"
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
- "xpack.security.enabled=true"
- "xpack.security.authc.api_key.enabled=true"
- "ELASTIC_PASSWORD=password"
ulimits:
memlock:
soft: -1
hard: -1
ports:
- 9200:9200
networks:
- proxy
networks: networks:
internal: proxy:
external: true
volumes: volumes:
data: mongodb_data:
driver: local
elasticsearch:
driver: local

View File

@@ -2,21 +2,60 @@ const path = require("path");
const mkdir = require("mkdirp").sync; const mkdir = require("mkdirp").sync;
const DbService = require("moleculer-db"); const DbService = require("moleculer-db");
export const DbMixin = (collection, model) => { export const DbMixin = (collection, model) => {
if (process.env.MONGO_URI) { if (!process.env.MONGO_URI) {
const MongooseAdapter = require("moleculer-db-adapter-mongoose"); console.log("MONGO_URI not provided, initializing local storage...");
return { mkdir(path.resolve("./data"));
mixins: [DbService], return { mixins: [DbService] }; // Handle case where no DB URI is provided
adapter: new MongooseAdapter(process.env.MONGO_URI, {
user: process.env.MONGO_INITDB_ROOT_USERNAME,
pass: process.env.MONGO_INITDB_ROOT_PASSWORD,
keepAlive: true,
useUnifiedTopology: true,
family: 4,
}),
model,
};
} }
mkdir(path.resolve("./data"));
const MongooseAdapter = require("moleculer-db-adapter-mongoose");
const adapter = new MongooseAdapter(process.env.MONGO_URI, {
user: process.env.MONGO_INITDB_ROOT_USERNAME,
pass: process.env.MONGO_INITDB_ROOT_PASSWORD,
keepAlive: true,
useNewUrlParser: true,
useUnifiedTopology: true,
});
const connectWithRetry = async (
adapter,
maxRetries = 5,
interval = 5000
) => {
for (let retry = 0; retry < maxRetries; retry++) {
try {
await adapter.connect();
console.log("MongoDB connected successfully!");
return;
} catch (err) {
console.error("MongoDB connection error:", err);
console.log(
`Retrying MongoDB connection in ${
interval / 1000
} seconds...`
);
await new Promise((resolve) => setTimeout(resolve, interval));
}
}
console.error("Failed to connect to MongoDB after several attempts.");
};
return {
mixins: [DbService],
adapter,
model,
collection,
async started() {
await connectWithRetry(this.adapter);
},
async stopped() {
try {
await this.adapter.disconnect();
console.log("MongoDB disconnected");
} catch (err) {
console.error("MongoDB disconnection error:", err);
}
},
};
}; };

View File

@@ -5,6 +5,7 @@ import {
MetricRegistry, MetricRegistry,
ServiceBroker, ServiceBroker,
} from "moleculer"; } from "moleculer";
const RedisTransporter = require("moleculer").Transporters.Redis;
/** /**
* Moleculer ServiceBroker configuration file * Moleculer ServiceBroker configuration file
@@ -90,7 +91,7 @@ const brokerConfig: BrokerOptions = {
// More info: https://moleculer.services/docs/0.14/networking.html // More info: https://moleculer.services/docs/0.14/networking.html
// Note: During the development, you don't need to define it because all services will be loaded locally. // Note: During the development, you don't need to define it because all services will be loaded locally.
// In production you can set it via `TRANSPORTER=nats://localhost:4222` environment variable. // In production you can set it via `TRANSPORTER=nats://localhost:4222` environment variable.
transporter: process.env.REDIS_URI || "redis://localhost:6379", transporter: new RedisTransporter(process.env.REDIS_URI),
// Define a cacher. // Define a cacher.
// More info: https://moleculer.services/docs/0.14/caching.html // More info: https://moleculer.services/docs/0.14/caching.html

169
package-lock.json generated
View File

@@ -38,16 +38,16 @@
"mkdirp": "^0.5.5", "mkdirp": "^0.5.5",
"moleculer-bullmq": "^3.0.0", "moleculer-bullmq": "^3.0.0",
"moleculer-db": "^0.8.23", "moleculer-db": "^0.8.23",
"moleculer-db-adapter-mongoose": "^0.9.2", "moleculer-db-adapter-mongoose": "^0.9.4",
"moleculer-io": "^2.2.0", "moleculer-io": "^2.2.0",
"moleculer-web": "^0.10.5", "moleculer-web": "^0.10.7",
"mongoosastic-ts": "^6.0.3", "mongoosastic-ts": "^6.0.3",
"mongoose": "^6.10.4", "mongoose": "^6.10.4",
"mongoose-paginate-v2": "^1.3.18", "mongoose-paginate-v2": "^1.3.18",
"nats": "^1.3.2", "nats": "^1.3.2",
"opds-extra": "^3.0.10", "opds-extra": "^3.0.10",
"p7zip-threetwo": "^1.0.4", "p7zip-threetwo": "^1.0.4",
"redis": "^4.6.5", "redis": "^4.6.14",
"sanitize-filename-ts": "^1.0.2", "sanitize-filename-ts": "^1.0.2",
"sharp": "^0.33.3", "sharp": "^0.33.3",
"threetwo-ui-typings": "^1.0.14", "threetwo-ui-typings": "^1.0.14",
@@ -63,6 +63,7 @@
"eslint-plugin-import": "^2.20.2", "eslint-plugin-import": "^2.20.2",
"eslint-plugin-prefer-arrow": "^1.2.2", "eslint-plugin-prefer-arrow": "^1.2.2",
"install": "^0.13.0", "install": "^0.13.0",
"ioredis": "^5.4.1",
"jest": "^29.5.0", "jest": "^29.5.0",
"jest-cli": "^29.5.0", "jest-cli": "^29.5.0",
"moleculer-repl": "^0.7.0", "moleculer-repl": "^0.7.0",
@@ -2665,9 +2666,9 @@
} }
}, },
"node_modules/@redis/client": { "node_modules/@redis/client": {
"version": "1.5.9", "version": "1.5.16",
"resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.9.tgz", "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.16.tgz",
"integrity": "sha512-SffgN+P1zdWJWSXBvJeynvEnmnZrYmtKSRW00xl8pOPFOMJjxRR9u0frSxJpPR6Y4V+k54blJjGW7FgxbTI7bQ==", "integrity": "sha512-X1a3xQ5kEMvTib5fBrHKh6Y+pXbeKXqziYuxOUo1ojQNECg4M5Etd1qqyhMap+lFUOAh8S7UYevgJHOm4A+NOg==",
"dependencies": { "dependencies": {
"cluster-key-slot": "1.1.2", "cluster-key-slot": "1.1.2",
"generic-pool": "3.9.0", "generic-pool": "3.9.0",
@@ -2683,25 +2684,25 @@
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}, },
"node_modules/@redis/graph": { "node_modules/@redis/graph": {
"version": "1.1.0", "version": "1.1.1",
"resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.0.tgz", "resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.1.tgz",
"integrity": "sha512-16yZWngxyXPd+MJxeSr0dqh2AIOi8j9yXKcKCwVaKDbH3HTuETpDVPcLujhFYVPtYrngSco31BUcSa9TH31Gqg==", "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==",
"peerDependencies": { "peerDependencies": {
"@redis/client": "^1.0.0" "@redis/client": "^1.0.0"
} }
}, },
"node_modules/@redis/json": { "node_modules/@redis/json": {
"version": "1.0.4", "version": "1.0.6",
"resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.4.tgz", "resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.6.tgz",
"integrity": "sha512-LUZE2Gdrhg0Rx7AN+cZkb1e6HjoSKaeeW8rYnt89Tly13GBI5eP4CwDVr+MY8BAYfCg4/N15OUrtLoona9uSgw==", "integrity": "sha512-rcZO3bfQbm2zPRpqo82XbW8zg4G/w4W3tI7X8Mqleq9goQjAGLL7q/1n1ZX4dXEAmORVZ4s1+uKLaUOg7LrUhw==",
"peerDependencies": { "peerDependencies": {
"@redis/client": "^1.0.0" "@redis/client": "^1.0.0"
} }
}, },
"node_modules/@redis/search": { "node_modules/@redis/search": {
"version": "1.1.3", "version": "1.1.6",
"resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.3.tgz", "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.6.tgz",
"integrity": "sha512-4Dg1JjvCevdiCBTZqjhKkGoC5/BcB7k9j99kdMnaXFXg8x4eyOIVg9487CMv7/BUVkFLZCaIh8ead9mU15DNng==", "integrity": "sha512-mZXCxbTYKBQ3M2lZnEddwEAks0Kc7nauire8q20oA0oA/LoA+E/b5Y5KZn232ztPb1FkIGqo12vh3Lf+Vw5iTw==",
"peerDependencies": { "peerDependencies": {
"@redis/client": "^1.0.0" "@redis/client": "^1.0.0"
} }
@@ -4631,58 +4632,6 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/bullmq": {
"version": "3.15.8",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-3.15.8.tgz",
"integrity": "sha512-k3uimHGhl5svqD7SEak+iI6c5DxeLOaOXzCufI9Ic0ST3nJr69v71TGR4cXCTXdgCff3tLec5HgoBnfyWjgn5A==",
"dependencies": {
"cron-parser": "^4.6.0",
"glob": "^8.0.3",
"ioredis": "^5.3.2",
"lodash": "^4.17.21",
"msgpackr": "^1.6.2",
"semver": "^7.3.7",
"tslib": "^2.0.0",
"uuid": "^9.0.0"
}
},
"node_modules/bullmq/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/bullmq/node_modules/glob": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz",
"integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^5.0.1",
"once": "^1.3.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/bullmq/node_modules/minimatch": {
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
"integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=10"
}
},
"node_modules/bytes": { "node_modules/bytes": {
"version": "3.1.2", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
@@ -7759,9 +7708,9 @@
"integrity": "sha512-kO3CjNfLZ9t+tHxAMd+Xk4v3D/31E91rMs1dHrm7ikEQrlZ8mLDbQ4z3tZfDM48zOkReas2jx8MWSAmN9+c8Fw==" "integrity": "sha512-kO3CjNfLZ9t+tHxAMd+Xk4v3D/31E91rMs1dHrm7ikEQrlZ8mLDbQ4z3tZfDM48zOkReas2jx8MWSAmN9+c8Fw=="
}, },
"node_modules/ioredis": { "node_modules/ioredis": {
"version": "5.3.2", "version": "5.4.1",
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.3.2.tgz", "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.4.1.tgz",
"integrity": "sha512-1DKMMzlIHM02eBBVOFQ1+AolGjs6+xEcM4PDL7NqOS6szq7H9jSaEkIUH6/a5Hl241LzW6JLSiAbNvTQjUupUA==", "integrity": "sha512-2YZsvl7jopIa1gaePkeMtd9rAcSjOOjPtpcLlOeusyO+XH2SK5ZcT+UCrElPP+WVIInh2TzeI4XW9ENaSLVVHA==",
"dependencies": { "dependencies": {
"@ioredis/commands": "^1.1.1", "@ioredis/commands": "^1.1.1",
"cluster-key-slot": "^1.1.0", "cluster-key-slot": "^1.1.0",
@@ -10120,6 +10069,58 @@
"node": ">= 10.x.x" "node": ">= 10.x.x"
} }
}, },
"node_modules/moleculer-bullmq/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/moleculer-bullmq/node_modules/bullmq": {
"version": "3.15.8",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-3.15.8.tgz",
"integrity": "sha512-k3uimHGhl5svqD7SEak+iI6c5DxeLOaOXzCufI9Ic0ST3nJr69v71TGR4cXCTXdgCff3tLec5HgoBnfyWjgn5A==",
"dependencies": {
"cron-parser": "^4.6.0",
"glob": "^8.0.3",
"ioredis": "^5.3.2",
"lodash": "^4.17.21",
"msgpackr": "^1.6.2",
"semver": "^7.3.7",
"tslib": "^2.0.0",
"uuid": "^9.0.0"
}
},
"node_modules/moleculer-bullmq/node_modules/glob": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz",
"integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^5.0.1",
"once": "^1.3.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/moleculer-bullmq/node_modules/minimatch": {
"version": "5.1.6",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
"integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=10"
}
},
"node_modules/moleculer-db": { "node_modules/moleculer-db": {
"version": "0.8.24", "version": "0.8.24",
"resolved": "https://registry.npmjs.org/moleculer-db/-/moleculer-db-0.8.24.tgz", "resolved": "https://registry.npmjs.org/moleculer-db/-/moleculer-db-0.8.24.tgz",
@@ -10138,9 +10139,9 @@
} }
}, },
"node_modules/moleculer-db-adapter-mongoose": { "node_modules/moleculer-db-adapter-mongoose": {
"version": "0.9.3", "version": "0.9.4",
"resolved": "https://registry.npmjs.org/moleculer-db-adapter-mongoose/-/moleculer-db-adapter-mongoose-0.9.3.tgz", "resolved": "https://registry.npmjs.org/moleculer-db-adapter-mongoose/-/moleculer-db-adapter-mongoose-0.9.4.tgz",
"integrity": "sha512-zv77GKBZrAUCnxUESxqyq58t9HUFxB9M0CrnFkxUr87HyqzRcj8A+sAkllFuK8sMBOdgDHSA5HUi7VrchrB2NQ==", "integrity": "sha512-+ZRPfyLAVbBb6St/9RAqiZdWY3hw++fPutwqSPg8AYNL7n/eIQRnvx7oXn+ebnezbuCXg63L1siT9uzyKNzbZw==",
"dependencies": { "dependencies": {
"bluebird": "^3.7.2", "bluebird": "^3.7.2",
"lodash": "^4.17.21" "lodash": "^4.17.21"
@@ -10192,9 +10193,9 @@
} }
}, },
"node_modules/moleculer-web": { "node_modules/moleculer-web": {
"version": "0.10.6", "version": "0.10.7",
"resolved": "https://registry.npmjs.org/moleculer-web/-/moleculer-web-0.10.6.tgz", "resolved": "https://registry.npmjs.org/moleculer-web/-/moleculer-web-0.10.7.tgz",
"integrity": "sha512-MGNIH6mXLU2Wj63bAgoVzdhMKXALp99F5UHuiBgS2ywakdWEUl/q7GlMblvscioCCkXuUWezId85J0yioYxedg==", "integrity": "sha512-/UJtV+O7iQ3aSg/xi/sw3ZswhvzkigzGPjKOR5R97sm2FSihKuLTftUpXlk4dYls7/8c8WSz6H/M/40BenEx9Q==",
"dependencies": { "dependencies": {
"@fastify/busboy": "^1.0.0", "@fastify/busboy": "^1.0.0",
"body-parser": "^1.19.0", "body-parser": "^1.19.0",
@@ -13844,9 +13845,11 @@
] ]
}, },
"node_modules/r2-lcp-js": { "node_modules/r2-lcp-js": {
"version": "1.0.41", "version": "1.0.41",
"resolved": "https://registry.npmjs.org/r2-lcp-js/-/r2-lcp-js-1.0.41.tgz", "resolved": "https://registry.npmjs.org/r2-lcp-js/-/r2-lcp-js-1.0.41.tgz",
"integrity": "sha512-BdJNFcScLINp0IvU+jlaPM8K8WzFeraq0YECc0k8zrbZQIuS7Nt6yGnKhcMmbEGBEqLXiJAaF7aWwF+p7csfbA==", "integrity": "sha512-BdJNFcScLINp0IvU+jlaPM8K8WzFeraq0YECc0k8zrbZQIuS7Nt6yGnKhcMmbEGBEqLXiJAaF7aWwF+p7csfbA==",
"dependencies": { "dependencies": {
"bindings": "^1.5.0", "bindings": "^1.5.0",
"debug": "^4.3.7", "debug": "^4.3.7",
@@ -14115,15 +14118,15 @@
} }
}, },
"node_modules/redis": { "node_modules/redis": {
"version": "4.6.8", "version": "4.6.14",
"resolved": "https://registry.npmjs.org/redis/-/redis-4.6.8.tgz", "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.14.tgz",
"integrity": "sha512-S7qNkPUYrsofQ0ztWlTHSaK0Qqfl1y+WMIxrzeAGNG+9iUZB4HGeBgkHxE6uJJ6iXrkvLd1RVJ2nvu6H1sAzfQ==", "integrity": "sha512-GrNg/e33HtsQwNXL7kJT+iNFPSwE1IPmd7wzV3j4f2z0EYxZfZE7FVTmUysgAtqQQtg5NXF5SNLR9OdO/UHOfw==",
"dependencies": { "dependencies": {
"@redis/bloom": "1.2.0", "@redis/bloom": "1.2.0",
"@redis/client": "1.5.9", "@redis/client": "1.5.16",
"@redis/graph": "1.1.0", "@redis/graph": "1.1.1",
"@redis/json": "1.0.4", "@redis/json": "1.0.6",
"@redis/search": "1.1.3", "@redis/search": "1.1.6",
"@redis/time-series": "1.0.5" "@redis/time-series": "1.0.5"
} }
}, },

View File

@@ -4,8 +4,8 @@
"description": "Endpoints for common operations in ThreeTwo", "description": "Endpoints for common operations in ThreeTwo",
"scripts": { "scripts": {
"build": "tsc --build tsconfig.json", "build": "tsc --build tsconfig.json",
"dev": "ts-node ./node_modules/moleculer/bin/moleculer-runner.js --hot --repl --config moleculer.config.ts services/**/*.service.ts", "dev": "./scripts/start.sh dev",
"start": "moleculer-runner --config dist/moleculer.config.js", "start": "npm run build && ./scripts/start.sh prod",
"cli": "moleculer connect NATS", "cli": "moleculer connect NATS",
"ci": "jest --watch", "ci": "jest --watch",
"test": "jest --coverage", "test": "jest --coverage",
@@ -27,6 +27,7 @@
"eslint-plugin-import": "^2.20.2", "eslint-plugin-import": "^2.20.2",
"eslint-plugin-prefer-arrow": "^1.2.2", "eslint-plugin-prefer-arrow": "^1.2.2",
"install": "^0.13.0", "install": "^0.13.0",
"ioredis": "^5.4.1",
"jest": "^29.5.0", "jest": "^29.5.0",
"jest-cli": "^29.5.0", "jest-cli": "^29.5.0",
"moleculer-repl": "^0.7.0", "moleculer-repl": "^0.7.0",
@@ -68,16 +69,16 @@
"mkdirp": "^0.5.5", "mkdirp": "^0.5.5",
"moleculer-bullmq": "^3.0.0", "moleculer-bullmq": "^3.0.0",
"moleculer-db": "^0.8.23", "moleculer-db": "^0.8.23",
"moleculer-db-adapter-mongoose": "^0.9.2", "moleculer-db-adapter-mongoose": "^0.9.4",
"moleculer-io": "^2.2.0", "moleculer-io": "^2.2.0",
"moleculer-web": "^0.10.5", "moleculer-web": "^0.10.7",
"mongoosastic-ts": "^6.0.3", "mongoosastic-ts": "^6.0.3",
"mongoose": "^6.10.4", "mongoose": "^6.10.4",
"mongoose-paginate-v2": "^1.3.18", "mongoose-paginate-v2": "^1.3.18",
"nats": "^1.3.2", "nats": "^1.3.2",
"opds-extra": "^3.0.10", "opds-extra": "^3.0.10",
"p7zip-threetwo": "^1.0.4", "p7zip-threetwo": "^1.0.4",
"redis": "^4.6.5", "redis": "^4.6.14",
"sanitize-filename-ts": "^1.0.2", "sanitize-filename-ts": "^1.0.2",
"sharp": "^0.33.3", "sharp": "^0.33.3",
"threetwo-ui-typings": "^1.0.14", "threetwo-ui-typings": "^1.0.14",

26
scripts/start.sh Executable file
View File

@@ -0,0 +1,26 @@
#!/bin/bash
# Launch helper for the Moleculer core service.
# Waits for MongoDB to accept TCP connections (via scripts/wait-for-it.sh),
# then starts the service in dev (ts-node, hot reload) or prod (compiled) mode.
# Inputs (environment): MODE ("dev" selects dev mode, anything else => prod),
# MONGO_URI (used only to derive the host:port to wait on).
echo "Starting script with mode: $MODE"
# Extract the host and port from MONGO_URI
# NOTE(review): assumes the plain form mongodb://host:port/db — URIs with
# credentials, replica-set member lists, or mongodb+srv:// will not be parsed
# correctly by this sed pipeline; confirm against the deployed MONGO_URI.
HOST_PORT=$(echo $MONGO_URI | sed -e 's/mongodb:\/\///' -e 's/\/.*$//')
# Assuming the script is called from the project root
PROJECT_ROOT=$(pwd)
echo "Project root: $PROJECT_ROOT"
CONFIG_PATH="$PROJECT_ROOT/moleculer.config.ts"
echo "Configuration path: $CONFIG_PATH"
# Set the correct path for moleculer-runner based on the mode
if [ "$MODE" == "dev" ]; then
# For development: use ts-node
MOLECULER_RUNNER="ts-node $PROJECT_ROOT/node_modules/moleculer/bin/moleculer-runner.js --hot --repl --config $CONFIG_PATH $PROJECT_ROOT/services/**/*.service.ts"
echo "Moleculer Runner for dev: $MOLECULER_RUNNER"
else
# For production: direct node execution of the compiled JavaScript
MOLECULER_RUNNER="moleculer-runner --config $PROJECT_ROOT/dist/moleculer.config.js $PROJECT_ROOT/dist/services/**/*.service.js"
echo "Moleculer Runner for prod: $MOLECULER_RUNNER"
fi
# Run wait-for-it, then start the application
# $MOLECULER_RUNNER is intentionally unquoted so the shell word-splits it into
# a command plus arguments; the globs inside it are expanded at this point.
# NOTE(review): bash treats ** as recursive only with `shopt -s globstar`
# (otherwise it matches a single path segment) — verify all service files are
# actually picked up in the container's shell.
./scripts/wait-for-it.sh $HOST_PORT -- $MOLECULER_RUNNER

190
scripts/wait-for-it.sh Executable file
View File

@@ -0,0 +1,190 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
# WAITFORIT_cmdname: basename of this script, used as the prefix in all
# status/error messages below.
WAITFORIT_cmdname=${0##*/}
# macOS shim: BSD userland ships no GNU `timeout`; require coreutils' gtimeout.
if [[ $OSTYPE == 'darwin'* ]]; then
if ! command -v gtimeout &> /dev/null
then
# NOTE(review): the backticks inside these double quotes are command
# substitution — as written this line actually RUNS `brew install coreutils`
# instead of printing it as a hint; confirm whether that is intended.
# Also note the bare `exit` here exits with status 0.
echo "missing gtimeout (`brew install coreutils`)"
exit
fi
# NOTE(review): aliases are not expanded in non-interactive scripts unless
# `shopt -s expand_aliases` is set, so this shim may never take effect; a
# function wrapper (timeout() { gtimeout "$@"; }) would be reliable.
alias timeout=gtimeout
fi
# echoerr: echo all arguments to stderr, unless --quiet was requested.
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
# usage: print the CLI synopsis to stderr and exit with status 1.
# (The heredoc body below is program output and must not be altered.)
usage()
{
cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict Only execute subcommand if the test succeeds
-q | --quiet Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit 1
}
# wait_for: poll $WAITFORIT_HOST:$WAITFORIT_PORT once per second until a TCP
# connect succeeds; returns the status of the last probe (0 on success).
# Probe mechanism: `nc -z` when running under busybox (WAITFORIT_ISBUSY=1),
# otherwise bash's built-in /dev/tcp pseudo-device.
# Note: this loop has no timeout of its own — the timeout is enforced
# externally (see wait_for_wrapper / the `timeout` command).
wait_for()
{
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
else
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
fi
WAITFORIT_start_ts=$(date +%s)
while :
do
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
WAITFORIT_result=$?
else
# Opening /dev/tcp/HOST/PORT fails (non-zero) while the port is closed;
# the subshell keeps the redirection failure from killing the script.
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
WAITFORIT_result=$?
fi
if [[ $WAITFORIT_result -eq 0 ]]; then
WAITFORIT_end_ts=$(date +%s)
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
break
fi
sleep 1
done
return $WAITFORIT_result
}
# wait_for_wrapper: enforce the timeout by re-invoking this same script ($0)
# with --child under the `timeout` command, run in the background so SIGINT
# can still interrupt the wait. Returns the child's exit status — non-zero
# means the timeout expired (or the probe failed) before the port opened.
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
else
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
fi
WAITFORIT_PID=$!
# Forward Ctrl-C to the child's process group so the background wait dies too.
trap "kill -INT -$WAITFORIT_PID" INT
wait $WAITFORIT_PID
WAITFORIT_RESULT=$?
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
fi
return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
# Combined host:port form; split on the colon via word splitting.
WAITFORIT_hostport=(${1//:/ })
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
shift 1
;;
--child)
# Internal flag: set only by wait_for_wrapper's re-invocation of $0.
WAITFORIT_CHILD=1
shift 1
;;
-q | --quiet)
WAITFORIT_QUIET=1
shift 1
;;
-s | --strict)
WAITFORIT_STRICT=1
shift 1
;;
-h)
WAITFORIT_HOST="$2"
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
shift 2
;;
--host=*)
WAITFORIT_HOST="${1#*=}"
shift 1
;;
-p)
WAITFORIT_PORT="$2"
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
shift 2
;;
--port=*)
WAITFORIT_PORT="${1#*=}"
shift 1
;;
-t)
WAITFORIT_TIMEOUT="$2"
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
WAITFORIT_TIMEOUT="${1#*=}"
shift 1
;;
--)
# Everything after -- is the command to exec once the port is reachable.
shift
WAITFORIT_CLI=("$@")
break
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
echoerr "Error: you need to provide a host and port to test."
usage
fi
# Defaults: 15-second timeout, non-strict, parent (not --child) mode, verbose.
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# Check to see if timeout is from busybox?
# Resolve symlinks so a busybox-provided `timeout` is detected by its path.
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
# Check if busybox timeout uses -t flag
# (recent Alpine versions don't support -t anymore)
if timeout &>/dev/stdout | grep -q -e '-t '; then
WAITFORIT_BUSYTIMEFLAG="-t"
fi
else
WAITFORIT_ISBUSY=0
fi
# Child mode: just poll and report via exit status. Parent mode: poll (under
# the external timeout when one is set), then optionally exec the trailing
# command captured in WAITFORIT_CLI.
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
wait_for
WAITFORIT_RESULT=$?
exit $WAITFORIT_RESULT
else
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
wait_for_wrapper
WAITFORIT_RESULT=$?
else
wait_for
WAITFORIT_RESULT=$?
fi
fi
if [[ $WAITFORIT_CLI != "" ]]; then
# In strict mode, refuse to run the command when the wait failed.
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
exit $WAITFORIT_RESULT
fi
# Replace this process with the requested command (preserves its exit code).
exec "${WAITFORIT_CLI[@]}"
else
exit $WAITFORIT_RESULT
fi

View File

@@ -14,7 +14,6 @@ import { pubClient } from "../config/redis.config";
import path from "path"; import path from "path";
const { MoleculerError } = require("moleculer").Errors; const { MoleculerError } = require("moleculer").Errors;
console.log(process.env.REDIS_URI);
export default class JobQueueService extends Service { export default class JobQueueService extends Service {
public constructor(public broker: ServiceBroker) { public constructor(public broker: ServiceBroker) {
super(broker); super(broker);
@@ -22,9 +21,10 @@ export default class JobQueueService extends Service {
name: "jobqueue", name: "jobqueue",
hooks: {}, hooks: {},
mixins: [DbMixin("comics", Comic), BullMqMixin], mixins: [DbMixin("comics", Comic), BullMqMixin],
settings: { settings: {
bullmq: { bullmq: {
client: process.env.REDIS_URI, client: pubClient,
}, },
}, },
actions: { actions: {
@@ -57,20 +57,24 @@ export default class JobQueueService extends Service {
handler: async ( handler: async (
ctx: Context<{ action: string; description: string }> ctx: Context<{ action: string; description: string }>
) => { ) => {
const { action, description } = ctx.params; try {
// Enqueue the job const { action, description } = ctx.params;
const job = await this.localQueue( // Enqueue the job
ctx, const job = await this.localQueue(
action, ctx,
ctx.params, action,
{ {},
priority: 10, {
} priority: 10,
); }
console.log(`Job ${job.id} enqueued`); );
console.log(`${description}`); console.log(`Job ${job.id} enqueued`);
console.log(`${description}`);
return job.id; return job.id;
} catch (error) {
console.error("Failed to enqueue job:", error);
}
}, },
}, },

View File

@@ -59,9 +59,11 @@ import path from "path";
import { COMICS_DIRECTORY, USERDATA_DIRECTORY } from "../constants/directories"; import { COMICS_DIRECTORY, USERDATA_DIRECTORY } from "../constants/directories";
import AirDCPPSocket from "../shared/airdcpp.socket"; import AirDCPPSocket from "../shared/airdcpp.socket";
console.log(`MONGO -> ${process.env.MONGO_URI}`); export default class LibraryService extends Service {
export default class ImportService extends Service { public constructor(
public constructor(public broker: ServiceBroker) { public broker: ServiceBroker,
schema: ServiceSchema<{}> = { name: "library" }
) {
super(broker); super(broker);
this.parseServiceSchema({ this.parseServiceSchema({
name: "library", name: "library",
@@ -164,78 +166,52 @@ export default class ImportService extends Service {
}, },
newImport: { newImport: {
rest: "POST /newImport", rest: "POST /newImport",
// params: {}, async handler(ctx) {
async handler( const { sessionId } = ctx.params;
ctx: Context<{
extractionOptions?: any;
sessionId: string;
}>
) {
try { try {
// Get params to be passed to the import jobs // Initialize Redis counters once at the start of the import
const { sessionId } = ctx.params; await pubClient.set("completedJobCount", 0);
// 1. Walk the Source folder await pubClient.set("failedJobCount", 0);
klaw(path.resolve(COMICS_DIRECTORY))
// 1.1 Filter on .cb* extensions // Convert klaw to use a promise-based approach for better flow control
.pipe( const files = await this.getComicFiles(
through2.obj(function (item, enc, next) { COMICS_DIRECTORY
let fileExtension = path.extname( );
item.path for (const file of files) {
); console.info(
if ( "Found a file at path:",
[".cbz", ".cbr", ".cb7"].includes( file.path
fileExtension );
) const comicExists = await Comic.exists({
) { "rawFileDetails.name": path.basename(
this.push(item); file.path,
} path.extname(file.path)
next(); ),
})
)
// 1.2 Pipe filtered results to the next step
// Enqueue the job in the queue
.on("data", async (item) => {
console.info(
"Found a file at path: %s",
item.path
);
let comicExists = await Comic.exists({
"rawFileDetails.name": `${path.basename(
item.path,
path.extname(item.path)
)}`,
});
if (!comicExists) {
// 2.1 Reset the job counters in Redis
await pubClient.set(
"completedJobCount",
0
);
await pubClient.set(
"failedJobCount",
0
);
// 2.2 Send the extraction job to the queue
this.broker.call("jobqueue.enqueue", {
fileObject: {
filePath: item.path,
fileSize: item.stats.size,
},
sessionId,
importType: "new",
action: "enqueue.async",
});
} else {
console.log(
"Comic already exists in the library."
);
}
})
.on("end", () => {
console.log("All files traversed.");
}); });
if (!comicExists) {
// Send the extraction job to the queue
await this.broker.call("jobqueue.enqueue", {
fileObject: {
filePath: file.path,
fileSize: file.stats.size,
},
sessionId,
importType: "new",
action: "enqueue.async",
});
} else {
console.log(
"Comic already exists in the library."
);
}
}
console.log("All files traversed.");
} catch (error) { } catch (error) {
console.log(error); console.error(
"Error during newImport processing:",
error
);
} }
}, },
}, },
@@ -862,7 +838,35 @@ export default class ImportService extends Service {
}, },
}, },
}, },
methods: {}, methods: {
// Method to walk the directory and filter comic files
getComicFiles: (directory) => {
return new Promise((resolve, reject) => {
const files = [];
klaw(directory)
.pipe(
through2.obj(function (item, enc, next) {
const fileExtension = path.extname(
item.path
);
if (
[".cbz", ".cbr", ".cb7"].includes(
fileExtension
)
) {
this.push(item);
}
next();
})
)
.on("data", (item) => {
files.push(item);
})
.on("end", () => resolve(files))
.on("error", (err) => reject(err));
});
},
},
}); });
} }
} }

View File

@@ -1,7 +1,6 @@
"use strict"; "use strict";
import { Service, ServiceBroker, ServiceSchema, Context } from "moleculer"; import { Service, ServiceBroker, ServiceSchema, Context } from "moleculer";
import { JobType } from "moleculer-bullmq"; import { JobType } from "moleculer-bullmq";
import { createClient } from "redis";
import { createAdapter } from "@socket.io/redis-adapter"; import { createAdapter } from "@socket.io/redis-adapter";
import Session from "../models/session.model"; import Session from "../models/session.model";
import { pubClient, subClient } from "../config/redis.config"; import { pubClient, subClient } from "../config/redis.config";
@@ -274,6 +273,22 @@ export default class SocketService extends Service {
}, },
}, },
async started() { async started() {
this.logger.info("Starting Socket Service...");
this.logger.debug("pubClient:", pubClient);
this.logger.debug("subClient:", subClient);
if (!pubClient || !subClient) {
this.logger.error("Redis clients are not initialized!");
throw new Error("Redis clients are not initialized!");
}
// Additional checks or logic if necessary
if (pubClient.status !== "ready") {
await pubClient.connect();
}
if (subClient.status !== "ready") {
await subClient.connect();
}
this.io.on("connection", async (socket) => { this.io.on("connection", async (socket) => {
console.log( console.log(
`socket.io server connected to client with session ID: ${socket.id}` `socket.io server connected to client with session ID: ${socket.id}`

View File

@@ -9,6 +9,7 @@ import {
import { DbMixin } from "../mixins/db.mixin"; import { DbMixin } from "../mixins/db.mixin";
import Comic from "../models/comic.model"; import Comic from "../models/comic.model";
import BullMqMixin from "moleculer-bullmq"; import BullMqMixin from "moleculer-bullmq";
import { pubClient } from "../config/redis.config";
const { MoleculerError } = require("moleculer").Errors; const { MoleculerError } = require("moleculer").Errors;
export default class ImageTransformation extends Service { export default class ImageTransformation extends Service {
@@ -23,7 +24,7 @@ export default class ImageTransformation extends Service {
mixins: [DbMixin("comics", Comic), BullMqMixin], mixins: [DbMixin("comics", Comic), BullMqMixin],
settings: { settings: {
bullmq: { bullmq: {
client: process.env.REDIS_URI, client: pubClient,
}, },
}, },
hooks: {}, hooks: {},