diff --git a/.github/workflows/deploy_addon.yml b/.github/workflows/deploy_addon.yml index 9d26320..c3d3330 100644 --- a/.github/workflows/deploy_addon.yml +++ b/.github/workflows/deploy_addon.yml @@ -50,5 +50,5 @@ jobs: docker load -i /tmp/docker/torrentio_addon_latest.tar docker stop torrentio-addon docker rm torrentio-addon - docker run -p ${{ secrets.PORT }}:7000 -d --name torrentio-addon --restart always --log-opt max-size=100m --network host -e PORT=${{ secrets.PORT }} -e MONGODB_URI=${{ secrets.MONGODB_URI }} -e DATABASE_URI=${{ secrets.DATABASE_URI }} -e REDIS_URL=${{ secrets.REDIS_URL }} -e METRICS_USER=${{ secrets.METRICS_USER }} -e METRICS_PASSWORD=${{ secrets.METRICS_PASSWORD }} torrentio-addon:latest + docker run -p ${{ secrets.PORT }}:${{ secrets.PORT }} -d --name torrentio-addon --restart always --log-opt max-size=100m --network host -e PORT=${{ secrets.PORT }} -e MONGODB_URI=${{ secrets.MONGODB_URI }} -e DATABASE_URI=${{ secrets.DATABASE_URI }} -e REDIS_URL=${{ secrets.REDIS_URL }} -e METRICS_USER=${{ secrets.METRICS_USER }} -e METRICS_PASSWORD=${{ secrets.METRICS_PASSWORD }} torrentio-addon:latest docker image prune -f diff --git a/addon/addon.js b/addon/addon.js index 1f3b31d..70b4337 100644 --- a/addon/addon.js +++ b/addon/addon.js @@ -19,7 +19,7 @@ const STALE_ERROR_AGE = 7 * 24 * 60 * 60; // 7 days const builder = new addonBuilder(dummyManifest()); const requestQueue = createNamedQueue(200); -const newLimiter = pLimit(50) +const newLimiter = pLimit(20) builder.defineStreamHandler((args) => { if (!args.id.match(/tt\d+/i) && !args.id.match(/kitsu:\d+/i)) { diff --git a/addon/lib/cache.js b/addon/lib/cache.js index e917b0b..eb6c59e 100644 --- a/addon/lib/cache.js +++ b/addon/lib/cache.js @@ -1,4 +1,5 @@ import KeyvMongo from "@keyv/mongo"; +import KeyvRedis from "@keyv/redis"; import { KeyvCacheableMemory } from "cacheable"; import { isStaticUrl } from '../moch/static.js'; @@ -12,73 +13,75 @@ const STREAM_EMPTY_TTL = 60 * 1000; // 1 minute const 
RESOLVED_URL_TTL = 3 * 60 * 60 * 1000; // 3 hours const AVAILABILITY_TTL = 5 * 24 * 60 * 60 * 1000; // 5 days const MESSAGE_VIDEO_URL_TTL = 60 * 1000; // 1 minutes -// When the streams are empty we want to cache it for less time in case of timeouts or failures const MONGO_URI = process.env.MONGODB_URI; +const REDIS_URL = process.env.REDIS_URL; const memoryCache = new KeyvCacheableMemory({ ttl: MESSAGE_VIDEO_URL_TTL, lruSize: Infinity }); -const remoteCache = MONGO_URI && new KeyvMongo(MONGO_URI, { +const redisCache = REDIS_URL && new KeyvRedis(REDIS_URL); +const mongoCache = MONGO_URI && new KeyvMongo(MONGO_URI, { collection: 'torrentio_addon_collection', minPoolSize: 50, maxPoolSize: 200, maxConnecting: 5, }); -async function cacheWrap(cache, key, method, ttl) { - if (!cache) { - return method(); - } - const value = await cache.get(key); - if (value !== undefined) { - return value; - } - const result = await method(); - const ttlValue = ttl instanceof Function ? ttl(result) : ttl; - await cache.set(key, result, ttlValue); - return result; +async function cacheWrapRedis(key, method, ttl) { + const value = redisCache ? await redisCache.get(key) : undefined; + if (value !== undefined) { + try { + return JSON.parse(value); + } catch (e) { + console.warn(`Cache parse error for key ${key}`, e); + } + } + const result = await method(); + const ttlValue = ttl instanceof Function ? ttl(result) : ttl; + if (redisCache) await redisCache.set(key, JSON.stringify(result), ttlValue); + return result; } export function cacheWrapStream(id, method) { const ttl = (streams) => streams.length ? STREAM_TTL : STREAM_EMPTY_TTL; - return cacheWrap(remoteCache, `${STREAM_KEY_PREFIX}:${id}`, method, ttl); + return cacheWrapRedis(`${STREAM_KEY_PREFIX}:${id}`, method, ttl); } export function cacheWrapResolvedUrl(id, method) { const ttl = (url) => isStaticUrl(url) ?
MESSAGE_VIDEO_URL_TTL : RESOLVED_URL_TTL; - return cacheWrap(remoteCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, ttl); + return cacheWrapRedis(`${RESOLVED_URL_KEY_PREFIX}:${id}`, method, ttl); } export function cacheAvailabilityResults(infoHash, fileIds) { const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`; const fileIdsString = fileIds.toString(); const containsFileIds = (array) => array.some(ids => ids.toString() === fileIdsString) - return remoteCache.get(key) + return mongoCache.get(key) .then(result => { const newResult = result || []; if (!containsFileIds(newResult)) { newResult.push(fileIds); newResult.sort((a, b) => b.length - a.length); } - return remoteCache.set(key, newResult, AVAILABILITY_TTL); + return mongoCache.set(key, newResult, AVAILABILITY_TTL); }); } export function removeAvailabilityResults(infoHash, fileIds) { const key = `${AVAILABILITY_KEY_PREFIX}:${infoHash}`; const fileIdsString = fileIds.toString(); - return remoteCache.get(key) + return mongoCache.get(key) .then(result => { const storedIndex = result?.findIndex(ids => ids.toString() === fileIdsString); if (storedIndex >= 0) { result.splice(storedIndex, 1); - return remoteCache.set(key, result, AVAILABILITY_TTL); + return mongoCache.set(key, result, AVAILABILITY_TTL); } }); } export function getCachedAvailabilityResults(infoHashes) { const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`) - return remoteCache.getMany(keys) + return mongoCache.getMany(keys) .then(result => { const availabilityResults = {}; infoHashes.forEach((infoHash, index) => { @@ -96,17 +99,17 @@ export function getCachedAvailabilityResults(infoHashes) { export function cacheMochAvailabilityResult(moch, infoHash, result = { cached: true }) { const key = `${AVAILABILITY_KEY_PREFIX}:${moch}:${infoHash}`; - return remoteCache.set(key, result, AVAILABILITY_TTL); + return mongoCache.set(key, result, AVAILABILITY_TTL); } export function removeMochAvailabilityResult(moch, infoHash) { const key 
= `${AVAILABILITY_KEY_PREFIX}:${moch}:${infoHash}`; - return remoteCache.delete(key); + return mongoCache.delete(key); } export function getMochCachedAvailabilityResults(moch, infoHashes) { const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${moch}:${infoHash}`) - return remoteCache.getMany(keys) + return mongoCache.getMany(keys) .then(result => { const availabilityResults = {}; infoHashes.forEach((infoHash, index) => { diff --git a/addon/moch/moch.js b/addon/moch/moch.js index a6f9e5b..36d4351 100644 --- a/addon/moch/moch.js +++ b/addon/moch/moch.js @@ -79,7 +79,7 @@ export const MochOptions = { const unrestrictQueues = {} Object.values(MochOptions) .map(moch => moch.key) - .forEach(mochKey => unrestrictQueues[mochKey] = createNamedQueue(40)); + .forEach(mochKey => unrestrictQueues[mochKey] = createNamedQueue(20)); export function hasMochConfigured(config) { return Object.keys(MochOptions).find(moch => config?.[moch]) diff --git a/addon/package-lock.json b/addon/package-lock.json index 35a4fc3..2037160 100644 --- a/addon/package-lock.json +++ b/addon/package-lock.json @@ -10,6 +10,7 @@ "license": "MIT", "dependencies": { "@keyv/mongo": "^3.1.0", + "@keyv/redis": "^5.1.6", "@paradise-cloud/easy-debrid": "^3.0.0", "@putdotio/api-client": "^8.42.0", "all-debrid-api": "^1.3.3", @@ -51,6 +52,23 @@ "keyv": "^5.5.5" } }, + "node_modules/@keyv/redis": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/@keyv/redis/-/redis-5.1.6.tgz", + "integrity": "sha512-eKvW6pspvVaU5dxigaIDZr635/Uw6urTXL3gNbY9WTR8d3QigZQT+r8gxYSEOsw4+1cCBsC4s7T2ptR0WC9LfQ==", + "license": "MIT", + "dependencies": { + "@redis/client": "^5.10.0", + "cluster-key-slot": "^1.1.2", + "hookified": "^1.13.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "keyv": "^5.6.0" + } + }, "node_modules/@keyv/serialize": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@keyv/serialize/-/serialize-1.1.1.tgz", @@ -1293,9 +1311,9 @@ } }, 
"node_modules/hookified": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/hookified/-/hookified-1.8.2.tgz", - "integrity": "sha512-5nZbBNP44sFCDjSoB//0N7m508APCgbQ4mGGo1KJGBYyCKNHfry1Pvd0JVHZIxjdnqn8nFRBAN/eFB6Rk/4w5w==", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/hookified/-/hookified-1.15.0.tgz", + "integrity": "sha512-51w+ZZGt7Zw5q7rM3nC4t3aLn/xvKDETsXqMczndvwyVQhAHfUmUuFBRFcos8Iyebtk7OAE9dL26wFNzZVVOkw==", "license": "MIT" }, "node_modules/http-errors": { @@ -1481,9 +1499,9 @@ } }, "node_modules/keyv": { - "version": "5.5.5", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-5.5.5.tgz", - "integrity": "sha512-FA5LmZVF1VziNc0bIdCSA1IoSVnDCqE8HJIZZv2/W8YmoAM50+tnUgJR/gQZwEeIMleuIOnRnHA/UaZRNeV4iQ==", + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-5.6.0.tgz", + "integrity": "sha512-CYDD3SOtsHtyXeEORYRx2qBtpDJFjRTGXUtmNEMGyzYOKj1TE3tycdlho7kA1Ufx9OYWZzg52QFBGALTirzDSw==", "license": "MIT", "peer": true, "dependencies": { diff --git a/addon/package.json b/addon/package.json index 202089e..a97da4a 100644 --- a/addon/package.json +++ b/addon/package.json @@ -10,6 +10,7 @@ "license": "MIT", "dependencies": { "@keyv/mongo": "^3.1.0", + "@keyv/redis": "^5.1.6", "@paradise-cloud/easy-debrid": "^3.0.0", "@putdotio/api-client": "^8.42.0", "all-debrid-api": "^1.3.3",