mirror of
https://github.com/tapframe/NuvioStreaming.git
synced 2026-04-21 00:32:04 +00:00
dropped globalcache
This commit is contained in:
parent
ec23dcc3cb
commit
cfa6bb8689
4 changed files with 36 additions and 741 deletions
|
|
@ -37,11 +37,10 @@ import { useMetadata } from '../hooks/useMetadata';
|
||||||
import { useMetadataAssets } from '../hooks/useMetadataAssets';
|
import { useMetadataAssets } from '../hooks/useMetadataAssets';
|
||||||
import { useTheme } from '../contexts/ThemeContext';
|
import { useTheme } from '../contexts/ThemeContext';
|
||||||
import { useTrailer } from '../contexts/TrailerContext';
|
import { useTrailer } from '../contexts/TrailerContext';
|
||||||
import { Stream, GroupedStreams } from '../types/metadata';
|
import { Stream } from '../types/metadata';
|
||||||
import { tmdbService } from '../services/tmdbService';
|
import { tmdbService } from '../services/tmdbService';
|
||||||
import { stremioService } from '../services/stremioService';
|
import { stremioService } from '../services/stremioService';
|
||||||
import { localScraperService } from '../services/localScraperService';
|
import { localScraperService } from '../services/localScraperService';
|
||||||
import { hybridCacheService } from '../services/hybridCacheService';
|
|
||||||
import { VideoPlayerService } from '../services/videoPlayerService';
|
import { VideoPlayerService } from '../services/videoPlayerService';
|
||||||
import { useSettings } from '../hooks/useSettings';
|
import { useSettings } from '../hooks/useSettings';
|
||||||
import QualityBadge from '../components/metadata/QualityBadge';
|
import QualityBadge from '../components/metadata/QualityBadge';
|
||||||
|
|
@ -736,51 +735,7 @@ export const StreamsScreen = () => {
|
||||||
}
|
}
|
||||||
}, [selectedProvider, availableProviders, episodeStreams, groupedStreams, type]);
|
}, [selectedProvider, availableProviders, episodeStreams, groupedStreams, type]);
|
||||||
|
|
||||||
// Check for cached results immediately on mount
|
// Removed global/local cached results pre-check on mount
|
||||||
useEffect(() => {
|
|
||||||
const checkCachedResults = async () => {
|
|
||||||
if (!settings.enableLocalScrapers) return;
|
|
||||||
|
|
||||||
try {
|
|
||||||
let season: number | undefined;
|
|
||||||
let episode: number | undefined;
|
|
||||||
|
|
||||||
if (episodeId && episodeId.includes(':')) {
|
|
||||||
const parts = episodeId.split(':');
|
|
||||||
if (parts.length >= 3) {
|
|
||||||
season = parseInt(parts[1], 10);
|
|
||||||
episode = parseInt(parts[2], 10);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const installedScrapers = await localScraperService.getInstalledScrapers();
|
|
||||||
const userSettings = {
|
|
||||||
enableLocalScrapers: settings.enableLocalScrapers,
|
|
||||||
enabledScrapers: new Set(
|
|
||||||
installedScrapers
|
|
||||||
.filter(scraper => scraper.enabled)
|
|
||||||
.map(scraper => scraper.id)
|
|
||||||
)
|
|
||||||
};
|
|
||||||
const cachedResults = await hybridCacheService.getCachedResults(type, id, season, episode, userSettings);
|
|
||||||
if (cachedResults.validResults.length > 0) {
|
|
||||||
logger.log(`🔍 Found ${cachedResults.validResults.length} cached scraper results on mount`);
|
|
||||||
|
|
||||||
// If we have cached results, trigger the loading flow immediately
|
|
||||||
if (!hasDoneInitialLoadRef.current) {
|
|
||||||
logger.log('🚀 Triggering immediate load due to cached results');
|
|
||||||
// Force a re-render to ensure cached results are displayed
|
|
||||||
setHasStreamProviders(true);
|
|
||||||
setStreamsLoadStart(Date.now());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
if (__DEV__) console.log('[StreamsScreen] Error checking cached results on mount:', error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
checkCachedResults();
|
|
||||||
}, [type, id, episodeId, settings.enableLocalScrapers]);
|
|
||||||
|
|
||||||
// Update useEffect to check for sources
|
// Update useEffect to check for sources
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
|
|
@ -826,52 +781,7 @@ export const StreamsScreen = () => {
|
||||||
}, 500);
|
}, 500);
|
||||||
return () => clearTimeout(timer);
|
return () => clearTimeout(timer);
|
||||||
} else {
|
} else {
|
||||||
// Check for cached streams first before loading
|
// Removed cached streams pre-display logic
|
||||||
if (settings.enableLocalScrapers) {
|
|
||||||
try {
|
|
||||||
let season: number | undefined;
|
|
||||||
let episode: number | undefined;
|
|
||||||
|
|
||||||
if (episodeId && episodeId.includes(':')) {
|
|
||||||
const parts = episodeId.split(':');
|
|
||||||
if (parts.length >= 3) {
|
|
||||||
season = parseInt(parts[1], 10);
|
|
||||||
episode = parseInt(parts[2], 10);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if we have cached streams and load them immediately
|
|
||||||
const cachedStreams = await hybridCacheService.getCachedStreams(type, id, season, episode);
|
|
||||||
if (cachedStreams.length > 0) {
|
|
||||||
logger.log(`🎯 Found ${cachedStreams.length} cached streams, displaying immediately`);
|
|
||||||
|
|
||||||
// Group cached streams by scraper for proper display
|
|
||||||
const groupedCachedStreams: GroupedStreams = {};
|
|
||||||
const scrapersWithCachedResults = new Set<string>();
|
|
||||||
|
|
||||||
// Get cached results to determine which scrapers have results
|
|
||||||
const cachedResults = await hybridCacheService.getCachedResults(type, id, season, episode);
|
|
||||||
|
|
||||||
for (const result of cachedResults.validResults) {
|
|
||||||
if (result.success && result.streams && result.streams.length > 0) {
|
|
||||||
groupedCachedStreams[result.scraperId] = {
|
|
||||||
addonName: result.scraperName,
|
|
||||||
streams: result.streams
|
|
||||||
};
|
|
||||||
scrapersWithCachedResults.add(result.scraperId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the streams state immediately if we have cached results
|
|
||||||
if (Object.keys(groupedCachedStreams).length > 0) {
|
|
||||||
logger.log(`🚀 Immediately displaying ${Object.keys(groupedCachedStreams).length} cached scrapers with streams`);
|
|
||||||
// This will be handled by the useMetadata hook integration
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
if (__DEV__) console.log('[StreamsScreen] Error checking cached streams:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// For series episodes, do not wait for metadata; load directly when episodeId is present
|
// For series episodes, do not wait for metadata; load directly when episodeId is present
|
||||||
if (episodeId) {
|
if (episodeId) {
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,12 @@
|
||||||
import { localScraperCacheService, CachedScraperResult } from './localScraperCacheService';
|
import { localScraperCacheService, CachedScraperResult } from './localScraperCacheService';
|
||||||
import { supabaseGlobalCacheService, GlobalCachedScraperResult } from './supabaseGlobalCacheService';
|
|
||||||
import { logger } from '../utils/logger';
|
import { logger } from '../utils/logger';
|
||||||
import { Stream } from '../types/streams';
|
import { Stream } from '../types/streams';
|
||||||
|
|
||||||
export interface HybridCacheResult {
|
export interface HybridCacheResult {
|
||||||
validResults: Array<CachedScraperResult | GlobalCachedScraperResult>;
|
validResults: Array<CachedScraperResult>;
|
||||||
expiredScrapers: string[];
|
expiredScrapers: string[];
|
||||||
allExpired: boolean;
|
allExpired: boolean;
|
||||||
source: 'local' | 'global' | 'hybrid';
|
source: 'local';
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface HybridCacheStats {
|
export interface HybridCacheStats {
|
||||||
|
|
@ -17,23 +16,11 @@ export interface HybridCacheStats {
|
||||||
oldestEntry: number | null;
|
oldestEntry: number | null;
|
||||||
newestEntry: number | null;
|
newestEntry: number | null;
|
||||||
};
|
};
|
||||||
global: {
|
|
||||||
totalEntries: number;
|
|
||||||
totalSize: number;
|
|
||||||
oldestEntry: number | null;
|
|
||||||
newestEntry: number | null;
|
|
||||||
hitRate: number;
|
|
||||||
};
|
|
||||||
combined: {
|
|
||||||
totalEntries: number;
|
|
||||||
hitRate: number;
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
class HybridCacheService {
|
class HybridCacheService {
|
||||||
private static instance: HybridCacheService;
|
private static instance: HybridCacheService;
|
||||||
private readonly ENABLE_GLOBAL_CACHE = true; // Can be made configurable
|
// Global caching removed; local-only
|
||||||
private readonly FALLBACK_TO_LOCAL = true; // Fallback to local if global fails
|
|
||||||
|
|
||||||
private constructor() {}
|
private constructor() {}
|
||||||
|
|
||||||
|
|
@ -45,7 +32,7 @@ class HybridCacheService {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get cached results with hybrid approach (global first, then local)
|
* Get cached results (local-only)
|
||||||
*/
|
*/
|
||||||
async getCachedResults(
|
async getCachedResults(
|
||||||
type: string,
|
type: string,
|
||||||
|
|
@ -65,56 +52,20 @@ class HybridCacheService {
|
||||||
return true;
|
return true;
|
||||||
};
|
};
|
||||||
|
|
||||||
// Try global cache first if enabled
|
// Local cache only
|
||||||
if (this.ENABLE_GLOBAL_CACHE) {
|
const localResults = await localScraperCacheService.getCachedResults(type, tmdbId, season, episode);
|
||||||
try {
|
|
||||||
const globalResults = await supabaseGlobalCacheService.getCachedResults(type, tmdbId, season, episode);
|
|
||||||
|
|
||||||
// Filter results based on user settings
|
// Filter results based on user settings
|
||||||
const filteredGlobalResults = {
|
const filteredLocalResults = {
|
||||||
...globalResults,
|
...localResults,
|
||||||
validResults: globalResults.validResults.filter(result => isScraperEnabled(result.scraperId)),
|
validResults: localResults.validResults.filter(result => isScraperEnabled(result.scraperId)),
|
||||||
expiredScrapers: globalResults.expiredScrapers.filter(scraperId => isScraperEnabled(scraperId))
|
expiredScrapers: localResults.expiredScrapers.filter(scraperId => isScraperEnabled(scraperId))
|
||||||
};
|
};
|
||||||
|
|
||||||
if (filteredGlobalResults.validResults.length > 0) {
|
logger.log(`[HybridCache] Using local cache: ${filteredLocalResults.validResults.length} results (filtered from ${localResults.validResults.length})`);
|
||||||
logger.log(`[HybridCache] Using global cache: ${filteredGlobalResults.validResults.length} results (filtered from ${globalResults.validResults.length})`);
|
|
||||||
return {
|
|
||||||
...filteredGlobalResults,
|
|
||||||
source: 'global'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('[HybridCache] Global cache failed, falling back to local:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback to local cache
|
|
||||||
if (this.FALLBACK_TO_LOCAL) {
|
|
||||||
const localResults = await localScraperCacheService.getCachedResults(type, tmdbId, season, episode);
|
|
||||||
|
|
||||||
// Filter results based on user settings
|
|
||||||
const filteredLocalResults = {
|
|
||||||
...localResults,
|
|
||||||
validResults: localResults.validResults.filter(result => isScraperEnabled(result.scraperId)),
|
|
||||||
expiredScrapers: localResults.expiredScrapers.filter(scraperId => isScraperEnabled(scraperId))
|
|
||||||
};
|
|
||||||
|
|
||||||
if (filteredLocalResults.validResults.length > 0) {
|
|
||||||
logger.log(`[HybridCache] Using local cache: ${filteredLocalResults.validResults.length} results (filtered from ${localResults.validResults.length})`);
|
|
||||||
return {
|
|
||||||
...filteredLocalResults,
|
|
||||||
source: 'local'
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// No valid results found
|
|
||||||
return {
|
return {
|
||||||
validResults: [],
|
...filteredLocalResults,
|
||||||
expiredScrapers: [],
|
source: 'local'
|
||||||
allExpired: true,
|
|
||||||
source: 'hybrid'
|
|
||||||
};
|
};
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|
@ -123,13 +74,13 @@ class HybridCacheService {
|
||||||
validResults: [],
|
validResults: [],
|
||||||
expiredScrapers: [],
|
expiredScrapers: [],
|
||||||
allExpired: true,
|
allExpired: true,
|
||||||
source: 'hybrid'
|
source: 'local'
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Cache results in both local and global cache
|
* Cache results (local-only)
|
||||||
*/
|
*/
|
||||||
async cacheResults(
|
async cacheResults(
|
||||||
type: string,
|
type: string,
|
||||||
|
|
@ -144,7 +95,7 @@ class HybridCacheService {
|
||||||
episode?: number
|
episode?: number
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
try {
|
try {
|
||||||
// Cache in local storage first (fastest)
|
// Cache in local storage
|
||||||
const localPromises = results.map(result =>
|
const localPromises = results.map(result =>
|
||||||
localScraperCacheService.cacheScraperResult(
|
localScraperCacheService.cacheScraperResult(
|
||||||
type, tmdbId, result.scraperId, result.scraperName,
|
type, tmdbId, result.scraperId, result.scraperName,
|
||||||
|
|
@ -152,17 +103,7 @@ class HybridCacheService {
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
await Promise.all(localPromises);
|
await Promise.all(localPromises);
|
||||||
|
logger.log(`[HybridCache] Cached ${results.length} results in local cache`);
|
||||||
// Cache in global storage (shared across users)
|
|
||||||
if (this.ENABLE_GLOBAL_CACHE) {
|
|
||||||
try {
|
|
||||||
await supabaseGlobalCacheService.cacheResults(type, tmdbId, results, season, episode);
|
|
||||||
logger.log(`[HybridCache] Cached ${results.length} results in both local and global cache`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('[HybridCache] Failed to cache in global storage:', error);
|
|
||||||
// Local cache succeeded, so we continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[HybridCache] Error caching results:', error);
|
logger.error('[HybridCache] Error caching results:', error);
|
||||||
|
|
@ -242,7 +183,7 @@ class HybridCacheService {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Invalidate cache for specific content
|
* Invalidate cache for specific content (local-only)
|
||||||
*/
|
*/
|
||||||
async invalidateContent(
|
async invalidateContent(
|
||||||
type: string,
|
type: string,
|
||||||
|
|
@ -251,18 +192,7 @@ class HybridCacheService {
|
||||||
episode?: number
|
episode?: number
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
try {
|
try {
|
||||||
// Invalidate both local and global cache
|
await localScraperCacheService.invalidateContent(type, tmdbId, season, episode);
|
||||||
const promises = [
|
|
||||||
localScraperCacheService.invalidateContent(type, tmdbId, season, episode)
|
|
||||||
];
|
|
||||||
|
|
||||||
if (this.ENABLE_GLOBAL_CACHE) {
|
|
||||||
promises.push(
|
|
||||||
supabaseGlobalCacheService.invalidateContent(type, tmdbId, season, episode)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await Promise.all(promises);
|
|
||||||
logger.log(`[HybridCache] Invalidated cache for ${type}:${tmdbId}`);
|
logger.log(`[HybridCache] Invalidated cache for ${type}:${tmdbId}`);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[HybridCache] Error invalidating cache:', error);
|
logger.error('[HybridCache] Error invalidating cache:', error);
|
||||||
|
|
@ -270,22 +200,11 @@ class HybridCacheService {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Invalidate cache for specific scraper
|
* Invalidate cache for specific scraper (local-only)
|
||||||
*/
|
*/
|
||||||
async invalidateScraper(scraperId: string): Promise<void> {
|
async invalidateScraper(scraperId: string): Promise<void> {
|
||||||
try {
|
try {
|
||||||
// Invalidate both local and global cache
|
await localScraperCacheService.invalidateScraper(scraperId);
|
||||||
const promises = [
|
|
||||||
localScraperCacheService.invalidateScraper(scraperId)
|
|
||||||
];
|
|
||||||
|
|
||||||
if (this.ENABLE_GLOBAL_CACHE) {
|
|
||||||
promises.push(
|
|
||||||
supabaseGlobalCacheService.invalidateScraper(scraperId)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await Promise.all(promises);
|
|
||||||
logger.log(`[HybridCache] Invalidated cache for scraper ${scraperId}`);
|
logger.log(`[HybridCache] Invalidated cache for scraper ${scraperId}`);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[HybridCache] Error invalidating scraper cache:', error);
|
logger.error('[HybridCache] Error invalidating scraper cache:', error);
|
||||||
|
|
@ -293,113 +212,43 @@ class HybridCacheService {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clear all cached results
|
* Clear all cached results (local-only)
|
||||||
*/
|
*/
|
||||||
async clearAllCache(): Promise<void> {
|
async clearAllCache(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
// Clear both local and global cache
|
await localScraperCacheService.clearAllCache();
|
||||||
const promises = [
|
logger.log('[HybridCache] Cleared all local cache');
|
||||||
localScraperCacheService.clearAllCache()
|
|
||||||
];
|
|
||||||
|
|
||||||
if (this.ENABLE_GLOBAL_CACHE) {
|
|
||||||
promises.push(
|
|
||||||
supabaseGlobalCacheService.clearAllCache()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await Promise.all(promises);
|
|
||||||
logger.log('[HybridCache] Cleared all cache (local and global)');
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[HybridCache] Error clearing cache:', error);
|
logger.error('[HybridCache] Error clearing cache:', error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get combined cache statistics
|
* Get cache statistics (local-only)
|
||||||
*/
|
*/
|
||||||
async getCacheStats(): Promise<HybridCacheStats> {
|
async getCacheStats(): Promise<HybridCacheStats> {
|
||||||
try {
|
try {
|
||||||
const [localStats, globalStats] = await Promise.all([
|
const localStats = await localScraperCacheService.getCacheStats();
|
||||||
localScraperCacheService.getCacheStats(),
|
return { local: localStats };
|
||||||
this.ENABLE_GLOBAL_CACHE ? supabaseGlobalCacheService.getCacheStats() : Promise.resolve({
|
|
||||||
totalEntries: 0,
|
|
||||||
totalSize: 0,
|
|
||||||
oldestEntry: null,
|
|
||||||
newestEntry: null,
|
|
||||||
hitRate: 0
|
|
||||||
})
|
|
||||||
]);
|
|
||||||
|
|
||||||
return {
|
|
||||||
local: localStats,
|
|
||||||
global: globalStats,
|
|
||||||
combined: {
|
|
||||||
totalEntries: localStats.totalEntries + globalStats.totalEntries,
|
|
||||||
hitRate: globalStats.hitRate // Global cache hit rate is more meaningful
|
|
||||||
}
|
|
||||||
};
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[HybridCache] Error getting cache stats:', error);
|
logger.error('[HybridCache] Error getting cache stats:', error);
|
||||||
return {
|
return { local: { totalEntries: 0, totalSize: 0, oldestEntry: null, newestEntry: null } };
|
||||||
local: { totalEntries: 0, totalSize: 0, oldestEntry: null, newestEntry: null },
|
|
||||||
global: { totalEntries: 0, totalSize: 0, oldestEntry: null, newestEntry: null, hitRate: 0 },
|
|
||||||
combined: { totalEntries: 0, hitRate: 0 }
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clean up old entries in both caches
|
* Clean up old entries (local-only)
|
||||||
*/
|
*/
|
||||||
async cleanupOldEntries(): Promise<void> {
|
async cleanupOldEntries(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const promises = [
|
await localScraperCacheService.clearAllCache();
|
||||||
localScraperCacheService.clearAllCache() // Local cache handles cleanup automatically
|
|
||||||
];
|
|
||||||
|
|
||||||
if (this.ENABLE_GLOBAL_CACHE) {
|
|
||||||
promises.push(
|
|
||||||
supabaseGlobalCacheService.cleanupOldEntries()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await Promise.all(promises);
|
|
||||||
logger.log('[HybridCache] Cleaned up old entries');
|
logger.log('[HybridCache] Cleaned up old entries');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[HybridCache] Error cleaning up old entries:', error);
|
logger.error('[HybridCache] Error cleaning up old entries:', error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
// Configuration APIs removed; local-only
|
||||||
* Get cache configuration
|
|
||||||
*/
|
|
||||||
getConfig(): {
|
|
||||||
enableGlobalCache: boolean;
|
|
||||||
fallbackToLocal: boolean;
|
|
||||||
} {
|
|
||||||
return {
|
|
||||||
enableGlobalCache: this.ENABLE_GLOBAL_CACHE,
|
|
||||||
fallbackToLocal: this.FALLBACK_TO_LOCAL
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Update cache configuration
|
|
||||||
*/
|
|
||||||
updateConfig(config: {
|
|
||||||
enableGlobalCache?: boolean;
|
|
||||||
fallbackToLocal?: boolean;
|
|
||||||
}): void {
|
|
||||||
if (config.enableGlobalCache !== undefined) {
|
|
||||||
(this as any).ENABLE_GLOBAL_CACHE = config.enableGlobalCache;
|
|
||||||
}
|
|
||||||
if (config.fallbackToLocal !== undefined) {
|
|
||||||
(this as any).FALLBACK_TO_LOCAL = config.fallbackToLocal;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.log('[HybridCache] Configuration updated:', this.getConfig());
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const hybridCacheService = HybridCacheService.getInstance();
|
export const hybridCacheService = HybridCacheService.getInstance();
|
||||||
|
|
|
||||||
|
|
@ -1421,17 +1421,6 @@ class LocalScraperService {
|
||||||
oldestEntry: number | null;
|
oldestEntry: number | null;
|
||||||
newestEntry: number | null;
|
newestEntry: number | null;
|
||||||
};
|
};
|
||||||
global: {
|
|
||||||
totalEntries: number;
|
|
||||||
totalSize: number;
|
|
||||||
oldestEntry: number | null;
|
|
||||||
newestEntry: number | null;
|
|
||||||
hitRate: number;
|
|
||||||
};
|
|
||||||
combined: {
|
|
||||||
totalEntries: number;
|
|
||||||
hitRate: number;
|
|
||||||
};
|
|
||||||
}> {
|
}> {
|
||||||
return await hybridCacheService.getCacheStats();
|
return await hybridCacheService.getCacheStats();
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,453 +0,0 @@
|
||||||
import { supabase } from './supabaseClient';
|
|
||||||
import { logger } from '../utils/logger';
|
|
||||||
import { Stream } from '../types/streams';
|
|
||||||
|
|
||||||
export interface GlobalCachedScraperResult {
|
|
||||||
streams: Stream[];
|
|
||||||
timestamp: number;
|
|
||||||
success: boolean;
|
|
||||||
error?: string;
|
|
||||||
scraperId: string;
|
|
||||||
scraperName: string;
|
|
||||||
contentKey: string; // e.g., "movie:123" or "tv:123:1:2"
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface GlobalCacheStats {
|
|
||||||
totalEntries: number;
|
|
||||||
totalSize: number;
|
|
||||||
oldestEntry: number | null;
|
|
||||||
newestEntry: number | null;
|
|
||||||
hitRate: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
class SupabaseGlobalCacheService {
|
|
||||||
private static instance: SupabaseGlobalCacheService;
|
|
||||||
private readonly TABLE_NAME = 'scraper_cache';
|
|
||||||
private readonly DEFAULT_TTL_MS = 30 * 60 * 1000; // 30 minutes default TTL
|
|
||||||
private readonly FAILED_RETRY_TTL_MS = 5 * 60 * 1000; // 5 minutes for failed scrapers
|
|
||||||
private readonly SUCCESS_TTL_MS = 60 * 60 * 1000; // 1 hour for successful scrapers
|
|
||||||
private readonly MAX_CACHE_AGE_MS = 7 * 24 * 60 * 60 * 1000; // 7 days max age
|
|
||||||
private readonly BATCH_SIZE = 50; // Batch size for operations
|
|
||||||
|
|
||||||
// Cache hit/miss tracking
|
|
||||||
private cacheHits = 0;
|
|
||||||
private cacheMisses = 0;
|
|
||||||
|
|
||||||
private constructor() {}
|
|
||||||
|
|
||||||
public static getInstance(): SupabaseGlobalCacheService {
|
|
||||||
if (!SupabaseGlobalCacheService.instance) {
|
|
||||||
SupabaseGlobalCacheService.instance = new SupabaseGlobalCacheService();
|
|
||||||
}
|
|
||||||
return SupabaseGlobalCacheService.instance;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generate cache key for content
|
|
||||||
*/
|
|
||||||
private getContentKey(type: string, tmdbId: string, season?: number, episode?: number): string {
|
|
||||||
if (season !== undefined && episode !== undefined) {
|
|
||||||
return `${type}:${tmdbId}:${season}:${episode}`;
|
|
||||||
}
|
|
||||||
return `${type}:${tmdbId}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generate unique key for scraper result
|
|
||||||
*/
|
|
||||||
private getScraperKey(contentKey: string, scraperId: string): string {
|
|
||||||
return `${contentKey}:${scraperId}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if cached result is still valid based on TTL
|
|
||||||
*/
|
|
||||||
private isCacheValid(timestamp: number, success: boolean): boolean {
|
|
||||||
const ttl = success ? this.SUCCESS_TTL_MS : this.FAILED_RETRY_TTL_MS;
|
|
||||||
return Date.now() - timestamp < ttl;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get cached results for content from global cache
|
|
||||||
*/
|
|
||||||
async getCachedResults(
|
|
||||||
type: string,
|
|
||||||
tmdbId: string,
|
|
||||||
season?: number,
|
|
||||||
episode?: number
|
|
||||||
): Promise<{
|
|
||||||
validResults: GlobalCachedScraperResult[];
|
|
||||||
expiredScrapers: string[];
|
|
||||||
allExpired: boolean;
|
|
||||||
}> {
|
|
||||||
try {
|
|
||||||
const contentKey = this.getContentKey(type, tmdbId, season, episode);
|
|
||||||
|
|
||||||
const { data, error } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.select('*')
|
|
||||||
.eq('content_key', contentKey)
|
|
||||||
.gte('created_at', new Date(Date.now() - this.MAX_CACHE_AGE_MS).toISOString());
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
logger.error('[GlobalCache] Error fetching cached results:', error);
|
|
||||||
this.cacheMisses++;
|
|
||||||
return {
|
|
||||||
validResults: [],
|
|
||||||
expiredScrapers: [],
|
|
||||||
allExpired: true
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data || data.length === 0) {
|
|
||||||
this.cacheMisses++;
|
|
||||||
return {
|
|
||||||
validResults: [],
|
|
||||||
expiredScrapers: [],
|
|
||||||
allExpired: true
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filter valid results and identify expired scrapers
|
|
||||||
const validResults: GlobalCachedScraperResult[] = [];
|
|
||||||
const expiredScrapers: string[] = [];
|
|
||||||
|
|
||||||
for (const row of data) {
|
|
||||||
const result: GlobalCachedScraperResult = {
|
|
||||||
streams: row.streams || [],
|
|
||||||
timestamp: new Date(row.created_at).getTime(),
|
|
||||||
success: row.success,
|
|
||||||
error: row.error,
|
|
||||||
scraperId: row.scraper_id,
|
|
||||||
scraperName: row.scraper_name,
|
|
||||||
contentKey: row.content_key
|
|
||||||
};
|
|
||||||
|
|
||||||
if (this.isCacheValid(result.timestamp, result.success)) {
|
|
||||||
validResults.push(result);
|
|
||||||
} else {
|
|
||||||
expiredScrapers.push(result.scraperId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Track cache hits
|
|
||||||
if (validResults.length > 0) {
|
|
||||||
this.cacheHits++;
|
|
||||||
} else {
|
|
||||||
this.cacheMisses++;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.log(`[GlobalCache] Retrieved ${validResults.length} valid results, ${expiredScrapers.length} expired scrapers for ${contentKey}`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
validResults,
|
|
||||||
expiredScrapers,
|
|
||||||
allExpired: validResults.length === 0
|
|
||||||
};
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[GlobalCache] Error getting cached results:', error);
|
|
||||||
this.cacheMisses++;
|
|
||||||
return {
|
|
||||||
validResults: [],
|
|
||||||
expiredScrapers: [],
|
|
||||||
allExpired: true
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Cache results for specific scrapers in global cache
|
|
||||||
*/
|
|
||||||
async cacheResults(
|
|
||||||
type: string,
|
|
||||||
tmdbId: string,
|
|
||||||
results: Array<{
|
|
||||||
scraperId: string;
|
|
||||||
scraperName: string;
|
|
||||||
streams: Stream[] | null;
|
|
||||||
error: Error | null;
|
|
||||||
}>,
|
|
||||||
season?: number,
|
|
||||||
episode?: number
|
|
||||||
): Promise<void> {
|
|
||||||
try {
|
|
||||||
const contentKey = this.getContentKey(type, tmdbId, season, episode);
|
|
||||||
const now = new Date().toISOString();
|
|
||||||
|
|
||||||
// Prepare batch insert data
|
|
||||||
const insertData = results.map(result => ({
|
|
||||||
scraper_key: this.getScraperKey(contentKey, result.scraperId),
|
|
||||||
content_key: contentKey,
|
|
||||||
scraper_id: result.scraperId,
|
|
||||||
scraper_name: result.scraperName,
|
|
||||||
streams: result.streams || [],
|
|
||||||
success: !result.error && result.streams !== null,
|
|
||||||
error: result.error?.message || null,
|
|
||||||
created_at: now,
|
|
||||||
updated_at: now
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Use upsert to handle duplicates
|
|
||||||
const { error } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.upsert(insertData, {
|
|
||||||
onConflict: 'scraper_key',
|
|
||||||
ignoreDuplicates: false
|
|
||||||
});
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
logger.error('[GlobalCache] Error caching results:', error);
|
|
||||||
} else {
|
|
||||||
logger.log(`[GlobalCache] Cached ${results.length} results for ${contentKey}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[GlobalCache] Error caching results:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Cache a single scraper result
|
|
||||||
*/
|
|
||||||
async cacheScraperResult(
|
|
||||||
type: string,
|
|
||||||
tmdbId: string,
|
|
||||||
scraperId: string,
|
|
||||||
scraperName: string,
|
|
||||||
streams: Stream[] | null,
|
|
||||||
error: Error | null,
|
|
||||||
season?: number,
|
|
||||||
episode?: number
|
|
||||||
): Promise<void> {
|
|
||||||
await this.cacheResults(type, tmdbId, [{
|
|
||||||
scraperId,
|
|
||||||
scraperName,
|
|
||||||
streams,
|
|
||||||
error
|
|
||||||
}], season, episode);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get list of scrapers that need to be re-run (expired or not cached globally)
|
|
||||||
*/
|
|
||||||
async getScrapersToRerun(
|
|
||||||
type: string,
|
|
||||||
tmdbId: string,
|
|
||||||
availableScrapers: Array<{ id: string; name: string }>,
|
|
||||||
season?: number,
|
|
||||||
episode?: number
|
|
||||||
): Promise<string[]> {
|
|
||||||
const { validResults, expiredScrapers } = await this.getCachedResults(type, tmdbId, season, episode);
|
|
||||||
|
|
||||||
const validScraperIds = new Set(validResults.map(r => r.scraperId));
|
|
||||||
const expiredScraperIds = new Set(expiredScrapers);
|
|
||||||
|
|
||||||
// Return scrapers that are either expired or not cached globally
|
|
||||||
const scrapersToRerun = availableScrapers
|
|
||||||
.filter(scraper =>
|
|
||||||
!validScraperIds.has(scraper.id) || expiredScraperIds.has(scraper.id)
|
|
||||||
)
|
|
||||||
.map(scraper => scraper.id);
|
|
||||||
|
|
||||||
logger.log(`[GlobalCache] Scrapers to re-run: ${scrapersToRerun.join(', ')}`);
|
|
||||||
|
|
||||||
return scrapersToRerun;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all valid cached streams for content from global cache
|
|
||||||
*/
|
|
||||||
async getCachedStreams(
|
|
||||||
type: string,
|
|
||||||
tmdbId: string,
|
|
||||||
season?: number,
|
|
||||||
episode?: number
|
|
||||||
): Promise<Stream[]> {
|
|
||||||
const { validResults } = await this.getCachedResults(type, tmdbId, season, episode);
|
|
||||||
|
|
||||||
// Flatten all valid streams
|
|
||||||
const allStreams: Stream[] = [];
|
|
||||||
for (const result of validResults) {
|
|
||||||
if (result.success && result.streams) {
|
|
||||||
allStreams.push(...result.streams);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return allStreams;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Invalidate cache for specific content globally
|
|
||||||
*/
|
|
||||||
async invalidateContent(
|
|
||||||
type: string,
|
|
||||||
tmdbId: string,
|
|
||||||
season?: number,
|
|
||||||
episode?: number
|
|
||||||
): Promise<void> {
|
|
||||||
try {
|
|
||||||
const contentKey = this.getContentKey(type, tmdbId, season, episode);
|
|
||||||
|
|
||||||
const { error } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.delete()
|
|
||||||
.eq('content_key', contentKey);
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
logger.error('[GlobalCache] Error invalidating cache:', error);
|
|
||||||
} else {
|
|
||||||
logger.log(`[GlobalCache] Invalidated global cache for ${contentKey}`);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[GlobalCache] Error invalidating cache:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Invalidate cache for specific scraper across all content globally
|
|
||||||
*/
|
|
||||||
async invalidateScraper(scraperId: string): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { error } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.delete()
|
|
||||||
.eq('scraper_id', scraperId);
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
logger.error('[GlobalCache] Error invalidating scraper cache:', error);
|
|
||||||
} else {
|
|
||||||
logger.log(`[GlobalCache] Invalidated global cache for scraper ${scraperId}`);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[GlobalCache] Error invalidating scraper cache:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clear all cached results globally (admin function)
|
|
||||||
*/
|
|
||||||
async clearAllCache(): Promise<void> {
|
|
||||||
try {
|
|
||||||
const { error } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.delete()
|
|
||||||
.neq('id', 0); // Delete all rows
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
logger.error('[GlobalCache] Error clearing cache:', error);
|
|
||||||
} else {
|
|
||||||
logger.log('[GlobalCache] Cleared all global cache');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[GlobalCache] Error clearing cache:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clean up old cache entries (older than MAX_CACHE_AGE_MS)
|
|
||||||
*/
|
|
||||||
async cleanupOldEntries(): Promise<void> {
|
|
||||||
try {
|
|
||||||
const cutoffDate = new Date(Date.now() - this.MAX_CACHE_AGE_MS).toISOString();
|
|
||||||
|
|
||||||
const { error } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.delete()
|
|
||||||
.lt('created_at', cutoffDate);
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
logger.error('[GlobalCache] Error cleaning up old entries:', error);
|
|
||||||
} else {
|
|
||||||
logger.log('[GlobalCache] Cleaned up old cache entries');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[GlobalCache] Error cleaning up old entries:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get global cache statistics
|
|
||||||
*/
|
|
||||||
async getCacheStats(): Promise<GlobalCacheStats> {
|
|
||||||
try {
|
|
||||||
// Get total count
|
|
||||||
const { count: totalEntries, error: countError } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.select('*', { count: 'exact', head: true });
|
|
||||||
|
|
||||||
if (countError) {
|
|
||||||
logger.error('[GlobalCache] Error getting cache stats:', countError);
|
|
||||||
return {
|
|
||||||
totalEntries: 0,
|
|
||||||
totalSize: 0,
|
|
||||||
oldestEntry: null,
|
|
||||||
newestEntry: null,
|
|
||||||
hitRate: 0
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get oldest and newest entries
|
|
||||||
const { data: oldestData } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.select('created_at')
|
|
||||||
.order('created_at', { ascending: true })
|
|
||||||
.limit(1);
|
|
||||||
|
|
||||||
const { data: newestData } = await supabase
|
|
||||||
.from(this.TABLE_NAME)
|
|
||||||
.select('created_at')
|
|
||||||
.order('created_at', { ascending: false })
|
|
||||||
.limit(1);
|
|
||||||
|
|
||||||
const oldestEntry = oldestData?.[0] ? new Date(oldestData[0].created_at).getTime() : null;
|
|
||||||
const newestEntry = newestData?.[0] ? new Date(newestData[0].created_at).getTime() : null;
|
|
||||||
|
|
||||||
// Calculate hit rate
|
|
||||||
const totalRequests = this.cacheHits + this.cacheMisses;
|
|
||||||
const hitRate = totalRequests > 0 ? (this.cacheHits / totalRequests) * 100 : 0;
|
|
||||||
|
|
||||||
return {
|
|
||||||
totalEntries: totalEntries || 0,
|
|
||||||
totalSize: 0, // Size calculation would require additional queries
|
|
||||||
oldestEntry,
|
|
||||||
newestEntry,
|
|
||||||
hitRate
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('[GlobalCache] Error getting cache stats:', error);
|
|
||||||
return {
|
|
||||||
totalEntries: 0,
|
|
||||||
totalSize: 0,
|
|
||||||
oldestEntry: null,
|
|
||||||
newestEntry: null,
|
|
||||||
hitRate: 0
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Reset cache hit/miss statistics
|
|
||||||
*/
|
|
||||||
resetStats(): void {
|
|
||||||
this.cacheHits = 0;
|
|
||||||
this.cacheMisses = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get cache hit/miss statistics
|
|
||||||
*/
|
|
||||||
getHitMissStats(): { hits: number; misses: number; hitRate: number } {
|
|
||||||
const totalRequests = this.cacheHits + this.cacheMisses;
|
|
||||||
const hitRate = totalRequests > 0 ? (this.cacheHits / totalRequests) * 100 : 0;
|
|
||||||
|
|
||||||
return {
|
|
||||||
hits: this.cacheHits,
|
|
||||||
misses: this.cacheMisses,
|
|
||||||
hitRate
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Shared singleton instance of the global cache service (obtained via
// getInstance), exposed as both a named and a default export.
export const supabaseGlobalCacheService = SupabaseGlobalCacheService.getInstance();
export default supabaseGlobalCacheService;
|
|
||||||
Loading…
Reference in a new issue