Mirror of https://github.com/tapframe/NuvioStreaming.git (synced 2026-01-11 20:10:25 +00:00)
cache changes

Commit: 0a04ba5743 (parent 8b1a40d2e2)
8 changed files with 108 additions and 916 deletions
@@ -4,6 +4,7 @@ module.exports = function (api) {
     presets: ['babel-preset-expo'],
     plugins: [
       'react-native-worklets/plugin',
+      'react-native-boost/plugin',
     ],
     env: {
       production: {
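The hunk above registers the react-native-boost Babel plugin next to the existing worklets plugin. As a rough sketch, the resulting babel.config.js would look something like this (the api.cache call and the production env contents are assumptions, since the diff only shows the surrounding context):

module.exports = function (api) {
  api.cache(true); // assumed; not visible in the diff
  return {
    presets: ['babel-preset-expo'],
    plugins: [
      'react-native-worklets/plugin',
      'react-native-boost/plugin',
    ],
    env: {
      production: {
        // production-only plugins (not shown in this diff)
      },
    },
  };
};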
package-lock.json (generated file, 3 changes)
@@ -17,7 +17,6 @@
     "@expo/metro-runtime": "~6.1.2",
     "@expo/vector-icons": "^15.0.2",
     "@gorhom/bottom-sheet": "^5.2.6",
-    "@legendapp/list": "^2.0.13",
     "@lottiefiles/dotlottie-react": "^0.6.5",
     "@react-native-async-storage/async-storage": "2.2.0",
     "@react-native-community/blur": "^4.4.1",

@@ -81,7 +80,7 @@
     "react-native-svg": "15.12.1",
     "react-native-url-polyfill": "^2.0.0",
     "react-native-vector-icons": "^10.3.0",
-    "react-native-video": "^6.17.0",
+    "react-native-video": "^6.12.0",
     "react-native-web": "^0.21.0",
     "react-native-wheel-color-picker": "^1.3.1",
     "react-native-worklets": "^0.6.1",
@@ -68,6 +68,7 @@
     "posthog-react-native": "^4.4.0",
     "react": "19.1.0",
     "react-native": "0.81.4",
+    "react-native-boost": "^0.6.2",
     "react-native-bottom-tabs": "^0.12.2",
     "react-native-gesture-handler": "~2.28.0",
     "react-native-get-random-values": "^1.11.0",
@@ -107,28 +107,6 @@ interface UseMetadataReturn {
   imdbId: string | null;
   scraperStatuses: ScraperStatus[];
   activeFetchingScrapers: string[];
-  clearScraperCache: () => Promise<void>;
-  invalidateScraperCache: (scraperId: string) => Promise<void>;
-  invalidateContentCache: (type: string, tmdbId: string, season?: number, episode?: number) => Promise<void>;
-  getScraperCacheStats: () => Promise<{
-    local: {
-      totalEntries: number;
-      totalSize: number;
-      oldestEntry: number | null;
-      newestEntry: number | null;
-    };
-    global: {
-      totalEntries: number;
-      totalSize: number;
-      oldestEntry: number | null;
-      newestEntry: number | null;
-      hitRate: number;
-    };
-    combined: {
-      totalEntries: number;
-      hitRate: number;
-    };
-  }>;
 }

 export const useMetadata = ({ id, type, addonId }: UseMetadataProps): UseMetadataReturn => {
@@ -320,7 +298,38 @@ export const useMetadata = ({ id, type, addonId }: UseMetadataProps): UseMetadataReturn => {
           }
         });
       } else {
-        if (__DEV__) logger.log(`🤷 [${logPrefix}:${sourceName}] No streams found for addon ${addonName} (${addonId})`);
+        // Even providers with no streams should be added to the streams object
+        // This ensures streamsEmpty becomes false and UI shows available streams progressively
+        if (__DEV__) logger.log(`🤷 [${logPrefix}:${sourceName}] No streams found for addon ${addonName} (${addonId})`);
+
+        debouncedStreamUpdate(() => {
+          const updateState = (prevState: GroupedStreams): GroupedStreams => {
+            if (__DEV__) logger.log(`🔄 [${logPrefix}:${sourceName}] Adding empty provider ${addonName} (${addonId}) to state`);
+            return {
+              ...prevState,
+              [addonId]: {
+                addonName: addonName,
+                streams: [] // Empty array for providers with no streams
+              }
+            };
+          };
+
+          // Track response order for addons
+          setAddonResponseOrder(prevOrder => {
+            if (!prevOrder.includes(addonId)) {
+              return [...prevOrder, addonId];
+            }
+            return prevOrder;
+          });
+
+          if (isEpisode) {
+            setEpisodeStreams(updateState);
+            setLoadingEpisodeStreams(false);
+          } else {
+            setGroupedStreams(updateState);
+            setLoadingStreams(false);
+          }
+        });
       }
     } else {
       // Handle case where callback provides null streams without error (e.g., empty results)
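The added branch registers providers that returned zero streams instead of silently dropping them. That matters because the screen derives its empty/loading state from the keys of the grouped-streams object; a minimal sketch of that derivation follows (the GroupedStreams shape is taken from the code above, while the Stream type and the streamsEmpty check are assumptions about the consuming component):

type Stream = { url: string; title?: string }; // assumed minimal shape

interface GroupedStreams {
  [addonId: string]: { addonName: string; streams: Stream[] };
}

// Once an empty provider is inserted, the object gains a key, so the UI can
// leave the blanket "loading" state and render providers progressively.
const streamsEmpty = (grouped: GroupedStreams): boolean =>
  Object.keys(grouped).length === 0;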
@@ -1974,36 +1983,6 @@ export const useMetadata = ({ id, type, addonId }: UseMetadataProps): UseMetadataReturn => {
     };
   }, [cleanupStreams]);

-  // Cache management methods
-  const clearScraperCache = useCallback(async () => {
-    await localScraperService.clearScraperCache();
-  }, []);
-
-  const invalidateScraperCache = useCallback(async (scraperId: string) => {
-    await localScraperService.invalidateScraperCache(scraperId);
-  }, []);
-
-  const invalidateContentCache = useCallback(async (type: string, tmdbId: string, season?: number, episode?: number) => {
-    await localScraperService.invalidateContentCache(type, tmdbId, season, episode);
-  }, []);
-
-  const getScraperCacheStats = useCallback(async () => {
-    const localStats = await localScraperService.getCacheStats();
-    return {
-      local: localStats.local,
-      global: {
-        totalEntries: 0,
-        totalSize: 0,
-        oldestEntry: null,
-        newestEntry: null,
-        hitRate: 0
-      },
-      combined: {
-        totalEntries: localStats.local.totalEntries,
-        hitRate: 0
-      }
-    };
-  }, []);
-
   return {
     metadata,
@@ -2038,9 +2017,5 @@ export const useMetadata = ({ id, type, addonId }: UseMetadataProps): UseMetadataReturn => {
     imdbId,
     scraperStatuses,
     activeFetchingScrapers,
-    clearScraperCache,
-    invalidateScraperCache,
-    invalidateContentCache,
-    getScraperCacheStats,
   };
 };
@@ -1,267 +0,0 @@ (entire file removed in this commit)

import { localScraperCacheService, CachedScraperResult } from './localScraperCacheService';
import { logger } from '../utils/logger';
import { Stream } from '../types/streams';

export interface HybridCacheResult {
  validResults: Array<CachedScraperResult>;
  expiredScrapers: string[];
  allExpired: boolean;
  source: 'local';
}

export interface HybridCacheStats {
  local: {
    totalEntries: number;
    totalSize: number;
    oldestEntry: number | null;
    newestEntry: number | null;
  };
}

class HybridCacheService {
  private static instance: HybridCacheService;
  // Global caching removed; local-only

  private constructor() {}

  public static getInstance(): HybridCacheService {
    if (!HybridCacheService.instance) {
      HybridCacheService.instance = new HybridCacheService();
    }
    return HybridCacheService.instance;
  }

  /**
   * Get cached results (local-only)
   */
  async getCachedResults(
    type: string,
    tmdbId: string,
    season?: number,
    episode?: number,
    userSettings?: { enableLocalScrapers?: boolean; enabledScrapers?: Set<string> }
  ): Promise<HybridCacheResult> {
    try {
      // Filter function to check if scraper is enabled for current user
      const isScraperEnabled = (scraperId: string): boolean => {
        if (!userSettings?.enableLocalScrapers) return false;
        if (userSettings?.enabledScrapers) {
          return userSettings.enabledScrapers.has(scraperId);
        }
        // If no specific scraper settings, assume all are enabled if local scrapers are enabled
        return true;
      };

      // Local cache only
      const localResults = await localScraperCacheService.getCachedResults(type, tmdbId, season, episode);

      // Filter results based on user settings
      const filteredLocalResults = {
        ...localResults,
        validResults: localResults.validResults.filter(result => isScraperEnabled(result.scraperId)),
        expiredScrapers: localResults.expiredScrapers.filter(scraperId => isScraperEnabled(scraperId))
      };

      logger.log(`[HybridCache] Using local cache: ${filteredLocalResults.validResults.length} results (filtered from ${localResults.validResults.length})`);
      return {
        ...filteredLocalResults,
        source: 'local'
      };

    } catch (error) {
      logger.error('[HybridCache] Error getting cached results:', error);
      return {
        validResults: [],
        expiredScrapers: [],
        allExpired: true,
        source: 'local'
      };
    }
  }

  /**
   * Cache results (local-only)
   */
  async cacheResults(
    type: string,
    tmdbId: string,
    results: Array<{
      scraperId: string;
      scraperName: string;
      streams: Stream[] | null;
      error: Error | null;
    }>,
    season?: number,
    episode?: number
  ): Promise<void> {
    try {
      // Cache in local storage
      const localPromises = results.map(result =>
        localScraperCacheService.cacheScraperResult(
          type, tmdbId, result.scraperId, result.scraperName,
          result.streams, result.error, season, episode
        )
      );
      await Promise.all(localPromises);
      logger.log(`[HybridCache] Cached ${results.length} results in local cache`);

    } catch (error) {
      logger.error('[HybridCache] Error caching results:', error);
    }
  }

  /**
   * Cache a single scraper result
   */
  async cacheScraperResult(
    type: string,
    tmdbId: string,
    scraperId: string,
    scraperName: string,
    streams: Stream[] | null,
    error: Error | null,
    season?: number,
    episode?: number
  ): Promise<void> {
    await this.cacheResults(type, tmdbId, [{
      scraperId,
      scraperName,
      streams,
      error
    }], season, episode);
  }

  /**
   * Get list of scrapers that need to be re-run (expired, failed, or not cached)
   */
  async getScrapersToRerun(
    type: string,
    tmdbId: string,
    availableScrapers: Array<{ id: string; name: string }>,
    season?: number,
    episode?: number,
    userSettings?: { enableLocalScrapers?: boolean; enabledScrapers?: Set<string> }
  ): Promise<string[]> {
    const { validResults, expiredScrapers } = await this.getCachedResults(type, tmdbId, season, episode, userSettings);

    const validScraperIds = new Set(validResults.map(r => r.scraperId));
    const expiredScraperIds = new Set(expiredScrapers);

    // Get scrapers that previously failed (returned no streams)
    const failedScraperIds = new Set(
      validResults
        .filter(r => !r.success || r.streams.length === 0)
        .map(r => r.scraperId)
    );

    // Return scrapers that are:
    // 1. Not cached at all
    // 2. Expired
    // 3. Previously failed (regardless of cache status)
    const scrapersToRerun = availableScrapers
      .filter(scraper =>
        !validScraperIds.has(scraper.id) ||
        expiredScraperIds.has(scraper.id) ||
        failedScraperIds.has(scraper.id)
      )
      .map(scraper => scraper.id);

    logger.log(`[HybridCache] Scrapers to re-run: ${scrapersToRerun.join(', ')} (not cached: ${availableScrapers.filter(s => !validScraperIds.has(s.id)).length}, expired: ${expiredScrapers.length}, failed: ${failedScraperIds.size})`);

    return scrapersToRerun;
  }

  /**
   * Get all valid cached streams
   */
  async getCachedStreams(
    type: string,
    tmdbId: string,
    season?: number,
    episode?: number,
    userSettings?: { enableLocalScrapers?: boolean; enabledScrapers?: Set<string> }
  ): Promise<Stream[]> {
    const { validResults } = await this.getCachedResults(type, tmdbId, season, episode, userSettings);

    // Flatten all valid streams
    const allStreams: Stream[] = [];
    for (const result of validResults) {
      if (result.success && result.streams) {
        allStreams.push(...result.streams);
      }
    }

    return allStreams;
  }

  /**
   * Invalidate cache for specific content (local-only)
   */
  async invalidateContent(
    type: string,
    tmdbId: string,
    season?: number,
    episode?: number
  ): Promise<void> {
    try {
      await localScraperCacheService.invalidateContent(type, tmdbId, season, episode);
      logger.log(`[HybridCache] Invalidated cache for ${type}:${tmdbId}`);
    } catch (error) {
      logger.error('[HybridCache] Error invalidating cache:', error);
    }
  }

  /**
   * Invalidate cache for specific scraper (local-only)
   */
  async invalidateScraper(scraperId: string): Promise<void> {
    try {
      await localScraperCacheService.invalidateScraper(scraperId);
      logger.log(`[HybridCache] Invalidated cache for scraper ${scraperId}`);
    } catch (error) {
      logger.error('[HybridCache] Error invalidating scraper cache:', error);
    }
  }

  /**
   * Clear all cached results (local-only)
   */
  async clearAllCache(): Promise<void> {
    try {
      await localScraperCacheService.clearAllCache();
      logger.log('[HybridCache] Cleared all local cache');
    } catch (error) {
      logger.error('[HybridCache] Error clearing cache:', error);
    }
  }

  /**
   * Get cache statistics (local-only)
   */
  async getCacheStats(): Promise<HybridCacheStats> {
    try {
      const localStats = await localScraperCacheService.getCacheStats();
      return { local: localStats };
    } catch (error) {
      logger.error('[HybridCache] Error getting cache stats:', error);
      return { local: { totalEntries: 0, totalSize: 0, oldestEntry: null, newestEntry: null } };
    }
  }

  /**
   * Clean up old entries (local-only)
   */
  async cleanupOldEntries(): Promise<void> {
    try {
      await localScraperCacheService.clearAllCache();
      logger.log('[HybridCache] Cleaned up old entries');
    } catch (error) {
      logger.error('[HybridCache] Error cleaning up old entries:', error);
    }
  }

  // Configuration APIs removed; local-only
}

export const hybridCacheService = HybridCacheService.getInstance();
export default hybridCacheService;
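The file above is the hybrid cache layer that this commit deletes. For orientation, a sketch of how its API was typically consumed (the call site and the concrete ids are illustrative, not taken from the repository):

import { hybridCacheService } from './hybridCacheService';

async function loadStreamsWithCache(): Promise<void> {
  // 1) Ask the (local-only) cache what is still valid for this title.
  const { validResults, allExpired } = await hybridCacheService.getCachedResults(
    'movie', '603', undefined, undefined, { enableLocalScrapers: true }
  );
  console.log(`cache hit for ${validResults.length} scrapers, allExpired=${allExpired}`);

  // 2) After re-running a scraper, write its result back for next time.
  await hybridCacheService.cacheScraperResult(
    'movie', '603', 'example-scraper', 'Example Scraper', [], null
  );
}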
@@ -1,437 +0,0 @@ (entire file removed in this commit)

import AsyncStorage from '@react-native-async-storage/async-storage';
import { logger } from '../utils/logger';
import { Stream } from '../types/streams';

export interface CachedScraperResult {
  streams: Stream[];
  timestamp: number;
  success: boolean;
  error?: string;
  scraperId: string;
  scraperName: string;
}

export interface CachedContentResult {
  contentKey: string; // e.g., "movie:123" or "tv:123:1:2"
  results: CachedScraperResult[];
  timestamp: number;
  ttl: number;
}

class LocalScraperCacheService {
  private static instance: LocalScraperCacheService;
  private readonly CACHE_KEY_PREFIX = 'local-scraper-cache';
  private readonly DEFAULT_TTL_MS = 30 * 60 * 1000; // 30 minutes default TTL
  private readonly MAX_CACHE_SIZE = 200; // Maximum number of cached content items
  private readonly FAILED_RETRY_TTL_MS = 5 * 60 * 1000; // 5 minutes for failed scrapers
  private readonly SUCCESS_TTL_MS = 60 * 60 * 1000; // 1 hour for successful scrapers

  private constructor() {}

  public static getInstance(): LocalScraperCacheService {
    if (!LocalScraperCacheService.instance) {
      LocalScraperCacheService.instance = new LocalScraperCacheService();
    }
    return LocalScraperCacheService.instance;
  }

  /**
   * Generate cache key for content
   */
  private getContentKey(type: string, tmdbId: string, season?: number, episode?: number): string {
    if (season !== undefined && episode !== undefined) {
      return `${type}:${tmdbId}:${season}:${episode}`;
    }
    return `${type}:${tmdbId}`;
  }

  /**
   * Generate AsyncStorage key for cached content
   */
  private getStorageKey(contentKey: string): string {
    return `${this.CACHE_KEY_PREFIX}:${contentKey}`;
  }

  /**
   * Check if cached result is still valid based on TTL
   */
  private isCacheValid(timestamp: number, ttl: number): boolean {
    return Date.now() - timestamp < ttl;
  }

  /**
   * Get cached results for content, filtering out expired results
   */
  async getCachedResults(
    type: string,
    tmdbId: string,
    season?: number,
    episode?: number
  ): Promise<{
    validResults: CachedScraperResult[];
    expiredScrapers: string[];
    allExpired: boolean;
  }> {
    try {
      const contentKey = this.getContentKey(type, tmdbId, season, episode);
      const storageKey = this.getStorageKey(contentKey);

      const cachedData = await AsyncStorage.getItem(storageKey);
      if (!cachedData) {
        return {
          validResults: [],
          expiredScrapers: [],
          allExpired: true
        };
      }

      const parsed: CachedContentResult = JSON.parse(cachedData);

      // Check if the entire cache entry is expired
      if (!this.isCacheValid(parsed.timestamp, parsed.ttl)) {
        // Remove expired entry
        await AsyncStorage.removeItem(storageKey);
        return {
          validResults: [],
          expiredScrapers: parsed.results.map(r => r.scraperId),
          allExpired: true
        };
      }

      // Filter valid results and identify expired scrapers
      const validResults: CachedScraperResult[] = [];
      const expiredScrapers: string[] = [];

      for (const result of parsed.results) {
        // Use different TTL based on success/failure
        const ttl = result.success ? this.SUCCESS_TTL_MS : this.FAILED_RETRY_TTL_MS;

        if (this.isCacheValid(result.timestamp, ttl)) {
          validResults.push(result);
        } else {
          expiredScrapers.push(result.scraperId);
        }
      }

      logger.log(`[LocalScraperCache] Retrieved ${validResults.length} valid results, ${expiredScrapers.length} expired scrapers for ${contentKey}`);

      return {
        validResults,
        expiredScrapers,
        allExpired: validResults.length === 0
      };

    } catch (error) {
      logger.error('[LocalScraperCache] Error getting cached results:', error);
      return {
        validResults: [],
        expiredScrapers: [],
        allExpired: true
      };
    }
  }

  /**
   * Cache results for specific scrapers
   */
  async cacheResults(
    type: string,
    tmdbId: string,
    results: CachedScraperResult[],
    season?: number,
    episode?: number
  ): Promise<void> {
    try {
      const contentKey = this.getContentKey(type, tmdbId, season, episode);
      const storageKey = this.getStorageKey(contentKey);

      // Get existing cached data
      const existingData = await AsyncStorage.getItem(storageKey);
      let cachedContent: CachedContentResult;

      if (existingData) {
        cachedContent = JSON.parse(existingData);

        // Update existing results or add new ones
        for (const newResult of results) {
          const existingIndex = cachedContent.results.findIndex(r => r.scraperId === newResult.scraperId);
          if (existingIndex >= 0) {
            // Update existing result
            cachedContent.results[existingIndex] = newResult;
          } else {
            // Add new result
            cachedContent.results.push(newResult);
          }
        }
      } else {
        // Create new cache entry
        cachedContent = {
          contentKey,
          results,
          timestamp: Date.now(),
          ttl: this.DEFAULT_TTL_MS
        };
      }

      // Update timestamp
      cachedContent.timestamp = Date.now();

      // Store updated cache
      await AsyncStorage.setItem(storageKey, JSON.stringify(cachedContent));

      // Clean up old cache entries if we exceed the limit
      await this.cleanupOldEntries();

      logger.log(`[LocalScraperCache] Cached ${results.length} results for ${contentKey}`);

    } catch (error) {
      logger.error('[LocalScraperCache] Error caching results:', error);
    }
  }

  /**
   * Cache a single scraper result
   */
  async cacheScraperResult(
    type: string,
    tmdbId: string,
    scraperId: string,
    scraperName: string,
    streams: Stream[] | null,
    error: Error | null,
    season?: number,
    episode?: number
  ): Promise<void> {
    const result: CachedScraperResult = {
      streams: streams || [],
      timestamp: Date.now(),
      success: !error && streams !== null,
      error: error?.message,
      scraperId,
      scraperName
    };

    await this.cacheResults(type, tmdbId, [result], season, episode);
  }

  /**
   * Get list of scrapers that need to be re-run (expired, failed, or not cached)
   */
  async getScrapersToRerun(
    type: string,
    tmdbId: string,
    availableScrapers: Array<{ id: string; name: string }>,
    season?: number,
    episode?: number
  ): Promise<string[]> {
    const { validResults, expiredScrapers } = await this.getCachedResults(type, tmdbId, season, episode);

    const validScraperIds = new Set(validResults.map(r => r.scraperId));
    const expiredScraperIds = new Set(expiredScrapers);

    // Get scrapers that previously failed (returned no streams)
    const failedScraperIds = new Set(
      validResults
        .filter(r => !r.success || r.streams.length === 0)
        .map(r => r.scraperId)
    );

    // Return scrapers that are:
    // 1. Not cached at all
    // 2. Expired
    // 3. Previously failed (regardless of cache status)
    const scrapersToRerun = availableScrapers
      .filter(scraper =>
        !validScraperIds.has(scraper.id) ||
        expiredScraperIds.has(scraper.id) ||
        failedScraperIds.has(scraper.id)
      )
      .map(scraper => scraper.id);

    logger.log(`[LocalScraperCache] Scrapers to re-run: ${scrapersToRerun.join(', ')} (not cached: ${availableScrapers.filter(s => !validScraperIds.has(s.id)).length}, expired: ${expiredScrapers.length}, failed: ${failedScraperIds.size})`);

    return scrapersToRerun;
  }

  /**
   * Get all valid cached streams for content
   */
  async getCachedStreams(
    type: string,
    tmdbId: string,
    season?: number,
    episode?: number
  ): Promise<Stream[]> {
    const { validResults } = await this.getCachedResults(type, tmdbId, season, episode);

    // Flatten all valid streams
    const allStreams: Stream[] = [];
    for (const result of validResults) {
      if (result.success && result.streams) {
        allStreams.push(...result.streams);
      }
    }

    return allStreams;
  }

  /**
   * Invalidate cache for specific content
   */
  async invalidateContent(
    type: string,
    tmdbId: string,
    season?: number,
    episode?: number
  ): Promise<void> {
    try {
      const contentKey = this.getContentKey(type, tmdbId, season, episode);
      const storageKey = this.getStorageKey(contentKey);

      await AsyncStorage.removeItem(storageKey);
      logger.log(`[LocalScraperCache] Invalidated cache for ${contentKey}`);
    } catch (error) {
      logger.error('[LocalScraperCache] Error invalidating cache:', error);
    }
  }

  /**
   * Invalidate cache for specific scraper across all content
   */
  async invalidateScraper(scraperId: string): Promise<void> {
    try {
      const keys = await AsyncStorage.getAllKeys();
      const cacheKeys = keys.filter(key => key.startsWith(this.CACHE_KEY_PREFIX));

      for (const key of cacheKeys) {
        const cachedData = await AsyncStorage.getItem(key);
        if (cachedData) {
          const parsed: CachedContentResult = JSON.parse(cachedData);

          // Remove results from this scraper
          parsed.results = parsed.results.filter(r => r.scraperId !== scraperId);

          if (parsed.results.length === 0) {
            // Remove entire cache entry if no results left
            await AsyncStorage.removeItem(key);
          } else {
            // Update cache with remaining results
            await AsyncStorage.setItem(key, JSON.stringify(parsed));
          }
        }
      }

      logger.log(`[LocalScraperCache] Invalidated cache for scraper ${scraperId}`);
    } catch (error) {
      logger.error('[LocalScraperCache] Error invalidating scraper cache:', error);
    }
  }

  /**
   * Clear all cached results
   */
  async clearAllCache(): Promise<void> {
    try {
      const keys = await AsyncStorage.getAllKeys();
      const cacheKeys = keys.filter(key => key.startsWith(this.CACHE_KEY_PREFIX));

      await AsyncStorage.multiRemove(cacheKeys);
      logger.log(`[LocalScraperCache] Cleared ${cacheKeys.length} cache entries`);
    } catch (error) {
      logger.error('[LocalScraperCache] Error clearing cache:', error);
    }
  }

  /**
   * Clean up old cache entries to stay within size limit
   */
  private async cleanupOldEntries(): Promise<void> {
    try {
      const keys = await AsyncStorage.getAllKeys();
      const cacheKeys = keys.filter(key => key.startsWith(this.CACHE_KEY_PREFIX));

      if (cacheKeys.length <= this.MAX_CACHE_SIZE) {
        return; // No cleanup needed
      }

      // Get all cache entries with their timestamps
      const entriesWithTimestamps = await Promise.all(
        cacheKeys.map(async (key) => {
          const data = await AsyncStorage.getItem(key);
          if (data) {
            const parsed: CachedContentResult = JSON.parse(data);
            return { key, timestamp: parsed.timestamp };
          }
          return { key, timestamp: 0 };
        })
      );

      // Sort by timestamp (oldest first)
      entriesWithTimestamps.sort((a, b) => a.timestamp - b.timestamp);

      // Remove oldest entries
      const entriesToRemove = entriesWithTimestamps.slice(0, cacheKeys.length - this.MAX_CACHE_SIZE);
      const keysToRemove = entriesToRemove.map(entry => entry.key);

      if (keysToRemove.length > 0) {
        await AsyncStorage.multiRemove(keysToRemove);
        logger.log(`[LocalScraperCache] Cleaned up ${keysToRemove.length} old cache entries`);
      }

    } catch (error) {
      logger.error('[LocalScraperCache] Error cleaning up cache:', error);
    }
  }

  /**
   * Get cache statistics
   */
  async getCacheStats(): Promise<{
    totalEntries: number;
    totalSize: number;
    oldestEntry: number | null;
    newestEntry: number | null;
  }> {
    try {
      const keys = await AsyncStorage.getAllKeys();
      const cacheKeys = keys.filter(key => key.startsWith(this.CACHE_KEY_PREFIX));

      let totalSize = 0;
      let oldestTimestamp: number | null = null;
      let newestTimestamp: number | null = null;

      for (const key of cacheKeys) {
        const data = await AsyncStorage.getItem(key);
        if (data) {
          totalSize += data.length;
          const parsed: CachedContentResult = JSON.parse(data);

          if (oldestTimestamp === null || parsed.timestamp < oldestTimestamp) {
            oldestTimestamp = parsed.timestamp;
          }
          if (newestTimestamp === null || parsed.timestamp > newestTimestamp) {
            newestTimestamp = parsed.timestamp;
          }
        }
      }

      return {
        totalEntries: cacheKeys.length,
        totalSize,
        oldestEntry: oldestTimestamp,
        newestEntry: newestTimestamp
      };
    } catch (error) {
      logger.error('[LocalScraperCache] Error getting cache stats:', error);
      return {
        totalEntries: 0,
        totalSize: 0,
        oldestEntry: null,
        newestEntry: null
      };
    }
  }
}

export const localScraperCacheService = LocalScraperCacheService.getInstance();
export default localScraperCacheService;
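The deleted local cache kept per-scraper TTLs: an hour for successful results, five minutes before retrying failures, and thirty minutes for a whole content entry. The per-result rule in isolation (constants copied from the file above; the standalone helper is a sketch, the service used its private isCacheValid method):

const SUCCESS_TTL_MS = 60 * 60 * 1000;     // 1 hour for successful scrapers
const FAILED_RETRY_TTL_MS = 5 * 60 * 1000; // 5 minutes for failed scrapers

function isResultStillValid(timestamp: number, success: boolean, now = Date.now()): boolean {
  const ttl = success ? SUCCESS_TTL_MS : FAILED_RETRY_TTL_MS;
  return now - timestamp < ttl;
}

// A failed result from 10 minutes ago is expired, so that scraper re-runs:
isResultStillValid(Date.now() - 10 * 60 * 1000, false); // false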
@@ -4,8 +4,6 @@ import { Platform } from 'react-native';
 import { logger } from '../utils/logger';
 import { Stream } from '../types/streams';
 import { cacheService } from './cacheService';
-import { localScraperCacheService } from './localScraperCacheService';
-import { hybridCacheService } from './hybridCacheService';
 import CryptoJS from 'crypto-js';

 // Types for local scrapers
@@ -862,7 +860,7 @@ class LocalScraperService {
     }
   }

-  // Execute scrapers for streams with caching
+  // Execute scrapers for streams
   async getStreams(type: string, tmdbId: string, season?: number, episode?: number, callback?: ScraperCallback): Promise<void> {
     await this.ensureInitialized();
@@ -880,63 +878,21 @@ class LocalScraperService {
       return;
     }

-    // Get current user settings for enabled scrapers
-    const userSettings = await this.getUserScraperSettings();
-
-    // Check cache for existing results (hybrid: global first, then local)
-    const { validResults, expiredScrapers, allExpired, source } = await hybridCacheService.getCachedResults(type, tmdbId, season, episode, userSettings);
-
-    // Immediately return cached results for valid scrapers
-    if (validResults.length > 0) {
-      logger.log(`[LocalScraperService] Returning ${validResults.length} cached results for ${type}:${tmdbId} (source: ${source})`);
-
-      for (const cachedResult of validResults) {
-        if (cachedResult.success && cachedResult.streams.length > 0) {
-          // Streams are already in the correct format, just pass them through
-          if (callback) {
-            callback(cachedResult.streams, cachedResult.scraperId, cachedResult.scraperName, null);
-          }
-        } else if (callback) {
-          // Return error for failed cached results
-          const error = cachedResult.error ? new Error(cachedResult.error) : new Error('Scraper failed');
-          callback(null, cachedResult.scraperId, cachedResult.scraperName, error);
-        }
-      }
-    }
-
-    // Determine which scrapers need to be re-run
-    const scrapersToRerun = enabledScrapers.filter(scraper => {
-      const hasValidResult = validResults.some(r => r.scraperId === scraper.id);
-      const isExpired = expiredScrapers.includes(scraper.id);
-      const hasFailedResult = validResults.some(r => r.scraperId === scraper.id && (!r.success || r.streams.length === 0));
-
-      return !hasValidResult || isExpired || hasFailedResult;
-    });
-
-    if (scrapersToRerun.length === 0) {
-      logger.log('[LocalScraperService] All scrapers have valid cached results');
-      return;
-    }
-
-    logger.log(`[LocalScraperService] Re-running ${scrapersToRerun.length} scrapers for ${type}:${tmdbId}`, {
-      totalEnabled: enabledScrapers.length,
-      expired: expiredScrapers.length,
-      failed: validResults.filter(r => !r.success || r.streams.length === 0).length,
-      notCached: enabledScrapers.length - validResults.length,
-      scrapersToRerun: scrapersToRerun.map(s => s.name)
+    logger.log(`[LocalScraperService] Executing ${enabledScrapers.length} scrapers for ${type}:${tmdbId}`, {
+      scrapers: enabledScrapers.map(s => s.name)
     });

     // Generate a lightweight request id for tracing
     const requestId = `rs_${Date.now().toString(36)}_${Math.random().toString(36).slice(2, 6)}`;

-    // Execute only scrapers that need to be re-run
-    for (const scraper of scrapersToRerun) {
-      this.executeScraperWithCaching(scraper, type, tmdbId, season, episode, callback, requestId);
+    // Execute all enabled scrapers
+    for (const scraper of enabledScrapers) {
+      this.executeScraper(scraper, type, tmdbId, season, episode, callback, requestId);
     }
   }

-  // Execute individual scraper with caching
-  private async executeScraperWithCaching(
+  // Execute individual scraper
+  private async executeScraper(
     scraper: ScraperInfo,
     type: string,
     tmdbId: string,
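After this hunk, getStreams no longer consults a cache: it logs the enabled scrapers, fires executeScraper for every one of them, and reports each back through the callback. A sketch of a consumer tracking completion (the callback parameter order is taken from the calls in this diff; the Stream type and scraper ids are assumptions):

type Stream = { url: string; title?: string }; // assumed minimal shape
type ScraperCallback = (
  streams: Stream[] | null,
  scraperId: string,
  scraperName: string,
  error: Error | null
) => void;

const pending = new Set<string>(['scraper-a', 'scraper-b']); // assumed ids

const onScraperResult: ScraperCallback = (streams, scraperId, scraperName, error) => {
  pending.delete(scraperId); // every enabled scraper reports exactly once
  if (error) {
    console.warn(`${scraperName} failed:`, error.message);
  } else {
    console.log(`${scraperName} returned ${streams?.length ?? 0} streams`);
  }
};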
@@ -984,18 +940,6 @@ class LocalScraperService {
       // Convert results to Nuvio Stream format
       const streams = this.convertToStreams(results, scraper);

-      // Cache the successful result (hybrid: both local and global)
-      await hybridCacheService.cacheScraperResult(
-        type,
-        tmdbId,
-        scraper.id,
-        scraper.name,
-        streams,
-        null,
-        season,
-        episode
-      );
-
       if (callback) {
         callback(streams, scraper.id, scraper.name, null);
       }
@@ -1003,37 +947,12 @@ class LocalScraperService {
     } catch (error) {
       logger.error('[LocalScraperService] Scraper', scraper.name, 'failed:', error);

-      // Cache the failed result (hybrid: both local and global)
-      await hybridCacheService.cacheScraperResult(
-        type,
-        tmdbId,
-        scraper.id,
-        scraper.name,
-        null,
-        error as Error,
-        season,
-        episode
-      );
-
       if (callback) {
         callback(null, scraper.id, scraper.name, error as Error);
       }
     }
   }

-  // Execute individual scraper (legacy method - kept for compatibility)
-  private async executeScraper(
-    scraper: ScraperInfo,
-    type: string,
-    tmdbId: string,
-    season?: number,
-    episode?: number,
-    callback?: ScraperCallback,
-    requestId?: string
-  ): Promise<void> {
-    // Delegate to the caching version
-    return this.executeScraperWithCaching(scraper, type, tmdbId, season, episode, callback, requestId);
-  }
-
   // Execute scraper code in sandboxed environment
   private async executeSandboxed(code: string, params: any): Promise<LocalScraperResult[]> {
@@ -1161,7 +1080,7 @@ class LocalScraperService {
           ...options.headers
         },
         data: options.body,
-        timeout: 60000,
+        timeout: 120000, // Increased to 2 minutes for complex scrapers
        validateStatus: () => true // Don't throw on HTTP error status codes
       };
@@ -1201,7 +1120,7 @@ class LocalScraperService {
       },
       // Add axios for HTTP requests
       axios: axios.create({
-        timeout: 30000,
+        timeout: 120000, // Increased to 2 minutes for complex scrapers
         headers: {
           'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
         }
@@ -1217,7 +1136,9 @@ class LocalScraperService {
         SCRAPER_ID: params?.scraperId
       };

-      // Execute the scraper code without timeout
+      // Execute the scraper code with 1 minute timeout
+      const SCRAPER_EXECUTION_TIMEOUT_MS = 60000; // 1 minute
+
       const executionPromise = new Promise<LocalScraperResult[]>((resolve, reject) => {
         try {
           // Create function from code
@@ -1263,7 +1184,13 @@ class LocalScraperService {
         }
       });

-      return await executionPromise;
+      // Apply 1-minute timeout to prevent hanging scrapers
+      return await Promise.race([
+        executionPromise,
+        new Promise<never>((_, reject) =>
+          setTimeout(() => reject(new Error(`Scraper execution timed out after ${SCRAPER_EXECUTION_TIMEOUT_MS}ms`)), SCRAPER_EXECUTION_TIMEOUT_MS)
+        )
+      ]);

     } catch (error) {
       logger.error('[LocalScraperService] Sandbox execution failed:', error);
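The two hunks above raise the sandbox HTTP timeouts to two minutes and bound scraper execution itself at one minute with Promise.race. The race pattern in isolation, as a generic sketch (the service inlines it rather than using a helper like this):

function withTimeout<T>(work: Promise<T>, ms: number, label = 'operation'): Promise<T> {
  return Promise.race([
    work,
    new Promise<never>((_, reject) =>
      setTimeout(() => reject(new Error(`${label} timed out after ${ms}ms`)), ms)
    ),
  ]);
}

// Mirrors the change above: fail a hanging scraper after 60 seconds.
// await withTimeout(executionPromise, 60000, 'Scraper execution');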
@@ -1365,6 +1292,19 @@ class LocalScraperService {
   // Check if local scrapers are available
   async hasScrapers(): Promise<boolean> {
     await this.ensureInitialized();
+
+    // Get user settings to check if local scrapers are enabled
+    const userSettings = await this.getUserScraperSettings();
+    if (!userSettings.enableLocalScrapers) {
+      return false;
+    }
+
+    // Check if there are any enabled scrapers based on user settings
+    if (userSettings.enabledScrapers && userSettings.enabledScrapers.size > 0) {
+      return true;
+    }
+
+    // Fallback: check if any scrapers are enabled in the internal state
     return Array.from(this.installedScrapers.values()).some(scraper => scraper.enabled);
   }
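hasScrapers() now checks the user's settings before falling back to the installed-scraper registry. A sketch of how a caller might gate scraping on it (the import path and call site are assumptions):

import { localScraperService } from './localScraperService'; // path assumed

async function maybeFetchLocalStreams(type: string, tmdbId: string): Promise<void> {
  if (!(await localScraperService.hasScrapers())) {
    return; // local scrapers disabled, or none enabled for this user
  }
  await localScraperService.getStreams(type, tmdbId, undefined, undefined,
    (streams, scraperId, scraperName, error) => {
      console.log(scraperName, error ? 'failed' : `${streams?.length ?? 0} streams`);
    });
}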
@@ -1384,8 +1324,11 @@ class LocalScraperService {
       };
     }

-    // Get user settings from AsyncStorage
-    const settingsData = await AsyncStorage.getItem('app_settings');
+    // Get user settings from AsyncStorage (scoped with fallback)
+    const scope = (await AsyncStorage.getItem('@user:current')) || 'local';
+    const scopedSettingsJson = await AsyncStorage.getItem(`@user:${scope}:app_settings`);
+    const legacySettingsJson = await AsyncStorage.getItem('app_settings');
+    const settingsData = scopedSettingsJson || legacySettingsJson;
     const settings = settingsData ? JSON.parse(settingsData) : {};

     // Get enabled scrapers based on current user settings
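Settings are now read per user scope, with the old unscoped key kept as a fallback. A standalone sketch of that lookup (key names come from the hunk above; returning a plain object is an assumption about the stored shape):

import AsyncStorage from '@react-native-async-storage/async-storage';

// '@user:current' holds the active scope (defaults to 'local' when unset);
// per-user settings live under '@user:<scope>:app_settings', and the legacy
// global 'app_settings' key is still honored if the scoped key is missing.
async function readAppSettings(): Promise<Record<string, unknown>> {
  const scope = (await AsyncStorage.getItem('@user:current')) || 'local';
  const scoped = await AsyncStorage.getItem(`@user:${scope}:app_settings`);
  const legacy = await AsyncStorage.getItem('app_settings');
  return JSON.parse(scoped || legacy || '{}');
}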
@@ -1408,32 +1351,6 @@ class LocalScraperService {
     }
   }

-  // Cache management methods (hybrid: local + global)
-  async clearScraperCache(): Promise<void> {
-    await hybridCacheService.clearAllCache();
-    logger.log('[LocalScraperService] Cleared all scraper cache (local + global)');
-  }
-
-  async invalidateScraperCache(scraperId: string): Promise<void> {
-    await hybridCacheService.invalidateScraper(scraperId);
-    logger.log('[LocalScraperService] Invalidated cache for scraper:', scraperId);
-  }
-
-  async invalidateContentCache(type: string, tmdbId: string, season?: number, episode?: number): Promise<void> {
-    await hybridCacheService.invalidateContent(type, tmdbId, season, episode);
-    logger.log('[LocalScraperService] Invalidated cache for content:', `${type}:${tmdbId}`);
-  }
-
-  async getCacheStats(): Promise<{
-    local: {
-      totalEntries: number;
-      totalSize: number;
-      oldestEntry: number | null;
-      newestEntry: number | null;
-    };
-  }> {
-    return await hybridCacheService.getCacheStats();
-  }
 }

 export const localScraperService = LocalScraperService.getInstance();
@@ -1235,13 +1235,16 @@ class StremioService {
     // Execute local scrapers asynchronously with TMDB ID (when available)
     if (tmdbId) {
       localScraperService.getStreams(scraperType, tmdbId, season, episode, (streams, scraperId, scraperName, error) => {
-        if (error) {
-          if (callback) {
+        // Always call callback to ensure UI updates, regardless of result
+        if (callback) {
+          if (error) {
             callback(null, scraperId, scraperName, error);
-          }
-        } else if (streams && streams.length > 0) {
-          if (callback) {
+          } else if (streams && streams.length > 0) {
             callback(streams, scraperId, scraperName, null);
+          } else {
+            // Handle case where scraper completed successfully but returned no streams
+            // This ensures the scraper is removed from "fetching" state in UI
+            callback([], scraperId, scraperName, null);
           }
         }
       });
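Calling back with an empty array, instead of staying silent, is what lets the UI drop a scraper from its fetching indicator even when it finds nothing. A sketch of that consumer-side bookkeeping (names and state handling are assumptions modeled on the hook changes earlier in this commit):

type Stream = { url: string; title?: string }; // assumed minimal shape

let activeFetchingScrapers: string[] = ['scraper-a', 'scraper-b']; // assumed ids

function handleScraperResult(
  streams: Stream[] | null,
  scraperId: string,
  scraperName: string,
  error: Error | null
): void {
  // Any callback (error, streams, or an empty []) marks this scraper as done.
  activeFetchingScrapers = activeFetchingScrapers.filter(id => id !== scraperId);

  if (error) {
    console.warn(`${scraperName} failed:`, error.message);
  } else if (streams && streams.length > 0) {
    console.log(`${scraperName} found ${streams.length} streams`);
  } else {
    console.log(`${scraperName} finished with no streams`);
  }
}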