This commit is contained in:
tapframe 2025-07-20 13:37:57 +05:30
parent 19b6e6b3d5
commit da0a69ab0e
10 changed files with 2360 additions and 21 deletions

File diff suppressed because it is too large Load diff

1
local-scrapers-repo Submodule

@@ -0,0 +1 @@
Subproject commit fa92f8cf27fc238b48697263aa48d8a2eb5a68ef

View file

@ -2,7 +2,7 @@ import React, { useState, useRef, useEffect } from 'react';
import { View, TouchableOpacity, Dimensions, Animated, ActivityIndicator, Platform, NativeModules, StatusBar, Text, Image, StyleSheet } from 'react-native';
import { VLCPlayer } from 'react-native-vlc-media-player';
import { useNavigation, useRoute, RouteProp } from '@react-navigation/native';
import { RootStackParamList } from '../../navigation/AppNavigator';
import { RootStackParamList, RootStackNavigationProp } from '../../navigation/AppNavigator';
import { PinchGestureHandler, State, PinchGestureHandlerGestureEvent } from 'react-native-gesture-handler';
import RNImmersiveMode from 'react-native-immersive-mode';
import * as ScreenOrientation from 'expo-screen-orientation';
@ -43,7 +43,7 @@ const VideoPlayer: React.FC = () => {
return <AndroidVideoPlayer />;
}
const navigation = useNavigation();
const navigation = useNavigation<RootStackNavigationProp>();
const route = useRoute<RouteProp<RootStackParamList, 'Player'>>();
const {
@ -1338,4 +1338,4 @@ const VideoPlayer: React.FC = () => {
);
};
export default VideoPlayer;
export default VideoPlayer;

View file

@ -36,6 +36,10 @@ export interface AppSettings {
tmdbLanguagePreference: string; // Preferred language for TMDB logos (ISO 639-1 code)
episodeLayoutStyle: 'vertical' | 'horizontal'; // Layout style for episode cards
autoplayBestStream: boolean; // Automatically play the best available stream
// Local scraper settings
scraperRepositoryUrl: string; // URL to the scraper repository
enableLocalScrapers: boolean; // Enable/disable local scraper functionality
scraperTimeout: number; // Timeout for scraper execution in seconds
}
export const DEFAULT_SETTINGS: AppSettings = {
@ -54,6 +58,10 @@ export const DEFAULT_SETTINGS: AppSettings = {
tmdbLanguagePreference: 'en', // Default to English
episodeLayoutStyle: 'horizontal', // Default to the new horizontal layout
autoplayBestStream: false, // Disabled by default for user choice
// Local scraper defaults
scraperRepositoryUrl: '',
enableLocalScrapers: true,
scraperTimeout: 60, // 60 seconds timeout
};
const SETTINGS_STORAGE_KEY = 'app_settings';

View file

@ -39,6 +39,7 @@ import LogoSourceSettings from '../screens/LogoSourceSettings';
import ThemeScreen from '../screens/ThemeScreen';
import ProfilesScreen from '../screens/ProfilesScreen';
import OnboardingScreen from '../screens/OnboardingScreen';
import ScraperSettingsScreen from '../screens/ScraperSettingsScreen';
// Stack navigator types
export type RootStackParamList = {
@ -104,6 +105,7 @@ export type RootStackParamList = {
LogoSourceSettings: undefined;
ThemeSettings: undefined;
ProfilesSettings: undefined;
ScraperSettings: undefined;
};
export type RootStackNavigationProp = NativeStackNavigationProp<RootStackParamList>;
@ -1024,6 +1026,21 @@ const AppNavigator = ({ initialRouteName }: { initialRouteName?: keyof RootStack
},
}}
/>
<Stack.Screen
name="ScraperSettings"
component={ScraperSettingsScreen}
options={{
animation: Platform.OS === 'android' ? 'slide_from_right' : 'fade',
animationDuration: Platform.OS === 'android' ? 250 : 200,
presentation: 'card',
gestureEnabled: true,
gestureDirection: 'horizontal',
headerShown: false,
contentStyle: {
backgroundColor: currentTheme.colors.darkBackground,
},
}}
/>
</Stack.Navigator>
</View>
</PaperProvider>
@ -1031,4 +1048,4 @@ const AppNavigator = ({ initialRouteName }: { initialRouteName?: keyof RootStack
);
};
export default AppNavigator;
export default AppNavigator;

View file

@@ -0,0 +1,585 @@
import React, { useState, useEffect } from 'react';
import {
View,
Text,
StyleSheet,
ScrollView,
TextInput,
TouchableOpacity,
Alert,
Switch,
ActivityIndicator,
RefreshControl,
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons } from '@expo/vector-icons';
import { useNavigation } from '@react-navigation/native';
import { useSettings } from '../hooks/useSettings';
import { localScraperService, ScraperInfo } from '../services/localScraperService';
import { logger } from '../utils/logger';
/**
 * ScraperSettingsScreen — settings UI for the local scraper system.
 *
 * Lets the user:
 *  - enable/disable local scrapers globally,
 *  - configure the scraper repository URL (GitHub raw URL format),
 *  - refresh the repository to download/update scrapers,
 *  - toggle individual installed scrapers on/off,
 *  - clear all scrapers or the whole repository cache.
 *
 * All persistence is delegated to `localScraperService`; the
 * `scraperRepositoryUrl` / `enableLocalScrapers` app settings are kept in
 * sync via `updateSetting`.
 */
const ScraperSettingsScreen: React.FC = () => {
  const navigation = useNavigation();
  const { settings, updateSetting } = useSettings();

  // Local draft of the repository URL; seeded from settings, then synced
  // from the service on mount (the service is the source of truth).
  const [repositoryUrl, setRepositoryUrl] = useState(settings.scraperRepositoryUrl);
  const [installedScrapers, setInstalledScrapers] = useState<ScraperInfo[]>([]);
  const [isLoading, setIsLoading] = useState(false);       // "Save Repository" in flight
  const [isRefreshing, setIsRefreshing] = useState(false); // repository refresh / pull-to-refresh in flight
  const [hasRepository, setHasRepository] = useState(false);

  useEffect(() => {
    loadScrapers();
    checkRepository();
  }, []);

  // Reload the installed-scraper list from the service. Errors are logged
  // and swallowed so a transient failure doesn't break the screen.
  const loadScrapers = async () => {
    try {
      const scrapers = await localScraperService.getInstalledScrapers();
      setInstalledScrapers(scrapers);
    } catch (error) {
      logger.error('[ScraperSettings] Failed to load scrapers:', error);
    }
  };

  // Pull the persisted repository URL from the service and mirror it into
  // local state if it differs from the current draft.
  const checkRepository = async () => {
    try {
      const repoUrl = await localScraperService.getRepositoryUrl();
      setHasRepository(!!repoUrl);
      if (repoUrl && repoUrl !== repositoryUrl) {
        setRepositoryUrl(repoUrl);
      }
    } catch (error) {
      logger.error('[ScraperSettings] Failed to check repository:', error);
    }
  };

  // Validate and persist the repository URL (service + app settings).
  const handleSaveRepository = async () => {
    if (!repositoryUrl.trim()) {
      Alert.alert('Error', 'Please enter a valid repository URL');
      return;
    }

    // Validate URL format: only GitHub raw URLs (or plain http for dev) accepted.
    const url = repositoryUrl.trim();
    if (!url.startsWith('https://raw.githubusercontent.com/') && !url.startsWith('http://')) {
      Alert.alert(
        'Invalid URL Format',
        'Please use a valid GitHub raw URL format:\n\nhttps://raw.githubusercontent.com/username/repo/branch/\n\nExample:\nhttps://raw.githubusercontent.com/tapframe/nuvio-providers/main/'
      );
      return;
    }

    try {
      setIsLoading(true);
      await localScraperService.setRepositoryUrl(url);
      await updateSetting('scraperRepositoryUrl', url);
      setHasRepository(true);
      Alert.alert('Success', 'Repository URL saved successfully');
    } catch (error) {
      logger.error('[ScraperSettings] Failed to save repository:', error);
      Alert.alert('Error', 'Failed to save repository URL');
    } finally {
      setIsLoading(false);
    }
  };

  // Re-fetch the repository manifest and (re)install scrapers.
  const handleRefreshRepository = async () => {
    if (!repositoryUrl.trim()) {
      Alert.alert('Error', 'Please set a repository URL first');
      return;
    }

    try {
      setIsRefreshing(true);
      await localScraperService.refreshRepository();
      await loadScrapers();
      Alert.alert('Success', 'Repository refreshed successfully');
    } catch (error) {
      logger.error('[ScraperSettings] Failed to refresh repository:', error);
      const errorMessage = error instanceof Error ? error.message : String(error);
      Alert.alert(
        'Repository Error',
        `Failed to refresh repository: ${errorMessage}\n\nPlease ensure your URL is correct and follows this format:\nhttps://raw.githubusercontent.com/username/repo/branch/`
      );
    } finally {
      setIsRefreshing(false);
    }
  };

  // FIX: the ScrollView's RefreshControl previously used `loadScrapers`
  // directly as onRefresh, which never toggled `isRefreshing`, so the
  // pull-to-refresh spinner could neither appear nor dismiss. This wrapper
  // manages the refreshing state around the reload.
  const handlePullToRefresh = async () => {
    setIsRefreshing(true);
    try {
      await loadScrapers();
      await checkRepository();
    } finally {
      setIsRefreshing(false);
    }
  };

  // Toggle a single scraper and reload the list so the UI reflects the
  // service's persisted state.
  const handleToggleScraper = async (scraperId: string, enabled: boolean) => {
    try {
      await localScraperService.setScraperEnabled(scraperId, enabled);
      await loadScrapers();
    } catch (error) {
      logger.error('[ScraperSettings] Failed to toggle scraper:', error);
      Alert.alert('Error', 'Failed to update scraper status');
    }
  };

  // Remove all installed scrapers (with confirmation); repository URL is kept.
  const handleClearScrapers = () => {
    Alert.alert(
      'Clear All Scrapers',
      'Are you sure you want to remove all installed scrapers? This action cannot be undone.',
      [
        { text: 'Cancel', style: 'cancel' },
        {
          text: 'Clear',
          style: 'destructive',
          onPress: async () => {
            try {
              await localScraperService.clearScrapers();
              await loadScrapers();
              Alert.alert('Success', 'All scrapers have been removed');
            } catch (error) {
              logger.error('[ScraperSettings] Failed to clear scrapers:', error);
              Alert.alert('Error', 'Failed to clear scrapers');
            }
          },
        },
      ]
    );
  };

  // Full reset (with confirmation): scrapers, cached code, repository URL
  // and the mirrored app setting are all cleared.
  const handleClearCache = () => {
    Alert.alert(
      'Clear Repository Cache',
      'This will remove the saved repository URL and clear all cached scraper data. You will need to re-enter your repository URL.',
      [
        { text: 'Cancel', style: 'cancel' },
        {
          text: 'Clear Cache',
          style: 'destructive',
          onPress: async () => {
            try {
              await localScraperService.clearScrapers();
              await localScraperService.setRepositoryUrl('');
              await updateSetting('scraperRepositoryUrl', '');
              setRepositoryUrl('');
              setHasRepository(false);
              await loadScrapers();
              Alert.alert('Success', 'Repository cache cleared successfully');
            } catch (error) {
              logger.error('[ScraperSettings] Failed to clear cache:', error);
              Alert.alert('Error', 'Failed to clear repository cache');
            }
          },
        },
      ]
    );
  };

  // Prefill the input with the default repository; user still has to Save.
  const handleUseDefaultRepo = () => {
    const defaultUrl = 'https://raw.githubusercontent.com/tapframe/nuvio-providers/main';
    setRepositoryUrl(defaultUrl);
  };

  const handleToggleLocalScrapers = async (enabled: boolean) => {
    await updateSetting('enableLocalScrapers', enabled);
  };

  // Row renderer for one installed scraper: metadata on the left, an
  // enable/disable switch on the right.
  const renderScraperItem = (scraper: ScraperInfo) => (
    <View key={scraper.id} style={styles.scraperItem}>
      <View style={styles.scraperInfo}>
        <Text style={styles.scraperName}>{scraper.name}</Text>
        <Text style={styles.scraperDescription}>{scraper.description}</Text>
        <View style={styles.scraperMeta}>
          <Text style={styles.scraperVersion}>v{scraper.version}</Text>
          <Text style={styles.scraperTypes}>
            {scraper.supportedTypes.join(', ')}
          </Text>
        </View>
      </View>
      <Switch
        value={scraper.enabled}
        onValueChange={(enabled) => handleToggleScraper(scraper.id, enabled)}
        trackColor={{ false: '#767577', true: '#007AFF' }}
        thumbColor={scraper.enabled ? '#ffffff' : '#f4f3f4'}
      />
    </View>
  );

  return (
    <SafeAreaView style={styles.container}>
      <View style={styles.header}>
        <TouchableOpacity
          style={styles.backButton}
          onPress={() => navigation.goBack()}
        >
          <Ionicons name="arrow-back" size={24} color="#007AFF" />
        </TouchableOpacity>
        <Text style={styles.headerTitle}>Local Scrapers</Text>
      </View>

      <ScrollView
        style={styles.content}
        refreshControl={
          <RefreshControl refreshing={isRefreshing} onRefresh={handlePullToRefresh} />
        }
      >
        {/* Enable/Disable Local Scrapers */}
        <View style={styles.section}>
          <View style={styles.settingRow}>
            <View style={styles.settingInfo}>
              <Text style={styles.settingTitle}>Enable Local Scrapers</Text>
              <Text style={styles.settingDescription}>
                Allow the app to use locally installed scrapers for finding streams
              </Text>
            </View>
            <Switch
              value={settings.enableLocalScrapers}
              onValueChange={handleToggleLocalScrapers}
              trackColor={{ false: '#767577', true: '#007AFF' }}
              thumbColor={settings.enableLocalScrapers ? '#ffffff' : '#f4f3f4'}
            />
          </View>
        </View>

        {/* Repository Configuration */}
        <View style={styles.section}>
          <View style={styles.sectionHeader}>
            <Text style={styles.sectionTitle}>Repository Configuration</Text>
            {hasRepository && (
              <TouchableOpacity
                style={styles.clearButton}
                onPress={handleClearCache}
              >
                <Text style={styles.clearButtonText}>Clear Cache</Text>
              </TouchableOpacity>
            )}
          </View>
          <Text style={styles.sectionDescription}>
            Enter the URL of a Nuvio scraper repository to download and install scrapers.
          </Text>

          {hasRepository && repositoryUrl && (
            <View style={styles.currentRepoContainer}>
              <Text style={styles.currentRepoLabel}>Current Repository:</Text>
              <Text style={styles.currentRepoUrl}>{repositoryUrl}</Text>
            </View>
          )}

          <View style={styles.inputContainer}>
            <TextInput
              style={styles.textInput}
              value={repositoryUrl}
              onChangeText={setRepositoryUrl}
              placeholder="https://raw.githubusercontent.com/tapframe/nuvio-providers/main"
              placeholderTextColor="#999"
              autoCapitalize="none"
              autoCorrect={false}
              keyboardType="url"
            />
            <Text style={styles.urlHint}>
              💡 Use GitHub raw URL format. Default: https://raw.githubusercontent.com/tapframe/nuvio-providers/main
            </Text>
            <TouchableOpacity
              style={styles.defaultRepoButton}
              onPress={handleUseDefaultRepo}
            >
              <Text style={styles.defaultRepoButtonText}>Use Default Repository</Text>
            </TouchableOpacity>
          </View>

          <View style={styles.buttonRow}>
            <TouchableOpacity
              style={[styles.button, styles.primaryButton]}
              onPress={handleSaveRepository}
              disabled={isLoading}
            >
              {isLoading ? (
                <ActivityIndicator size="small" color="#ffffff" />
              ) : (
                <Text style={styles.buttonText}>Save Repository</Text>
              )}
            </TouchableOpacity>

            {hasRepository && (
              <TouchableOpacity
                style={[styles.button, styles.secondaryButton]}
                onPress={handleRefreshRepository}
                disabled={isRefreshing}
              >
                {isRefreshing ? (
                  <ActivityIndicator size="small" color="#007AFF" />
                ) : (
                  <Text style={styles.secondaryButtonText}>Refresh</Text>
                )}
              </TouchableOpacity>
            )}
          </View>
        </View>

        {/* Installed Scrapers */}
        <View style={styles.section}>
          <View style={styles.sectionHeader}>
            <Text style={styles.sectionTitle}>Installed Scrapers</Text>
            {installedScrapers.length > 0 && (
              <TouchableOpacity
                style={styles.clearButton}
                onPress={handleClearScrapers}
              >
                <Text style={styles.clearButtonText}>Clear All</Text>
              </TouchableOpacity>
            )}
          </View>

          {installedScrapers.length === 0 ? (
            <View style={styles.emptyState}>
              <Ionicons name="download-outline" size={48} color="#999" />
              <Text style={styles.emptyStateTitle}>No Scrapers Installed</Text>
              <Text style={styles.emptyStateDescription}>
                Add a repository URL above and refresh to install scrapers.
              </Text>
            </View>
          ) : (
            <View style={styles.scrapersList}>
              {installedScrapers.map(renderScraperItem)}
            </View>
          )}
        </View>

        {/* Information */}
        <View style={styles.section}>
          <Text style={styles.sectionTitle}>About Local Scrapers</Text>
          <Text style={styles.infoText}>
            Local scrapers are JavaScript modules that can search for streaming links from various sources.
            They run locally on your device and can be installed from trusted repositories.
          </Text>
          <Text style={styles.infoText}>
            Only install scrapers from trusted sources. Malicious scrapers could potentially access your data.
          </Text>
        </View>
      </ScrollView>
    </SafeAreaView>
  );
};
// Styles for ScraperSettingsScreen. Dark theme with hard-coded colors:
// #000000 background, #1a1a1a cards, #333 borders, #007AFF accent,
// #ff3b30 destructive actions.
// NOTE(review): colors are not pulled from the app theme — presumably
// intentional for this screen, but worth confirming against ThemeScreen.
const styles = StyleSheet.create({
  // --- Screen scaffold -----------------------------------------------------
  container: {
    flex: 1,
    backgroundColor: '#000000',
  },
  header: {
    flexDirection: 'row',
    alignItems: 'center',
    paddingHorizontal: 16,
    paddingVertical: 12,
    borderBottomWidth: 1,
    borderBottomColor: '#333',
  },
  backButton: {
    marginRight: 16,
  },
  headerTitle: {
    fontSize: 20,
    fontWeight: '600',
    color: '#ffffff',
  },
  content: {
    flex: 1,
  },
  // --- Section layout ------------------------------------------------------
  section: {
    padding: 16,
    borderBottomWidth: 1,
    borderBottomColor: '#333',
  },
  sectionHeader: {
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
    marginBottom: 12,
  },
  sectionTitle: {
    fontSize: 18,
    fontWeight: '600',
    color: '#ffffff',
    marginBottom: 8,
  },
  sectionDescription: {
    fontSize: 14,
    color: '#999',
    marginBottom: 16,
    lineHeight: 20,
  },
  // --- Enable-local-scrapers toggle row ------------------------------------
  settingRow: {
    flexDirection: 'row',
    justifyContent: 'space-between',
    alignItems: 'center',
  },
  settingInfo: {
    flex: 1,
    marginRight: 16,
  },
  settingTitle: {
    fontSize: 16,
    fontWeight: '500',
    color: '#ffffff',
    marginBottom: 4,
  },
  settingDescription: {
    fontSize: 14,
    color: '#999',
    lineHeight: 18,
  },
  // --- Repository URL input + action buttons -------------------------------
  inputContainer: {
    marginBottom: 16,
  },
  textInput: {
    backgroundColor: '#1a1a1a',
    borderRadius: 8,
    padding: 12,
    fontSize: 16,
    color: '#ffffff',
    borderWidth: 1,
    borderColor: '#333',
  },
  buttonRow: {
    flexDirection: 'row',
    gap: 12,
  },
  button: {
    flex: 1,
    paddingVertical: 12,
    paddingHorizontal: 16,
    borderRadius: 8,
    alignItems: 'center',
    justifyContent: 'center',
    minHeight: 44, // iOS minimum touch-target height
  },
  primaryButton: {
    backgroundColor: '#007AFF',
  },
  secondaryButton: {
    backgroundColor: 'transparent',
    borderWidth: 1,
    borderColor: '#007AFF',
  },
  buttonText: {
    color: '#ffffff',
    fontSize: 16,
    fontWeight: '600',
  },
  secondaryButtonText: {
    color: '#007AFF',
    fontSize: 16,
    fontWeight: '600',
  },
  clearButton: {
    paddingVertical: 6,
    paddingHorizontal: 12,
    borderRadius: 6,
    backgroundColor: '#ff3b30', // destructive action (red)
  },
  clearButtonText: {
    color: '#ffffff',
    fontSize: 14,
    fontWeight: '500',
  },
  // --- Installed scraper list ----------------------------------------------
  scrapersList: {
    gap: 12,
  },
  scraperItem: {
    flexDirection: 'row',
    alignItems: 'center',
    backgroundColor: '#1a1a1a',
    borderRadius: 8,
    padding: 16,
    borderWidth: 1,
    borderColor: '#333',
  },
  scraperInfo: {
    flex: 1,
    marginRight: 16,
  },
  scraperName: {
    fontSize: 16,
    fontWeight: '600',
    color: '#ffffff',
    marginBottom: 4,
  },
  scraperDescription: {
    fontSize: 14,
    color: '#999',
    marginBottom: 8,
    lineHeight: 18,
  },
  scraperMeta: {
    flexDirection: 'row',
    gap: 12,
  },
  scraperVersion: {
    fontSize: 12,
    color: '#007AFF',
    fontWeight: '500',
  },
  scraperTypes: {
    fontSize: 12,
    color: '#666',
    textTransform: 'uppercase',
  },
  // --- Empty state (no scrapers installed) ---------------------------------
  emptyState: {
    alignItems: 'center',
    paddingVertical: 32,
  },
  emptyStateTitle: {
    fontSize: 18,
    fontWeight: '600',
    color: '#ffffff',
    marginTop: 16,
    marginBottom: 8,
  },
  emptyStateDescription: {
    fontSize: 14,
    color: '#999',
    textAlign: 'center',
    lineHeight: 20,
  },
  infoText: {
    fontSize: 14,
    color: '#999',
    lineHeight: 20,
    marginBottom: 12,
  },
  // --- Current repository display ------------------------------------------
  currentRepoContainer: {
    backgroundColor: '#1a1a1a',
    borderRadius: 8,
    padding: 12,
    marginBottom: 16,
    borderWidth: 1,
    borderColor: '#333',
  },
  currentRepoLabel: {
    fontSize: 14,
    fontWeight: '500',
    color: '#007AFF',
    marginBottom: 4,
  },
  currentRepoUrl: {
    fontSize: 14,
    color: '#ffffff',
    fontFamily: 'monospace',
    lineHeight: 18,
  },
  urlHint: {
    fontSize: 12,
    color: '#666',
    marginTop: 8,
    lineHeight: 16,
  },
  defaultRepoButton: {
    backgroundColor: '#333',
    borderRadius: 6,
    paddingVertical: 8,
    paddingHorizontal: 12,
    marginTop: 8,
    alignItems: 'center',
  },
  defaultRepoButtonText: {
    color: '#007AFF',
    fontSize: 14,
    fontWeight: '500',
  },
});

export default ScraperSettingsScreen;

View file

@ -401,6 +401,13 @@ const SettingsScreen: React.FC = () => {
renderControl={ChevronRight}
onPress={() => navigation.navigate('PlayerSettings')}
/>
<SettingItem
title="Local Scrapers"
description="Manage local scraper repositories"
icon="code"
renderControl={ChevronRight}
onPress={() => navigation.navigate('ScraperSettings')}
/>
<SettingItem
title="Notifications"
description="Episode reminders"

View file

@@ -0,0 +1,439 @@
import AsyncStorage from '@react-native-async-storage/async-storage';
import axios from 'axios';
import { logger } from '../utils/logger';
import { Stream } from '../types/streams';
import { cacheService } from './cacheService';
// Types for local scrapers

/**
 * Shape of the repository's `manifest.json`: repository metadata plus the
 * list of scrapers it offers. Fetched by `refreshRepository()`.
 */
export interface ScraperManifest {
  name: string;
  version: string;
  description: string;
  author: string;
  scrapers: ScraperInfo[];
}

/**
 * Metadata for a single scraper, as listed in the repository manifest and
 * as persisted locally after installation.
 */
export interface ScraperInfo {
  id: string;
  name: string;
  description: string;
  version: string;
  // File name of the scraper's JS module, relative to the repository root.
  filename: string;
  supportedTypes: ('movie' | 'tv')[];
  // Local enabled/disabled state; preserved across repository refreshes.
  enabled: boolean;
}

/**
 * Raw result object returned by a scraper's `getStreams` function.
 * Only `title` and `url` are required; the index signature allows
 * scraper-specific extras to pass through.
 */
export interface LocalScraperResult {
  title: string;
  url: string;
  quality?: string;
  size?: string;
  language?: string;
  provider?: string;
  type?: string;
  seeders?: number;
  peers?: number;
  infoHash?: string;
  [key: string]: any;
}

// Callback type for scraper results: invoked per-scraper with either the
// converted streams or the error that scraper produced.
type ScraperCallback = (streams: Stream[] | null, scraperId: string | null, scraperName: string | null, error: Error | null) => void;
/**
 * LocalScraperService — singleton managing download, storage and execution
 * of local scraper modules.
 *
 * Responsibilities:
 *  - persist the repository URL and installed scraper metadata (AsyncStorage),
 *  - download scraper code from the repository and cache it locally,
 *  - execute scraper code in a limited `new Function` context and convert
 *    results to the app's `Stream` format.
 */
class LocalScraperService {
  private static instance: LocalScraperService;
  private readonly STORAGE_KEY = 'local-scrapers';
  private readonly REPOSITORY_KEY = 'scraper-repository-url';
  // Reserved for per-scraper settings; currently unused.
  private readonly SCRAPER_SETTINGS_KEY = 'scraper-settings';
  private installedScrapers: Map<string, ScraperInfo> = new Map();
  private scraperCode: Map<string, string> = new Map();
  private repositoryUrl: string = '';
  private initialized: boolean = false;
  // FIX: memoize the in-flight initialization. Previously the constructor
  // fired initialize() without keeping the promise, and ensureInitialized()
  // called initialize() again while the first run was still pending (the
  // `initialized` flag only flips at the end), so initialization could run
  // twice concurrently. Awaiting a single shared promise removes the race.
  private initPromise: Promise<void> | null = null;

  private constructor() {
    this.initPromise = this.initialize();
  }

  static getInstance(): LocalScraperService {
    if (!LocalScraperService.instance) {
      LocalScraperService.instance = new LocalScraperService();
    }
    return LocalScraperService.instance;
  }

  /** Load repository URL, scraper metadata and cached code from storage. */
  private async initialize(): Promise<void> {
    if (this.initialized) return;

    try {
      // Load repository URL
      const storedRepoUrl = await AsyncStorage.getItem(this.REPOSITORY_KEY);
      if (storedRepoUrl) {
        this.repositoryUrl = storedRepoUrl;
      }

      // Load installed scrapers
      const storedScrapers = await AsyncStorage.getItem(this.STORAGE_KEY);
      if (storedScrapers) {
        const scrapers: ScraperInfo[] = JSON.parse(storedScrapers);
        scrapers.forEach(scraper => {
          this.installedScrapers.set(scraper.id, scraper);
        });
      }

      // Load scraper code from cache
      await this.loadScraperCode();

      this.initialized = true;
      logger.log('[LocalScraperService] Initialized with', this.installedScrapers.size, 'scrapers');
    } catch (error) {
      logger.error('[LocalScraperService] Failed to initialize:', error);
      this.initialized = true; // Set to true to prevent infinite retry
    }
  }

  /** Await the (single, shared) initialization before any public operation. */
  private async ensureInitialized(): Promise<void> {
    if (this.initialized) return;
    if (!this.initPromise) {
      this.initPromise = this.initialize();
    }
    await this.initPromise;
  }

  /** Set and persist the repository URL. */
  async setRepositoryUrl(url: string): Promise<void> {
    this.repositoryUrl = url;
    await AsyncStorage.setItem(this.REPOSITORY_KEY, url);
    logger.log('[LocalScraperService] Repository URL set to:', url);
  }

  /** Get the persisted repository URL ('' when none configured). */
  async getRepositoryUrl(): Promise<string> {
    await this.ensureInitialized();
    return this.repositoryUrl;
  }

  /**
   * Fetch the repository's manifest.json and download every listed scraper.
   * Throws when no URL is configured or the manifest fetch fails.
   * NOTE(review): scrapers removed from the manifest are NOT pruned here —
   * they remain installed until "Clear All"; confirm whether that is intended.
   */
  async refreshRepository(): Promise<void> {
    await this.ensureInitialized();

    if (!this.repositoryUrl) {
      throw new Error('No repository URL configured');
    }

    try {
      logger.log('[LocalScraperService] Fetching repository manifest from:', this.repositoryUrl);

      // Fetch manifest (tolerate URLs with or without a trailing slash)
      const manifestUrl = this.repositoryUrl.endsWith('/')
        ? `${this.repositoryUrl}manifest.json`
        : `${this.repositoryUrl}/manifest.json`;

      const response = await axios.get(manifestUrl, { timeout: 10000 });
      const manifest: ScraperManifest = response.data;

      logger.log('[LocalScraperService] Found', manifest.scrapers.length, 'scrapers in repository');

      // Download and install each scraper
      for (const scraperInfo of manifest.scrapers) {
        await this.downloadScraper(scraperInfo);
      }

      await this.saveInstalledScrapers();
      logger.log('[LocalScraperService] Repository refresh completed');
    } catch (error) {
      logger.error('[LocalScraperService] Failed to refresh repository:', error);
      throw error;
    }
  }

  /**
   * Download one scraper's code, register it, and cache the code.
   * Deliberately best-effort: a failed download is logged but does not
   * abort the overall repository refresh.
   */
  private async downloadScraper(scraperInfo: ScraperInfo): Promise<void> {
    try {
      const scraperUrl = this.repositoryUrl.endsWith('/')
        ? `${this.repositoryUrl}${scraperInfo.filename}`
        : `${this.repositoryUrl}/${scraperInfo.filename}`;

      logger.log('[LocalScraperService] Downloading scraper:', scraperInfo.name);

      const response = await axios.get(scraperUrl, { timeout: 15000 });
      // Assumes the server returns the JS source as text — TODO confirm
      // (axios may parse a JSON content-type into an object).
      const scraperCode = response.data;

      // Store scraper info and code; preserve the user's enabled/disabled choice.
      this.installedScrapers.set(scraperInfo.id, {
        ...scraperInfo,
        enabled: this.installedScrapers.get(scraperInfo.id)?.enabled ?? true // Preserve enabled state
      });
      this.scraperCode.set(scraperInfo.id, scraperCode);

      // Cache the scraper code
      await this.cacheScraperCode(scraperInfo.id, scraperCode);

      logger.log('[LocalScraperService] Successfully downloaded:', scraperInfo.name);
    } catch (error) {
      logger.error('[LocalScraperService] Failed to download scraper', scraperInfo.name, ':', error);
    }
  }

  /** Persist a scraper's code under `scraper-code-<id>`. */
  private async cacheScraperCode(scraperId: string, code: string): Promise<void> {
    try {
      await AsyncStorage.setItem(`scraper-code-${scraperId}`, code);
    } catch (error) {
      logger.error('[LocalScraperService] Failed to cache scraper code:', error);
    }
  }

  /** Load cached code for every installed scraper into memory. */
  private async loadScraperCode(): Promise<void> {
    for (const [scraperId] of this.installedScrapers) {
      try {
        const cachedCode = await AsyncStorage.getItem(`scraper-code-${scraperId}`);
        if (cachedCode) {
          this.scraperCode.set(scraperId, cachedCode);
        }
      } catch (error) {
        logger.error('[LocalScraperService] Failed to load cached code for', scraperId, ':', error);
      }
    }
  }

  /** Persist the installed-scrapers map as a JSON array. */
  private async saveInstalledScrapers(): Promise<void> {
    try {
      const scrapers = Array.from(this.installedScrapers.values());
      await AsyncStorage.setItem(this.STORAGE_KEY, JSON.stringify(scrapers));
    } catch (error) {
      logger.error('[LocalScraperService] Failed to save scrapers:', error);
    }
  }

  /** All installed scrapers (enabled or not). */
  async getInstalledScrapers(): Promise<ScraperInfo[]> {
    await this.ensureInitialized();
    return Array.from(this.installedScrapers.values());
  }

  /** Enable or disable a scraper and persist the change. No-op for unknown ids. */
  async setScraperEnabled(scraperId: string, enabled: boolean): Promise<void> {
    await this.ensureInitialized();

    const scraper = this.installedScrapers.get(scraperId);
    if (scraper) {
      scraper.enabled = enabled;
      this.installedScrapers.set(scraperId, scraper);
      await this.saveInstalledScrapers();
      logger.log('[LocalScraperService] Scraper', scraperId, enabled ? 'enabled' : 'disabled');
    }
  }

  /**
   * Run every enabled scraper that supports `type` for the given title.
   * Scrapers are fired in parallel (intentionally not awaited); each one
   * reports its streams or error through `callback`.
   */
  async getStreams(type: string, tmdbId: string, season?: number, episode?: number, callback?: ScraperCallback): Promise<void> {
    await this.ensureInitialized();

    const enabledScrapers = Array.from(this.installedScrapers.values())
      .filter(scraper => scraper.enabled && scraper.supportedTypes.includes(type as 'movie' | 'tv'));

    if (enabledScrapers.length === 0) {
      logger.log('[LocalScraperService] No enabled scrapers found for type:', type);
      return;
    }

    logger.log('[LocalScraperService] Executing', enabledScrapers.length, 'scrapers for', type, tmdbId);

    // Execute each scraper (fire-and-forget; results arrive via callback)
    for (const scraper of enabledScrapers) {
      this.executeScraper(scraper, type, tmdbId, season, episode, callback);
    }
  }

  /** Execute one scraper's cached code and deliver results via callback. */
  private async executeScraper(
    scraper: ScraperInfo,
    type: string,
    tmdbId: string,
    season?: number,
    episode?: number,
    callback?: ScraperCallback
  ): Promise<void> {
    try {
      const code = this.scraperCode.get(scraper.id);
      if (!code) {
        throw new Error(`No code found for scraper ${scraper.id}`);
      }

      logger.log('[LocalScraperService] Executing scraper:', scraper.name);

      // Create a sandboxed execution environment
      const results = await this.executeSandboxed(code, {
        tmdbId,
        mediaType: type,
        season,
        episode
      });

      // Convert results to Nuvio Stream format
      const streams = this.convertToStreams(results, scraper);

      if (callback) {
        callback(streams, scraper.id, scraper.name, null);
      }

      logger.log('[LocalScraperService] Scraper', scraper.name, 'returned', streams.length, 'streams');
    } catch (error) {
      logger.error('[LocalScraperService] Scraper', scraper.name, 'failed:', error);
      if (callback) {
        callback(null, scraper.id, scraper.name, error as Error);
      }
    }
  }

  /**
   * Execute scraper code with a restricted set of globals and a hard timeout.
   * SECURITY: `new Function` is NOT a real sandbox — scraper code runs with
   * the app's privileges. Only install scrapers from trusted repositories.
   * TODO: the 60 s timeout is hard-coded; the `scraperTimeout` app setting
   * is not consulted here — confirm and wire it through if intended.
   */
  private async executeSandboxed(code: string, params: any): Promise<LocalScraperResult[]> {
    // This is a simplified sandbox - in production, you'd want more security
    try {
      // Create a limited global context
      const sandbox = {
        console: {
          log: (...args: any[]) => logger.log('[Scraper]', ...args),
          error: (...args: any[]) => logger.error('[Scraper]', ...args),
          warn: (...args: any[]) => logger.warn('[Scraper]', ...args)
        },
        setTimeout,
        clearTimeout,
        Promise,
        JSON,
        Date,
        Math,
        parseInt,
        parseFloat,
        encodeURIComponent,
        decodeURIComponent,
        // Add axios for HTTP requests
        axios: axios.create({
          timeout: 30000,
          headers: {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
          }
        })
      };

      // Execute the scraper code with timeout
      const timeoutPromise = new Promise((_, reject) => {
        setTimeout(() => reject(new Error('Scraper execution timeout')), 60000); // 60 second timeout
      });

      const executionPromise = new Promise<LocalScraperResult[]>((resolve, reject) => {
        try {
          // Create function from code; the scraper must define getStreams
          // either as a top-level function or via module.exports.
          const func = new Function('sandbox', 'params', `
            const { console, setTimeout, clearTimeout, Promise, JSON, Date, Math, parseInt, parseFloat, encodeURIComponent, decodeURIComponent, axios } = sandbox;
            ${code}
            // Call the main function (assuming it's exported)
            if (typeof getStreams === 'function') {
              return getStreams(params.tmdbId, params.mediaType, params.season, params.episode);
            } else if (typeof module !== 'undefined' && module.exports && typeof module.exports.getStreams === 'function') {
              return module.exports.getStreams(params.tmdbId, params.mediaType, params.season, params.episode);
            } else {
              throw new Error('No getStreams function found in scraper');
            }
          `);

          const result = func(sandbox, params);

          // Handle both sync and async results
          if (result && typeof result.then === 'function') {
            result.then(resolve).catch(reject);
          } else {
            resolve(result || []);
          }
        } catch (error) {
          reject(error);
        }
      });

      return await Promise.race([executionPromise, timeoutPromise]) as LocalScraperResult[];
    } catch (error) {
      logger.error('[LocalScraperService] Sandbox execution failed:', error);
      throw error;
    }
  }

  /**
   * Convert raw scraper results to the app's Stream format; results without
   * a URL are dropped. Non-array results yield an empty list.
   */
  private convertToStreams(results: LocalScraperResult[], scraper: ScraperInfo): Stream[] {
    if (!Array.isArray(results)) {
      logger.warn('[LocalScraperService] Scraper returned non-array result');
      return [];
    }

    return results.map((result, index) => {
      const stream: Stream = {
        name: result.title || `${scraper.name} Stream ${index + 1}`,
        title: result.title || `${scraper.name} Stream ${index + 1}`,
        url: result.url,
        addon: scraper.id,
        addonId: scraper.id,
        addonName: scraper.name,
        // FIX: quality and size were concatenated with no separator,
        // rendering e.g. "1080p2.1GB"; insert " • " between them.
        description: result.quality ? `${result.quality}${result.size ? ` • ${result.size}` : ''}` : undefined,
        size: result.size ? this.parseSize(result.size) : undefined,
        behaviorHints: {
          bingeGroup: `local-scraper-${scraper.id}`
        }
      };

      // Add additional properties if available
      if (result.infoHash) {
        stream.infoHash = result.infoHash;
      }

      return stream;
    }).filter(stream => stream.url); // Filter out streams without URLs
  }

  /**
   * Parse a human-readable size string ("2.1 GB") into bytes.
   * Returns 0 for empty/unparseable input; a bare number passes through as-is.
   */
  private parseSize(sizeStr: string): number {
    if (!sizeStr) return 0;

    const match = sizeStr.match(/([0-9.]+)\s*(GB|MB|KB|TB)/i);
    if (!match) return 0;

    const value = parseFloat(match[1]);
    const unit = match[2].toUpperCase();

    switch (unit) {
      case 'TB': return value * 1024 * 1024 * 1024 * 1024;
      case 'GB': return value * 1024 * 1024 * 1024;
      case 'MB': return value * 1024 * 1024;
      case 'KB': return value * 1024;
      default: return value;
    }
  }

  /** Remove all scrapers (metadata + cached code). Repository URL is kept. */
  async clearScrapers(): Promise<void> {
    this.installedScrapers.clear();
    this.scraperCode.clear();

    // Clear from storage
    await AsyncStorage.removeItem(this.STORAGE_KEY);

    // Clear cached code
    const keys = await AsyncStorage.getAllKeys();
    const scraperCodeKeys = keys.filter(key => key.startsWith('scraper-code-'));
    await AsyncStorage.multiRemove(scraperCodeKeys);

    logger.log('[LocalScraperService] All scrapers cleared');
  }

  /** True when at least one installed scraper is enabled. */
  async hasScrapers(): Promise<boolean> {
    await this.ensureInitialized();
    return Array.from(this.installedScrapers.values()).some(scraper => scraper.enabled);
  }
}

export const localScraperService = LocalScraperService.getInstance();
export default localScraperService;

View file

@ -254,7 +254,8 @@ class NotificationService {
this.librarySubscription = catalogService.subscribeToLibraryUpdates(async (libraryItems) => {
if (!this.settings.enabled) return;
logger.log('[NotificationService] Library updated, syncing notifications for', libraryItems.length, 'items');
// Reduced logging verbosity
// logger.log('[NotificationService] Library updated, syncing notifications for', libraryItems.length, 'items');
await this.syncNotificationsForLibrary(libraryItems);
});
} catch (error) {
@ -267,7 +268,8 @@ class NotificationService {
// Sync notifications every 6 hours
this.backgroundSyncInterval = setInterval(async () => {
if (this.settings.enabled) {
logger.log('[NotificationService] Running background notification sync');
// Reduced logging verbosity
// logger.log('[NotificationService] Running background notification sync');
await this.performBackgroundSync();
}
}, 6 * 60 * 60 * 1000); // 6 hours
@ -283,7 +285,8 @@ class NotificationService {
private handleAppStateChange = async (nextAppState: AppStateStatus) => {
if (nextAppState === 'active' && this.settings.enabled) {
// App came to foreground, sync notifications
logger.log('[NotificationService] App became active, syncing notifications');
// Reduced logging verbosity
// logger.log('[NotificationService] App became active, syncing notifications');
await this.performBackgroundSync();
}
};
@ -299,7 +302,8 @@ class NotificationService {
await new Promise(resolve => setTimeout(resolve, 100));
}
logger.log(`[NotificationService] Synced notifications for ${seriesItems.length} series from library`);
// Reduced logging verbosity
// logger.log(`[NotificationService] Synced notifications for ${seriesItems.length} series from library`);
} catch (error) {
logger.error('[NotificationService] Error syncing library notifications:', error);
}
@ -308,7 +312,8 @@ class NotificationService {
// Perform comprehensive background sync including Trakt integration
private async performBackgroundSync(): Promise<void> {
try {
logger.log('[NotificationService] Starting comprehensive background sync');
// Reduced logging verbosity
// logger.log('[NotificationService] Starting comprehensive background sync');
// Get library items
const libraryItems = catalogService.getLibraryItems();
@ -320,7 +325,8 @@ class NotificationService {
// Clean up old notifications
await this.cleanupOldNotifications();
logger.log('[NotificationService] Background sync completed');
// Reduced logging verbosity
// logger.log('[NotificationService] Background sync completed');
} catch (error) {
logger.error('[NotificationService] Error in background sync:', error);
}
@ -330,12 +336,14 @@ class NotificationService {
private async syncTraktNotifications(): Promise<void> {
try {
const isAuthenticated = await traktService.isAuthenticated();
if (!isAuthenticated) {
logger.log('[NotificationService] Trakt not authenticated, skipping Trakt sync');
if (!traktService.isAuthenticated()) {
// Reduced logging verbosity
// logger.log('[NotificationService] Trakt not authenticated, skipping Trakt sync');
return;
}
logger.log('[NotificationService] Syncing comprehensive Trakt notifications');
// Reduced logging verbosity
// logger.log('[NotificationService] Syncing comprehensive Trakt notifications');
// Get all Trakt data sources (same as calendar screen uses)
const [watchlistShows, continueWatching, watchedShows, collectionShows] = await Promise.all([
@ -418,7 +426,8 @@ class NotificationService {
});
}
logger.log(`[NotificationService] Found ${allTraktShows.size} unique Trakt shows from all sources`);
// Reduced logging verbosity
// logger.log(`[NotificationService] Found ${allTraktShows.size} unique Trakt shows from all sources`);
// Sync notifications for each Trakt show
let syncedCount = 0;
@ -433,7 +442,8 @@ class NotificationService {
}
}
logger.log(`[NotificationService] Successfully synced notifications for ${syncedCount}/${allTraktShows.size} Trakt shows`);
// Reduced logging verbosity
// logger.log(`[NotificationService] Successfully synced notifications for ${syncedCount}/${allTraktShows.size} Trakt shows`);
} catch (error) {
logger.error('[NotificationService] Error syncing Trakt notifications:', error);
}
@ -442,7 +452,8 @@ class NotificationService {
// Enhanced series notification update with TMDB fallback
async updateNotificationsForSeries(seriesId: string): Promise<void> {
try {
logger.log(`[NotificationService] Updating notifications for series: ${seriesId}`);
// Reduced logging verbosity - only log for debug purposes
// logger.log(`[NotificationService] Updating notifications for series: ${seriesId}`);
// Try Stremio first
let metadata = await stremioService.getMetaDetails('series', seriesId);
@ -543,9 +554,10 @@ class NotificationService {
}));
const scheduledCount = await this.scheduleMultipleEpisodeNotifications(notificationItems);
logger.log(`[NotificationService] Scheduled ${scheduledCount} notifications for ${metadata.name}`);
// Reduced logging verbosity
// logger.log(`[NotificationService] Scheduled ${scheduledCount} notifications for ${metadata.name}`);
} else {
logger.log(`[NotificationService] No upcoming episodes found for ${metadata.name}`);
// logger.log(`[NotificationService] No upcoming episodes found for ${metadata.name}`);
}
} catch (error) {
logger.error(`[NotificationService] Error updating notifications for series ${seriesId}:`, error);
@ -567,7 +579,8 @@ class NotificationService {
if (validNotifications.length !== this.scheduledNotifications.length) {
this.scheduledNotifications = validNotifications;
await this.saveScheduledNotifications();
logger.log(`[NotificationService] Cleaned up ${this.scheduledNotifications.length - validNotifications.length} old notifications`);
// Reduced logging verbosity
// logger.log(`[NotificationService] Cleaned up ${this.scheduledNotifications.length - validNotifications.length} old notifications`);
}
} catch (error) {
logger.error('[NotificationService] Error cleaning up notifications:', error);
@ -576,7 +589,8 @@ class NotificationService {
// Public method to manually trigger sync for all library items
public async syncAllNotifications(): Promise<void> {
logger.log('[NotificationService] Manual sync triggered');
// Reduced logging verbosity
// logger.log('[NotificationService] Manual sync triggered');
await this.performBackgroundSync();
}
@ -622,4 +636,4 @@ class NotificationService {
}
// Export singleton instance
export const notificationService = NotificationService.getInstance();
export const notificationService = NotificationService.getInstance();

View file

@ -2,6 +2,8 @@ import axios from 'axios';
import AsyncStorage from '@react-native-async-storage/async-storage';
import { logger } from '../utils/logger';
import EventEmitter from 'eventemitter3';
import { localScraperService } from './localScraperService';
import { DEFAULT_SETTINGS, AppSettings } from '../hooks/useSettings';
// Create an event emitter for addon changes
export const addonEmitter = new EventEmitter();
@ -618,6 +620,37 @@ class StremioService {
const addons = this.getInstalledAddons();
logger.log('📌 [getStreams] Installed addons:', addons.map(a => ({ id: a.id, name: a.name, url: a.url })));
// Check if local scrapers are enabled and execute them first
try {
// Load settings from AsyncStorage directly
const settingsJson = await AsyncStorage.getItem('app_settings');
const settings: AppSettings = settingsJson ? JSON.parse(settingsJson) : DEFAULT_SETTINGS;
if (settings.enableLocalScrapers) {
const hasScrapers = await localScraperService.hasScrapers();
if (hasScrapers) {
logger.log('🔧 [getStreams] Executing local scrapers for', type, id);
// Execute local scrapers asynchronously
localScraperService.getStreams(type, id, undefined, undefined, (streams, scraperId, scraperName, error) => {
if (error) {
logger.error(`❌ [getStreams] Local scraper ${scraperName} failed:`, error);
if (callback) {
callback(null, scraperId, scraperName, error);
}
} else if (streams && streams.length > 0) {
logger.log(`✅ [getStreams] Local scraper ${scraperName} returned ${streams.length} streams`);
if (callback) {
callback(streams, scraperId, scraperName, null);
}
}
});
}
}
} catch (error) {
logger.error('❌ [getStreams] Failed to execute local scrapers:', error);
}
// Check specifically for TMDB Embed addon
const tmdbEmbed = addons.find(addon => addon.id === 'org.tmdbembedapi');
if (tmdbEmbed) {