diff --git a/src/hooks/useMetadata.ts b/src/hooks/useMetadata.ts
index 5934ceb..1279b58 100644
--- a/src/hooks/useMetadata.ts
+++ b/src/hooks/useMetadata.ts
@@ -4,7 +4,7 @@ import { catalogService } from '../services/catalogService';
import { stremioService } from '../services/stremioService';
import { tmdbService } from '../services/tmdbService';
import { cacheService } from '../services/cacheService';
-import { localScraperService, ScraperInfo } from '../services/localScraperService';
+import { localScraperService, ScraperInfo } from '../services/pluginService';
import { Cast, Episode, GroupedEpisodes, GroupedStreams } from '../types/metadata';
import { TMDBService } from '../services/tmdbService';
import { logger } from '../utils/logger';
diff --git a/src/screens/PluginsScreen.tsx b/src/screens/PluginsScreen.tsx
index 4e90c75..9a08a26 100644
--- a/src/screens/PluginsScreen.tsx
+++ b/src/screens/PluginsScreen.tsx
@@ -22,7 +22,7 @@ import { SafeAreaView } from 'react-native-safe-area-context';
import { Ionicons } from '@expo/vector-icons';
import { useNavigation } from '@react-navigation/native';
import { useSettings } from '../hooks/useSettings';
-import { localScraperService, ScraperInfo, RepositoryInfo } from '../services/localScraperService';
+import { localScraperService, pluginService, ScraperInfo, RepositoryInfo } from '../services/pluginService';
import { logger } from '../utils/logger';
import { useTheme } from '../contexts/ThemeContext';
@@ -934,7 +934,7 @@ const PluginsScreen: React.FC = () => {
try {
setIsRefreshing(true);
const promises = filteredScrapers.map(scraper =>
- localScraperService.setScraperEnabled(scraper.id, enabled)
+ pluginService.setScraperEnabled(scraper.id, enabled)
);
await Promise.all(promises);
await loadScrapers();
@@ -988,7 +988,7 @@ const PluginsScreen: React.FC = () => {
try {
setIsLoading(true);
- const repoId = await localScraperService.addRepository({
+ const repoId = await pluginService.addRepository({
name: '', // Let the service fetch from manifest
url: normalizedUrl, // Use normalized URL (without manifest.json)
description: '',
@@ -998,7 +998,7 @@ const PluginsScreen: React.FC = () => {
await loadRepositories();
// Switch to the new repository and refresh it
- await localScraperService.setCurrentRepository(repoId);
+ await pluginService.setCurrentRepository(repoId);
await loadRepositories();
await loadScrapers();
@@ -1016,7 +1016,7 @@ const PluginsScreen: React.FC = () => {
const handleSwitchRepository = async (repoId: string) => {
try {
setSwitchingRepository(repoId);
- await localScraperService.setCurrentRepository(repoId);
+ await pluginService.setCurrentRepository(repoId);
await loadRepositories();
await loadScrapers();
openAlert('Success', 'Repository switched successfully');
@@ -1049,7 +1049,7 @@ const PluginsScreen: React.FC = () => {
label: 'Remove',
onPress: async () => {
try {
- await localScraperService.removeRepository(repoId);
+ await pluginService.removeRepository(repoId);
await loadRepositories();
await loadScrapers();
const successMessage = isLastRepository
@@ -1073,7 +1073,7 @@ const PluginsScreen: React.FC = () => {
const loadScrapers = async () => {
try {
- const scrapers = await localScraperService.getAvailableScrapers();
+ const scrapers = await pluginService.getAvailableScrapers();
setInstalledScrapers(scrapers);
@@ -1086,7 +1086,7 @@ const PluginsScreen: React.FC = () => {
});
if (sb) {
setShowboxScraperId(sb.id);
- const s = await localScraperService.getScraperSettings(sb.id);
+ const s = await pluginService.getScraperSettings(sb.id);
setShowboxUiToken(s.uiToken || '');
setShowboxSavedToken(s.uiToken || '');
setShowboxTokenVisible(false);
@@ -1104,13 +1104,13 @@ const PluginsScreen: React.FC = () => {
const loadRepositories = async () => {
try {
// First refresh repository names from manifests for existing repositories
- await localScraperService.refreshRepositoryNamesFromManifests();
+ await pluginService.refreshRepositoryNamesFromManifests();
- const repos = await localScraperService.getRepositories();
+ const repos = await pluginService.getRepositories();
setRepositories(repos);
setHasRepository(repos.length > 0);
- const currentRepoId = localScraperService.getCurrentRepositoryId();
+ const currentRepoId = pluginService.getCurrentRepositoryId();
setCurrentRepositoryId(currentRepoId);
const currentRepo = repos.find(r => r.id === currentRepoId);
@@ -1124,7 +1124,7 @@ const PluginsScreen: React.FC = () => {
const checkRepository = async () => {
try {
- const repoUrl = await localScraperService.getRepositoryUrl();
+ const repoUrl = await pluginService.getRepositoryUrl();
setHasRepository(!!repoUrl);
if (repoUrl && repoUrl !== repositoryUrl) {
setRepositoryUrl(repoUrl);
@@ -1152,7 +1152,7 @@ const PluginsScreen: React.FC = () => {
try {
setIsLoading(true);
- await localScraperService.setRepositoryUrl(url);
+ await pluginService.setRepositoryUrl(url);
await updateSetting('scraperRepositoryUrl', url);
setHasRepository(true);
openAlert('Success', 'Repository URL saved successfully');
@@ -1175,7 +1175,7 @@ const PluginsScreen: React.FC = () => {
logger.log('[PluginsScreen] Starting hard refresh of repository...');
// Force a complete hard refresh by clearing any cached data first
- await localScraperService.refreshRepository();
+ await pluginService.refreshRepository();
// Load fresh scrapers from the updated repository
await loadScrapers();
@@ -1197,18 +1197,18 @@ const PluginsScreen: React.FC = () => {
try {
if (enabled) {
// If enabling a scraper, ensure it's installed first
- const installedScrapers = await localScraperService.getInstalledScrapers();
+ const installedScrapers = await pluginService.getInstalledScrapers();
const isInstalled = installedScrapers.some(scraper => scraper.id === scraperId);
if (!isInstalled) {
// Need to install the scraper first
setIsRefreshing(true);
- await localScraperService.refreshRepository();
+ await pluginService.refreshRepository();
setIsRefreshing(false);
}
}
- await localScraperService.setScraperEnabled(scraperId, enabled);
+ await pluginService.setScraperEnabled(scraperId, enabled);
await loadScrapers();
} catch (error) {
logger.error('[ScraperSettings] Failed to toggle scraper:', error);
@@ -1227,7 +1227,7 @@ const PluginsScreen: React.FC = () => {
label: 'Clear',
onPress: async () => {
try {
- await localScraperService.clearScrapers();
+ await pluginService.clearScrapers();
await loadScrapers();
openAlert('Success', 'All scrapers have been removed');
} catch (error) {
@@ -1250,8 +1250,8 @@ const PluginsScreen: React.FC = () => {
label: 'Clear Cache',
onPress: async () => {
try {
- await localScraperService.clearScrapers();
- await localScraperService.setRepositoryUrl('');
+ await pluginService.clearScrapers();
+ await pluginService.setRepositoryUrl('');
await updateSetting('scraperRepositoryUrl', '');
setRepositoryUrl('');
setHasRepository(false);
@@ -1275,21 +1275,21 @@ const PluginsScreen: React.FC = () => {
const handleToggleLocalScrapers = async (enabled: boolean) => {
await updateSetting('enableLocalScrapers', enabled);
- // If enabling local scrapers, refresh repository and reload scrapers
+ // If enabling plugins, refresh repository and reload plugins
if (enabled) {
try {
setIsRefreshing(true);
- logger.log('[PluginsScreen] Enabling local scrapers - refreshing repository...');
+ logger.log('[PluginsScreen] Enabling plugins - refreshing repository...');
- // Refresh repository to ensure scrapers are available
- await localScraperService.refreshRepository();
+ // Refresh repository to ensure plugins are available
+ await pluginService.refreshRepository();
- // Reload scrapers to get the latest state
+ // Reload plugins to get the latest state
await loadScrapers();
- logger.log('[PluginsScreen] Local scrapers enabled and repository refreshed');
+ logger.log('[PluginsScreen] Plugins enabled and repository refreshed');
} catch (error) {
- logger.error('[PluginsScreen] Failed to refresh repository when enabling local scrapers:', error);
+ logger.error('[PluginsScreen] Failed to refresh repository when enabling plugins:', error);
// Don't show error to user as the toggle still succeeded
} finally {
setIsRefreshing(false);
@@ -1379,7 +1379,7 @@ const PluginsScreen: React.FC = () => {
logger.log('[PluginsScreen] Pull-to-refresh: Starting hard refresh...');
// Force hard refresh of repository
- await localScraperService.refreshRepository();
+ await pluginService.refreshRepository();
await loadScrapers();
logger.log('[PluginsScreen] Pull-to-refresh completed');
@@ -1394,9 +1394,9 @@ const PluginsScreen: React.FC = () => {
>
{/* Quick Setup banner removed */}
- {/* Enable Local Scrapers */}
+ {/* Enable Plugins */}
toggleSection('repository')}
colors={colors}
@@ -1404,9 +1404,9 @@ const PluginsScreen: React.FC = () => {
>
- Enable Local Scrapers
+ Enable Plugins
- Allow the app to use locally installed scrapers for finding streams
+ Allow the app to use installed plugins for finding streams
{
{currentRepositoryId && (
Current Repository:
- {localScraperService.getRepositoryName()}
+ {pluginService.getRepositoryName()}
{repositoryUrl}
)}
@@ -1518,9 +1518,9 @@ const PluginsScreen: React.FC = () => {
- {/* Available Scrapers */}
+ {/* Available Plugins */}
toggleSection('scrapers')}
colors={colors}
@@ -1710,7 +1710,7 @@ const PluginsScreen: React.FC = () => {
style={[styles.button, styles.primaryButton]}
onPress={async () => {
if (showboxScraperId) {
- await localScraperService.setScraperSettings(showboxScraperId, { uiToken: showboxUiToken });
+ await pluginService.setScraperSettings(showboxScraperId, { uiToken: showboxUiToken });
}
setShowboxSavedToken(showboxUiToken);
openAlert('Saved', 'ShowBox settings updated');
@@ -1725,7 +1725,7 @@ const PluginsScreen: React.FC = () => {
setShowboxUiToken('');
setShowboxSavedToken('');
if (showboxScraperId) {
- await localScraperService.setScraperSettings(showboxScraperId, {});
+ await pluginService.setScraperSettings(showboxScraperId, {});
}
}}
>
@@ -1768,7 +1768,7 @@ const PluginsScreen: React.FC = () => {
Group Plugin Streams
- When enabled, all plugin streams are grouped under "{localScraperService.getRepositoryName()}". When disabled, each plugin shows as a separate provider.
+ When enabled, all plugin streams are grouped under "{pluginService.getRepositoryName()}". When disabled, each plugin shows as a separate provider.
{
Getting Started with Plugins
- 1. Enable Local Scrapers - Turn on the main switch to allow plugins
+ 1. Enable Plugins - Turn on the main switch to allow plugins
2. Add Repository - Add a GitHub raw URL or use the default repository
diff --git a/src/screens/StreamsScreen.tsx b/src/screens/StreamsScreen.tsx
index aeda36b..32a23b1 100644
--- a/src/screens/StreamsScreen.tsx
+++ b/src/screens/StreamsScreen.tsx
@@ -40,7 +40,7 @@ import { useTrailer } from '../contexts/TrailerContext';
import { Stream } from '../types/metadata';
import { tmdbService, IMDbRatings } from '../services/tmdbService';
import { stremioService } from '../services/stremioService';
-import { localScraperService } from '../services/localScraperService';
+import { localScraperService } from '../services/pluginService';
import { VideoPlayerService } from '../services/videoPlayerService';
import { useSettings } from '../hooks/useSettings';
import QualityBadge from '../components/metadata/QualityBadge';
diff --git a/src/services/catalogService.ts b/src/services/catalogService.ts
index fdb6b4c..b6970b2 100644
--- a/src/services/catalogService.ts
+++ b/src/services/catalogService.ts
@@ -919,11 +919,11 @@ class CatalogService {
}
public async getLibraryItems(): Promise<StreamingContent[]> {
- logger.log(`[CatalogService] getLibraryItems() called. Library contains ${Object.keys(this.library).length} items`);
- await this.ensureInitialized();
- const items = Object.values(this.library);
- logger.log(`[CatalogService] getLibraryItems() returning ${items.length} items`);
- return items;
+ // Only ensure initialization if not already done to avoid redundant calls
+ if (!this.isInitialized) {
+ await this.ensureInitialized();
+ }
+ return Object.values(this.library);
}
public subscribeToLibraryUpdates(callback: (items: StreamingContent[]) => void): () => void {
diff --git a/src/services/localScraperService.ts b/src/services/pluginService.ts
similarity index 97%
rename from src/services/localScraperService.ts
rename to src/services/pluginService.ts
index 1588e03..52ffc14 100644
--- a/src/services/localScraperService.ts
+++ b/src/services/pluginService.ts
@@ -632,8 +632,8 @@ class LocalScraperService {
// Force disable if:
// 1. Manifest says enabled: false (globally disabled)
// 2. Platform incompatible
- // Otherwise, preserve user's enabled state or default to false
- enabled: scraperInfo.enabled && isPlatformCompatible ? (existingScraper?.enabled ?? false) : false
+ // Otherwise, preserve user's enabled state or default to true for new installations
+ enabled: scraperInfo.enabled && isPlatformCompatible ? (existingScraper?.enabled ?? true) : false
};
// Ensure contentLanguage is an array (migration for older scrapers)
@@ -786,8 +786,8 @@ class LocalScraperService {
// Store the manifest's enabled state separately
manifestEnabled: scraperInfo.enabled,
// If manifest says enabled: false, scraper cannot be enabled
- // If manifest says enabled: true, use installed state or default to false
- enabled: scraperInfo.enabled ? (installedScraper?.enabled ?? false) : false
+ // If manifest says enabled: true, use installed state or default to true for new installs
+ enabled: scraperInfo.enabled ? (installedScraper?.enabled ?? true) : false
};
@@ -1341,34 +1341,43 @@ class LocalScraperService {
// Get user settings to check if local scrapers are enabled
const userSettings = await this.getUserScraperSettings();
+ logger.log('[LocalScraperService.hasScrapers] enableLocalScrapers:', userSettings.enableLocalScrapers);
if (!userSettings.enableLocalScrapers) {
+ logger.log('[LocalScraperService.hasScrapers] Returning false: local scrapers disabled');
return false;
}
// If no repository is configured, return false
if (!this.repositoryUrl) {
- logger.log('[LocalScraperService] No repository URL configured');
+ logger.log('[LocalScraperService.hasScrapers] Returning false: no repository URL configured');
return false;
}
// If no scrapers are installed, try to refresh repository
if (this.installedScrapers.size === 0) {
- logger.log('[LocalScraperService] No scrapers installed, attempting to refresh repository');
+ logger.log('[LocalScraperService.hasScrapers] No scrapers installed, attempting to refresh repository');
try {
await this.performRepositoryRefresh();
} catch (error) {
- logger.error('[LocalScraperService] Failed to refresh repository for hasScrapers check:', error);
+ logger.error('[LocalScraperService.hasScrapers] Failed to refresh repository:', error);
return false;
}
}
+ logger.log('[LocalScraperService.hasScrapers] installedScrapers.size:', this.installedScrapers.size);
+ logger.log('[LocalScraperService.hasScrapers] enabledScrapers set size:', userSettings.enabledScrapers?.size);
+
// Check if there are any enabled scrapers based on user settings
if (userSettings.enabledScrapers && userSettings.enabledScrapers.size > 0) {
+ logger.log('[LocalScraperService.hasScrapers] Returning true: enabledScrapers set has items');
return true;
}
// Fallback: check if any scrapers are enabled in the internal state
- return Array.from(this.installedScrapers.values()).some(scraper => scraper.enabled);
+ const hasEnabledScrapers = Array.from(this.installedScrapers.values()).some(scraper => scraper.enabled);
+ logger.log('[LocalScraperService.hasScrapers] Fallback check - hasEnabledScrapers:', hasEnabledScrapers);
+ logger.log('[LocalScraperService.hasScrapers] Scrapers state:', Array.from(this.installedScrapers.values()).map(s => ({ id: s.id, name: s.name, enabled: s.enabled })));
+ return hasEnabledScrapers;
}
// Get current user scraper settings for cache filtering
@@ -1394,18 +1403,21 @@ class LocalScraperService {
const settingsData = scopedSettingsJson || legacySettingsJson;
const settings = settingsData ? JSON.parse(settingsData) : {};
+ // Default to true if the setting is not yet saved
+ const enableLocalScrapers = settings.enableLocalScrapers !== false;
+
// Get enabled scrapers based on current user settings
const enabledScrapers = new Set<string>();
const installedScrapers = Array.from(this.installedScrapers.values());
for (const scraper of installedScrapers) {
- if (scraper.enabled && settings.enableLocalScrapers) {
+ if (scraper.enabled && enableLocalScrapers) {
enabledScrapers.add(scraper.id);
}
}
return {
- enableLocalScrapers: settings.enableLocalScrapers,
+ enableLocalScrapers: enableLocalScrapers,
enabledScrapers: enabledScrapers.size > 0 ? enabledScrapers : undefined
};
} catch (error) {
@@ -1417,4 +1429,5 @@ class LocalScraperService {
}
export const localScraperService = LocalScraperService.getInstance();
+export const pluginService = localScraperService; // Alias for UI consistency
export default localScraperService;
\ No newline at end of file
diff --git a/src/services/stremioService.ts b/src/services/stremioService.ts
index 660395b..40c789f 100644
--- a/src/services/stremioService.ts
+++ b/src/services/stremioService.ts
@@ -2,7 +2,7 @@ import axios from 'axios';
import { mmkvStorage } from './mmkvStorage';
import { logger } from '../utils/logger';
import EventEmitter from 'eventemitter3';
-import { localScraperService } from './localScraperService';
+import { localScraperService } from './pluginService';
import { DEFAULT_SETTINGS, AppSettings } from '../hooks/useSettings';
import { TMDBService } from './tmdbService';