try AbortController for cancelling scraping

This commit is contained in:
Pas 2025-12-19 14:44:03 -07:00
parent 37d7f4f31e
commit 1ec806bea8
6 changed files with 333 additions and 2 deletions

View file

@ -1,12 +1,16 @@
import { FullScraperEvents, IndividualScraperEvents } from '@/entrypoint/utils/events';
import { FullScraperEvents, IndividualScraperEvents, UpdateEvent } from '@/entrypoint/utils/events';
import { ScrapeMedia } from '@/entrypoint/utils/media';
import { MetaOutput, getAllEmbedMetaSorted, getAllSourceMetaSorted, getSpecificId } from '@/entrypoint/utils/meta';
import { FeatureMap } from '@/entrypoint/utils/targets';
import { FeatureMap, flagsAllowedInFeatures } from '@/entrypoint/utils/targets';
import { makeFetcher } from '@/fetchers/common';
import { Fetcher } from '@/fetchers/types';
import { Embed, EmbedOutput, Sourcerer, SourcererOutput } from '@/providers/base';
import { scrapeIndividualEmbed, scrapeInvidualSource } from '@/runners/individualRunner';
import { RunOutput, runAllProviders } from '@/runners/runner';
import { ScrapeContext } from '@/utils/context';
import { NotFoundError } from '@/utils/errors';
import { requiresProxy, setupProxy } from '@/utils/proxy';
import { isValidStream, validatePlayableStream } from '@/utils/valid';
export interface ProviderControlsInput {
fetcher: Fetcher;
@ -35,6 +39,9 @@ export interface RunnerOptions {
// it makes sense to have this in the builder
// but I believe it's more useful in runner ops
disableOpensubtitles?: boolean;
// abort signal for cancelling scraping
abortSignal?: AbortSignal;
}
export interface SourceRunnerOptions {
@ -72,6 +79,10 @@ export interface ProviderControls {
// returns the stream, or null if none found
runAll(runnerOps: RunnerOptions): Promise<RunOutput | null>;
// Run a single source and its embeds (if any) with abort capability
// returns the stream, or null if none found or aborted
runSourceWithEmbeds(runnerOps: Omit<RunnerOptions, 'sourceOrder'> & { sourceId: string }): Promise<RunOutput | null>;
// Run a specific source scraper
runSourceScraper(runnerOps: SourceRunnerOptions): Promise<SourcererOutput>;
@ -101,13 +112,208 @@ export function makeControls(ops: ProviderControlsInput): ProviderControls {
proxyStreams: ops.proxyStreams,
};
// Run a single source (looked up by runnerOps.sourceId) and, if it only yields
// embeds, run those embeds in list order. Checks runnerOps.abortSignal between
// steps and reports progress/outcomes through runnerOps.events.
// Returns the first playable stream, or null when nothing was found or the run
// was aborted. Throws only for caller errors: unknown source id, or a media
// type the source does not support.
const runSourceWithEmbeds = async (
  runnerOps: Omit<RunnerOptions, 'sourceOrder'> & { sourceId: string },
): Promise<RunOutput | null> => {
  const sourceItem = list.sources.find((s) => s.id === runnerOps.sourceId);
  if (!sourceItem) {
    throw new Error(`Source with ID ${runnerOps.sourceId} not found`);
  }

  // Check if media type is supported by this source
  if (runnerOps.media.type === 'movie' && !sourceItem.scrapeMovie) {
    throw new Error(`Source ${runnerOps.sourceId} does not support movies`);
  }
  if (runnerOps.media.type === 'show' && !sourceItem.scrapeShow) {
    throw new Error(`Source ${runnerOps.sourceId} does not support shows`);
  }

  const sources = [sourceItem];
  const embeds = list.embeds;
  const embedIds = embeds.map((embed) => embed.id);

  // id of the item currently being scraped; progress() below reports against it
  let lastId = '';
  const contextBase: ScrapeContext = {
    fetcher: providerRunnerOps.fetcher,
    proxiedFetcher: providerRunnerOps.proxiedFetcher,
    features: providerRunnerOps.features,
    abortSignal: runnerOps.abortSignal,
    progress(val) {
      runnerOps.events?.update?.({
        id: lastId,
        percentage: val,
        status: 'pending',
      });
    },
  };

  runnerOps.events?.init?.({
    sourceIds: sources.map((v) => v.id),
  });

  for (const currentSource of sources) {
    // Check for abort before starting the source
    if (runnerOps.abortSignal?.aborted) {
      runnerOps.events?.abort?.(currentSource.id);
      return null;
    }

    runnerOps.events?.start?.(currentSource.id);
    lastId = currentSource.id;

    // run source scrapers
    let output: SourcererOutput | null = null;
    try {
      if (runnerOps.media.type === 'movie' && currentSource.scrapeMovie)
        output = await currentSource.scrapeMovie({
          ...contextBase,
          media: runnerOps.media,
        });
      else if (runnerOps.media.type === 'show' && currentSource.scrapeShow)
        output = await currentSource.scrapeShow({
          ...contextBase,
          media: runnerOps.media,
        });
      if (output) {
        // drop invalid streams and streams whose flags the target features
        // don't allow, then proxy the survivors where required
        output.stream = (output.stream ?? [])
          .filter(isValidStream)
          .filter((stream) => flagsAllowedInFeatures(providerRunnerOps.features, stream.flags));
        output.stream = output.stream.map((stream) =>
          requiresProxy(stream) && providerRunnerOps.proxyStreams ? setupProxy(stream) : stream,
        );
      }
      if (!output || (!output.stream?.length && !output.embeds.length)) {
        throw new NotFoundError('No streams found');
      }

      // return the direct stream if there is one
      // FIX: validation now happens inside the try block, so a stream that
      // fails playability validation is reported through the update event and
      // the function falls through to null, instead of the NotFoundError
      // escaping to the caller — matching the embed path below and the
      // documented "null if none found or aborted" contract.
      if (output.stream?.[0]) {
        const validationOps = {
          fetcher: providerRunnerOps.fetcher,
          proxiedFetcher: providerRunnerOps.proxiedFetcher,
          features: providerRunnerOps.features,
          media: runnerOps.media,
          proxyStreams: providerRunnerOps.proxyStreams,
        };
        const playableStream = await validatePlayableStream(output.stream[0], validationOps, currentSource.id);
        if (!playableStream) throw new NotFoundError('No streams found');
        return {
          sourceId: currentSource.id,
          stream: playableStream,
        };
      }
    } catch (error) {
      const updateParams: UpdateEvent = {
        id: currentSource.id,
        percentage: 100,
        status: error instanceof NotFoundError ? 'notfound' : 'failure',
        reason: error instanceof NotFoundError ? error.message : undefined,
        error: error instanceof NotFoundError ? undefined : error,
      };
      runnerOps.events?.update?.(updateParams);
      continue;
    }

    // unreachable at runtime (the try either assigned output or threw), but
    // kept so TypeScript narrows `output` to non-null past this point
    if (!output) throw new Error('Invalid media type');

    // filter disabled embeds and keep them in configured list order
    const sortedEmbeds = output.embeds
      .filter((embed) => {
        const e = list.embeds.find((v) => v.id === embed.embedId);
        return e && !e.disabled;
      })
      .sort((a, b) => embedIds.indexOf(a.embedId) - embedIds.indexOf(b.embedId));

    if (sortedEmbeds.length > 0) {
      runnerOps.events?.discoverEmbeds?.({
        embeds: sortedEmbeds.map((embed, i) => ({
          id: [currentSource.id, i].join('-'),
          embedScraperId: embed.embedId,
        })),
        sourceId: currentSource.id,
      });
    }

    for (const [ind, embed] of sortedEmbeds.entries()) {
      // Check for abort before starting each embed
      if (runnerOps.abortSignal?.aborted) {
        runnerOps.events?.abort?.([currentSource.id, ind].join('-'));
        return null;
      }

      const scraper = embeds.find((v) => v.id === embed.embedId);
      if (!scraper) throw new Error('Invalid embed returned');

      // run embed scraper; embed event ids are "<sourceId>-<index>"
      const id = [currentSource.id, ind].join('-');
      runnerOps.events?.start?.(id);
      lastId = id;

      let embedOutput: EmbedOutput;
      try {
        embedOutput = await scraper.scrape({
          ...contextBase,
          url: embed.url,
        });
        embedOutput.stream = embedOutput.stream
          .filter(isValidStream)
          .filter((stream) => flagsAllowedInFeatures(providerRunnerOps.features, stream.flags));
        embedOutput.stream = embedOutput.stream.map((stream) =>
          requiresProxy(stream) && providerRunnerOps.proxyStreams ? setupProxy(stream) : stream,
        );
        if (embedOutput.stream.length === 0) {
          throw new NotFoundError('No streams found');
        }
        const validationOps = {
          fetcher: providerRunnerOps.fetcher,
          proxiedFetcher: providerRunnerOps.proxiedFetcher,
          features: providerRunnerOps.features,
          media: runnerOps.media,
          proxyStreams: providerRunnerOps.proxyStreams,
        };
        const playableStream = await validatePlayableStream(embedOutput.stream[0], validationOps, embed.embedId);
        if (!playableStream) throw new NotFoundError('No streams found');
        embedOutput.stream = [playableStream];
      } catch (error) {
        const updateParams: UpdateEvent = {
          id,
          percentage: 100,
          status: error instanceof NotFoundError ? 'notfound' : 'failure',
          reason: error instanceof NotFoundError ? error.message : undefined,
          error: error instanceof NotFoundError ? undefined : error,
        };
        runnerOps.events?.update?.(updateParams);
        continue;
      }

      return {
        sourceId: currentSource.id,
        embedId: scraper.id,
        stream: embedOutput.stream[0],
      };
    }
  }

  // neither the source nor any of its embeds produced a playable stream
  return null;
};
return {
runAll(runnerOps) {
return runAllProviders(list, {
...providerRunnerOps,
...runnerOps,
abortSignal: runnerOps.abortSignal,
});
},
runSourceWithEmbeds(runnerOps) {
return runSourceWithEmbeds(runnerOps);
},
runSourceScraper(runnerOps) {
return scrapeInvidualSource(list, {
...providerRunnerOps,

View file

@ -39,6 +39,9 @@ export type FullScraperEvents = {
// start scraping an item.
start?: (id: string) => void;
// abort scraping of a specific item
abort?: (id: string) => void;
};
export type IndividualScraperEvents = {

View file

@ -207,3 +207,106 @@ export function gatherAllEmbeds(): Array<Embed> {
voeScraper,
];
}
// Export all individual providers for visibility — lets consumers import a
// specific source or embed scraper directly instead of only receiving them
// through the gather* helpers (presumably for building custom provider lists;
// verify against callers).
export {
  // Source scrapers
  fsOnlineScraper,
  dopeboxScraper,
  cuevana3Scraper,
  ridooMoviesScraper,
  hdRezkaScraper,
  warezcdnScraper,
  insertunitScraper,
  soaperTvScraper,
  autoembedScraper,
  myanimeScraper,
  tugaflixScraper,
  ee3Scraper,
  fsharetvScraper,
  zoechipScraper,
  mp4hydraScraper,
  embedsuScraper,
  slidemoviesScraper,
  vidapiClickScraper,
  coitusScraper,
  streamboxScraper,
  nunflixScraper,
  EightStreamScraper,
  wecimaScraper,
  animeflvScraper,
  pirxcyScraper,
  vidsrcvipScraper,
  rgshowsScraper,
  vidifyScraper,
  zunimeScraper,
  vidnestScraper,
  animetsuScraper,
  lookmovieScraper,
  turbovidSourceScraper,
  pelisplushdScraper,
  primewireScraper,
  movies4fScraper,
  debridScraper,
  cinehdplusScraper,
  fullhdfilmizleScraper,
  // Embed scrapers
  fsOnlineEmbeds,
  dopeboxEmbeds,
  serverMirrorEmbed,
  upcloudScraper,
  vidCloudScraper,
  mixdropScraper,
  ridooScraper,
  closeLoadScraper,
  doodScraper,
  streamvidScraper,
  streamtapeScraper,
  warezcdnembedHlsScraper,
  warezcdnembedMp4Scraper,
  warezPlayerScraper,
  autoembedEnglishScraper,
  autoembedHindiScraper,
  autoembedBengaliScraper,
  autoembedTamilScraper,
  autoembedTeluguScraper,
  turbovidScraper,
  mp4hydraServer1Scraper,
  mp4hydraServer2Scraper,
  VidsrcsuServer1Scraper,
  VidsrcsuServer2Scraper,
  VidsrcsuServer3Scraper,
  VidsrcsuServer4Scraper,
  VidsrcsuServer5Scraper,
  VidsrcsuServer6Scraper,
  VidsrcsuServer7Scraper,
  VidsrcsuServer8Scraper,
  VidsrcsuServer9Scraper,
  VidsrcsuServer10Scraper,
  VidsrcsuServer11Scraper,
  VidsrcsuServer12Scraper,
  VidsrcsuServer20Scraper,
  viperScraper,
  streamwishJapaneseScraper,
  streamwishLatinoScraper,
  streamwishSpanishScraper,
  streamwishEnglishScraper,
  streamtapeLatinoScraper,
  cinemaosEmbeds,
  vidifyEmbeds,
  zunimeEmbeds,
  AnimetsuEmbeds,
  vidnestHollymoviehdEmbed,
  vidnestAllmoviesEmbed,
  myanimesubScraper,
  myanimedubScraper,
  filemoonScraper,
  vidhideLatinoScraper,
  vidhideSpanishScraper,
  vidhideEnglishScraper,
  filelionsScraper,
  droploadScraper,
  supervideoScraper,
  voeScraper,
};

View file

@ -17,6 +17,7 @@ export type IndividualSourceRunnerOptions = {
id: string;
events?: IndividualScraperEvents;
proxyStreams?: boolean; // temporary
abortSignal?: AbortSignal;
};
export async function scrapeInvidualSource(
@ -32,6 +33,7 @@ export async function scrapeInvidualSource(
fetcher: ops.fetcher,
proxiedFetcher: ops.proxiedFetcher,
features: ops.features,
abortSignal: ops.abortSignal,
progress(val) {
ops.events?.update?.({
id: sourceScraper.id,
@ -94,6 +96,7 @@ export type IndividualEmbedRunnerOptions = {
id: string;
events?: IndividualScraperEvents;
proxyStreams?: boolean; // temporary
abortSignal?: AbortSignal;
};
export async function scrapeIndividualEmbed(
@ -109,6 +112,7 @@ export async function scrapeIndividualEmbed(
fetcher: ops.fetcher,
proxiedFetcher: ops.proxiedFetcher,
features: ops.features,
abortSignal: ops.abortSignal,
url,
progress(val) {
ops.events?.update?.({

View file

@ -37,6 +37,7 @@ export type ProviderRunnerOptions = {
events?: FullScraperEvents;
media: ScrapeMedia;
proxyStreams?: boolean; // temporary
abortSignal?: AbortSignal;
};
export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOptions): Promise<RunOutput | null> {
@ -53,6 +54,7 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
fetcher: ops.fetcher,
proxiedFetcher: ops.proxiedFetcher,
features: ops.features,
abortSignal: ops.abortSignal,
progress(val) {
ops.events?.update?.({
id: lastId,
@ -67,6 +69,12 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
});
for (const source of sources) {
// Check for abort before starting each source
if (ops.abortSignal?.aborted) {
ops.events?.abort?.(source.id);
break;
}
ops.events?.start?.(source.id);
lastId = source.id;
@ -139,6 +147,12 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
}
for (const [ind, embed] of sortedEmbeds.entries()) {
// Check for abort before starting each embed
if (ops.abortSignal?.aborted) {
ops.events?.abort?.([source.id, ind].join('-'));
break;
}
const scraper = embeds.find((v) => v.id === embed.embedId);
if (!scraper) throw new Error('Invalid embed returned');

View file

@ -7,6 +7,7 @@ export type ScrapeContext = {
fetcher: UseableFetcher;
progress(val: number): void;
features: FeatureMap;
abortSignal?: AbortSignal;
};
export type EmbedInput = {