Merge branch 'sussy-code:dev' into moviplus

This commit is contained in:
TPN 2024-07-17 18:28:00 +05:30 committed by GitHub
commit 263155925d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 3049 additions and 3356 deletions

File diff suppressed because it is too large Load diff

View file

@ -19,6 +19,7 @@ import { autoembedScraper } from '@/providers/sources/autoembed';
import { catflixScraper } from '@/providers/sources/catflix';
import { ee3Scraper } from '@/providers/sources/ee3';
import { flixhqScraper } from '@/providers/sources/flixhq/index';
import { fsharetvScraper } from '@/providers/sources/fsharetv';
import { goMoviesScraper } from '@/providers/sources/gomovies/index';
import { insertunitScraper } from '@/providers/sources/insertunit';
import { kissAsianScraper } from '@/providers/sources/kissasian/index';
@ -99,6 +100,7 @@ export function gatherAllSources(): Array<Sourcerer> {
ee3Scraper,
moviplusScraper,
whvxScraper,
fsharetvScraper,
];
}

View file

@ -22,7 +22,7 @@ export async function login(
const cookie = parseSetCookie(
// It returns a cookie even when the login failed
// I have the backup cookie here just in case
res.status === 1 ? (req.headers.get('Set-Cookie') ?? '') : 'PHPSESSID=mk2p73c77qc28o5i5120843ruu;',
res.status === 1 ? req.headers.get('Set-Cookie') ?? '' : 'PHPSESSID=mk2p73c77qc28o5i5120843ruu;',
);
return cookie.PHPSESSID.value;

View file

@ -0,0 +1,90 @@
import { load } from 'cheerio';
import { SourcererOutput, makeSourcerer } from '@/providers/base';
import { FileBasedStream } from '@/providers/streams';
import { compareMedia } from '@/utils/compare';
import { MovieScrapeContext, ShowScrapeContext } from '@/utils/context';
import { NotFoundError } from '@/utils/errors';
import { getValidQualityFromString } from '@/utils/quality';
// Root URL for all FshareTV requests (search page, watch pages, and the file API).
const baseUrl = 'https://fsharetv.co';
/**
 * Scrapes FshareTV for a direct MP4 stream matching the requested media.
 *
 * Flow: search by title -> pick the result whose title/year matches ->
 * load the watch page to extract the file ID -> hit the file API for
 * the available quality sources.
 *
 * @param ctx - scrape context carrying the media metadata and a proxied fetcher
 * @returns a SourcererOutput with a single file-based stream (no embeds)
 * @throws NotFoundError when no search result matches the requested media
 * @throws Error when the file ID or any sources cannot be extracted
 */
async function comboScraper(ctx: ShowScrapeContext | MovieScrapeContext): Promise<SourcererOutput> {
  const searchPage = await ctx.proxiedFetcher('/search', {
    baseUrl,
    query: {
      q: ctx.media.title,
    },
  });
  const search$ = load(searchPage);
  const searchResults: { title: string; year?: number; url: string }[] = [];
  search$('.movie-item').each((_, element) => {
    // Titles render as "Name", "Name (2021)" or "Name (2021 - 2023)";
    // capture the bare name and the first year if present.
    const [, title, year] =
      search$(element)
        .find('b')
        .text()
        ?.match(/^(.*?)\s*(?:\(?\s*(\d{4})(?:\s*-\s*\d{0,4})?\s*\)?)?\s*$/) || [];
    const url = search$(element).find('a').attr('href');
    if (!title || !url) return;
    // BUGFIX: the original `Number(year) ?? undefined` could never produce
    // undefined — Number(undefined) is NaN, which is not nullish — so rows
    // without a year were pushed with `year: NaN`. Only convert when a year
    // was actually captured.
    searchResults.push({ title, year: year ? Number(year) : undefined, url });
  });
  const watchPageUrl = searchResults.find((x) => x && compareMedia(ctx.media, x.title, x.year))?.url;
  if (!watchPageUrl) throw new NotFoundError('No watchable item found');
  // The `/movie/...` listing URL maps to a `/w/...` watch page that embeds the player.
  const watchPage = await ctx.proxiedFetcher(watchPageUrl.replace('/movie', '/w'), { baseUrl });
  // The player is initialised with Movie.setSource('<fileId>', ...) inline on the page.
  const fileId = watchPage.match(/Movie\.setSource\('([^']*)'/)?.[1];
  if (!fileId) throw new Error('File ID not found');
  const apiRes: { data: { file: { sources: { src: string; quality: string | number }[] } } } = await ctx.proxiedFetcher(
    `/api/file/${fileId}/source`,
    {
      baseUrl,
      query: {
        type: 'watch',
      },
    },
  );
  if (!apiRes.data.file.sources.length) throw new Error('No sources found');
  // Collapse the API's source list into the FileBasedStream qualities map,
  // normalising numeric quality labels to strings first.
  const qualities = apiRes.data.file.sources.reduce(
    (acc, source) => {
      const quality = typeof source.quality === 'number' ? source.quality.toString() : source.quality;
      const validQuality = getValidQualityFromString(quality);
      acc[validQuality] = {
        type: 'mp4',
        url: `${baseUrl}${source.src}`,
      };
      return acc;
    },
    {} as FileBasedStream['qualities'],
  );
  return {
    embeds: [],
    stream: [
      {
        id: 'primary',
        type: 'file',
        flags: [],
        headers: {
          // Same host as baseUrl; the CDN rejects requests without this referer.
          referer: baseUrl,
        },
        qualities,
        captions: [],
      },
    ],
  };
}
// Registers FshareTV as a source. Only `scrapeMovie` is wired up here,
// even though comboScraper's signature also accepts a ShowScrapeContext —
// shows are not supported by this provider as written.
export const fsharetvScraper = makeSourcerer({
  id: 'fsharetv',
  name: 'FshareTV',
  rank: 93,
  flags: [],
  scrapeMovie: comboScraper,
});