Mirror of https://github.com/sussy-code/providers.git (synced 2026-01-11 20:10:17 +00:00)
Upgrade packages, lint and vite, use new eslint flat config and add vidsrc.su
parent f5a8df4c01
commit dfa3420b62
37 changed files with 2011 additions and 1304 deletions
145  eslint.config.mjs  Normal file
@@ -0,0 +1,145 @@
/* eslint-disable import/no-unresolved */
/* eslint-disable import/no-extraneous-dependencies */
/* eslint-disable no-underscore-dangle */
import typescriptEslint from '@typescript-eslint/eslint-plugin';
import importPlugin from 'eslint-plugin-import';
import prettier from 'eslint-plugin-prettier';
import globals from 'globals';
import tsParser from '@typescript-eslint/parser';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import js from '@eslint/js';
import { FlatCompat } from '@eslint/eslintrc';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
  baseDirectory: __dirname,
  recommendedConfig: js.configs.recommended,
  allConfig: js.configs.all,
});

export default [
  {
    ignores: ['lib/*', 'tests/*', '*.js', '*.ts', 'src/__test__/*', '**/*.test.ts', 'test/*'],
  },
  // TODO: use the new flat config method for prettier and typescript configs
  ...compat.extends('airbnb-base', 'plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'),
  {
    files: ['**/*.ts', '**/*.js'],

    plugins: {
      '@typescript-eslint': typescriptEslint,
      importPlugin,
      prettier,
    },

    languageOptions: {
      globals: {
        ...globals.browser,
      },

      parser: tsParser,
      ecmaVersion: 5,
      sourceType: 'script',

      parserOptions: {
        project: './tsconfig.json',
        tsconfigRootDir: './',
      },
    },

    settings: {
      'import/resolver': {
        typescript: {
          project: './tsconfig.json',
        },
      },
    },

    rules: {
      'no-plusplus': 'off',
      'class-methods-use-this': 'off',
      'no-bitwise': 'off',
      'no-underscore-dangle': 'off',
      '@typescript-eslint/no-explicit-any': 'off',

      'no-console': [
        'error',
        {
          allow: ['warn', 'error'],
        },
      ],

      '@typescript-eslint/no-this-alias': 'off',
      'import/prefer-default-export': 'off',
      '@typescript-eslint/no-empty-function': 'off',
      'no-shadow': 'off',
      '@typescript-eslint/no-shadow': ['error'],
      'no-restricted-syntax': 'off',

      'import/no-unresolved': [
        'error',
        {
          ignore: ['^virtual:'],
        },
      ],

      'consistent-return': 'off',
      'no-continue': 'off',
      'no-eval': 'off',
      'no-await-in-loop': 'off',
      'no-nested-ternary': 'off',

      'no-param-reassign': [
        'error',
        {
          props: false,
        },
      ],

      'prefer-destructuring': 'off',

      '@typescript-eslint/no-unused-vars': [
        'warn',
        {
          argsIgnorePattern: '^_',
        },
      ],

      'import/extensions': [
        'error',
        'ignorePackages',
        {
          ts: 'never',
          tsx: 'never',
        },
      ],

      'import/order': [
        'error',
        {
          groups: ['builtin', 'external', 'internal', ['sibling', 'parent'], 'index', 'unknown'],

          'newlines-between': 'always',

          alphabetize: {
            order: 'asc',
            caseInsensitive: true,
          },
        },
      ],

      'sort-imports': [
        'error',
        {
          ignoreCase: false,
          ignoreDeclarationSort: true,
          ignoreMemberSort: false,
          memberSyntaxSortOrder: ['none', 'all', 'multiple', 'single'],
          allowSeparatedGroups: true,
        },
      ],
    },
  },
];
40  package.json
@@ -42,54 +42,58 @@
    "test:providers": "cross-env MW_TEST_PROVIDERS=true vitest run --reporter verbose",
    "test:integration": "node ./tests/cjs && node ./tests/esm && node ./tests/browser",
    "test:coverage": "vitest run --coverage",
    "lint": "eslint --ext .ts,.js src/",
    "lint:fix": "eslint --fix --ext .ts,.js src/",
    "lint:report": "eslint --ext .ts,.js --output-file eslint_report.json --format json src/",
    "lint": "eslint src/",
    "lint:fix": "eslint --fix src/",
    "lint:report": "eslint --output-file eslint_report.json --format json src/",
    "preinstall": "npx -y only-allow pnpm",
    "prepare": "pnpm run build",
    "prepublishOnly": "pnpm test && pnpm run lint"
  },
  "devDependencies": {
    "@eslint/compat": "^1.2.4",
    "@eslint/eslintrc": "^3.2.0",
    "@eslint/js": "^9.17.0",
    "@nabla/vite-plugin-eslint": "^2.0.5",
    "@types/cookie": "^0.6.0",
    "@types/crypto-js": "^4.2.2",
    "@types/node-fetch": "^2.6.12",
    "@types/randombytes": "^2.0.3",
    "@types/set-cookie-parser": "^2.4.10",
    "@types/spinnies": "^0.5.3",
    "@typescript-eslint/eslint-plugin": "^7.18.0",
    "@typescript-eslint/parser": "^7.18.0",
    "@vitest/coverage-v8": "^1.6.0",
    "commander": "^12.1.0",
    "@typescript-eslint/eslint-plugin": "^8.19.1",
    "@typescript-eslint/parser": "^8.19.1",
    "@vitest/coverage-v8": "^2.1.8",
    "commander": "^13.0.0",
    "cross-env": "^7.0.3",
    "dotenv": "^16.4.7",
    "enquirer": "^2.4.1",
    "eslint": "^8.57.1",
    "eslint": "^9.17.0",
    "eslint-config-airbnb-base": "^15.0.0",
    "eslint-config-prettier": "^9.1.0",
    "eslint-import-resolver-typescript": "^3.7.0",
    "eslint-plugin-import": "^2.31.0",
    "eslint-plugin-prettier": "^5.2.1",
    "globals": "^15.14.0",
    "node-fetch": "^3.3.2",
    "prettier": "^3.4.2",
    "puppeteer": "^22.15.0",
    "puppeteer": "^23.11.1",
    "spinnies": "^0.5.1",
    "tsc-alias": "^1.8.10",
    "tsconfig-paths": "^4.2.0",
    "typescript": "^5.7.2",
    "vite": "^5.4.11",
    "vite-node": "^1.6.0",
    "vite-plugin-dts": "^3.9.1",
    "vitest": "^1.6.0"
    "vite": "^6.0.7",
    "vite-node": "^2.1.8",
    "vite-plugin-dts": "^4.4.0",
    "vitest": "^2.1.8"
  },
  "dependencies": {
    "cheerio": "1.0.0-rc.12",
    "cookie": "^0.6.0",
    "cheerio": "1.0.0",
    "cookie": "^1.0.2",
    "crypto-js": "^4.2.0",
    "form-data": "^4.0.1",
    "hls-parser": "^0.13.3",
    "hls-parser": "^0.13.5",
    "iso-639-1": "^3.1.3",
    "nanoid": "^3.3.8",
    "json5": "^2.2.3",
    "nanoid": "^5.0.9",
    "node-fetch": "^3.3.2",
    "set-cookie-parser": "^2.7.1",
    "unpacker": "^1.0.1"
2876  pnpm-lock.yaml
File diff suppressed because it is too large.
@@ -41,7 +41,7 @@ export function testSource(ops: TestSourceOptions) {
  let hasError = false;
  let streamCount = 0;
  let embedCount = 0;
  let embeds = [];
  const embeds = [];
  try {
    const result = await providers.runSourceScraper({
      id: ops.source.id,
@@ -37,9 +37,7 @@ const sources = [...sourceScrapers, ...embedScrapers];
function joinMediaTypes(mediaTypes: string[] | undefined) {
  if (mediaTypes) {
    const formatted = mediaTypes
      .map((type: string) => {
        return `${type[0].toUpperCase() + type.substring(1).toLowerCase()}s`;
      })
      .map((type: string) => `${type[0].toUpperCase() + type.substring(1).toLowerCase()}s`)
      .join(' / ');

    return `(${formatted})`;
@@ -31,8 +31,8 @@ export function makeFullUrl(url: string, ops?: FullUrlOptions): string {
}

export function makeFetcher(fetcher: Fetcher): UseableFetcher {
  const newFetcher = (url: string, ops?: FetcherOptions) => {
    return fetcher(url, {
  const newFetcher = (url: string, ops?: FetcherOptions) =>
    fetcher(url, {
      headers: ops?.headers ?? {},
      method: ops?.method ?? 'GET',
      query: ops?.query ?? {},
@@ -41,7 +41,6 @@ export function makeFetcher(fetcher: Fetcher): UseableFetcher {
      body: ops?.body,
      credentials: ops?.credentials,
    });
  };
  const output: UseableFetcher = async (url, ops) => (await newFetcher(url, ops)).body;
  output.full = newFetcher;
  return output;
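For orientation (not part of the commit), a minimal sketch of how the refactored wrapper above is consumed: calling the returned fetcher resolves straight to the response body, while `.full` exposes the whole response. The toy raw fetcher and its response shape are simplified assumptions for this sketch; only `makeFetcher`, the callable output, and `output.full` come from the code shown.

```ts
// Toy raw fetcher with a deliberately simplified response shape (assumption, not the repo's Fetcher type).
const rawFetcher = async (url: string, ops?: { method?: string }) => ({
  body: `response for ${ops?.method ?? 'GET'} ${url}`,
});

const fetcher = makeFetcher(rawFetcher as any);

const body = await fetcher('https://example.com/api'); // resolves to just the body
const full = await fetcher.full('https://example.com/api'); // full response object, e.g. full.body
```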
@@ -31,6 +31,7 @@ import { remotestreamScraper } from '@/providers/sources/remotestream';
import { showboxScraper } from '@/providers/sources/showbox/index';
import { tugaflixScraper } from '@/providers/sources/tugaflix';
import { vidsrcScraper } from '@/providers/sources/vidsrc/index';
import { vidsrcsuScraper } from '@/providers/sources/vidsrcsu';
import { whvxScraper } from '@/providers/sources/whvx';
import { zoechipScraper } from '@/providers/sources/zoechip';

@@ -104,6 +105,7 @@ export function gatherAllSources(): Array<Sourcerer> {
    fsharetvScraper,
    redStarScraper,
    bombtheirishScraper,
    vidsrcsuScraper,
  ];
}

@@ -1,6 +1,6 @@
import { MediaTypes } from '@/entrypoint/utils/media';

export const febBoxBase = `https://www.febbox.com`;
export const febBoxBase = 'https://www.febbox.com';

export interface FebboxFileList {
  file_name: string;
@@ -50,9 +50,7 @@ export async function getSubtitles(
  const subtitleFilePath = subtitle.file_path
    .replace(captionsDomains[0], captionsDomains[1])
    .replace(/\s/g, '+')
    .replace(/[()]/g, (c) => {
      return `%${c.charCodeAt(0).toString(16)}`;
    });
    .replace(/[()]/g, (c) => `%${c.charCodeAt(0).toString(16)}`);

  const subtitleType = getCaptionTypeFromUrl(subtitleFilePath);
  if (!subtitleType) return;
@@ -22,7 +22,7 @@ export const mp4uploadScraper = makeEmbed({
    flags: [flags.CORS_ALLOWED],
    captions: [],
    qualities: {
      '1080': {
      1080: {
        type: 'mp4',
        url: streamUrl,
      },
@@ -26,12 +26,14 @@ function decodeHunter(encoded: string, mask: string, charCodeOffset: number, del
    .map((chunk) => {
      // Chunks are in reverse order. 'reduceRight' removes the
      // need to 'reverse' the array first
      const charCode = chunk.split('').reduceRight((c, value, index) => {
        // Calculate the character code for each character in the chunk.
        // This involves finding the index of 'value' in the 'mask' and
        // multiplying it by (delimiterOffset^position).
        return c + mask.indexOf(value) * delimiterOffset ** (chunk.length - 1 - index);
      }, 0);
      const charCode = chunk.split('').reduceRight(
        (c, value, index) =>
          // Calculate the character code for each character in the chunk.
          // This involves finding the index of 'value' in the 'mask' and
          // multiplying it by (delimiterOffset^position).
          c + mask.indexOf(value) * delimiterOffset ** (chunk.length - 1 - index),
        0,
      );

      // The actual character code is offset by the given amount
      return String.fromCharCode(charCode - charCodeOffset);
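Not part of the commit: a self-contained restatement of the decoding step the comments above describe, so the arithmetic is easier to follow in isolation. The function name, parameters, and the sample mask below are hypothetical; only the positional `mask.indexOf(...) * delimiterOffset ** position` arithmetic and the final `charCodeOffset` shift mirror the code shown.

```ts
// Each chunk is read as a base-`delimiterOffset` number whose "digits" are
// indices into `mask`; the result is shifted down by `charCodeOffset` and
// turned back into a character. A plain left-to-right reduce gives the same
// value as the reduceRight above because the positional weights are explicit.
function decodeChunks(chunks: string[], mask: string, charCodeOffset: number, delimiterOffset: number): string {
  return chunks
    .map((chunk) => {
      const charCode = chunk
        .split('')
        .reduce((acc, digit, index) => acc + mask.indexOf(digit) * delimiterOffset ** (chunk.length - 1 - index), 0);
      return String.fromCharCode(charCode - charCodeOffset);
    })
    .join('');
}

// Hypothetical example: mask 'abcdef' acts as a base-6 digit alphabet, offset 0.
// 'cb' -> 2 * 6 + 1 = 13, so the decoded character has char code 13.
console.log(decodeChunks(['cb'], 'abcdef', 0, 6).charCodeAt(0)); // 13
```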
@@ -11,7 +11,7 @@ import { EmbedScrapeContext } from '@/utils/context';
async function fetchCaptchaToken(ctx: EmbedScrapeContext, domain: string, recaptchaKey: string) {
  const domainHash = Base64.stringify(Utf8.parse(domain)).replace(/=/g, '.');

  const recaptchaRender = await ctx.proxiedFetcher<string>(`https://www.google.com/recaptcha/api.js`, {
  const recaptchaRender = await ctx.proxiedFetcher<string>('https://www.google.com/recaptcha/api.js', {
    query: {
      render: recaptchaKey,
    },
@@ -23,7 +23,7 @@ async function fetchCaptchaToken(ctx: EmbedScrapeContext, domain: string, recapt
  );

  const recaptchaAnchor = await ctx.proxiedFetcher<string>(
    `https://www.google.com/recaptcha/api2/anchor?cb=1&hl=en&size=invisible&cb=flicklax`,
    'https://www.google.com/recaptcha/api2/anchor?cb=1&hl=en&size=invisible&cb=flicklax',
    {
      query: {
        k: recaptchaKey,
@@ -36,7 +36,7 @@ async function fetchCaptchaToken(ctx: EmbedScrapeContext, domain: string, recapt
  const cToken = load(recaptchaAnchor)('#recaptcha-token').attr('value');
  if (!cToken) throw new Error('Unable to find cToken');

  const tokenData = await ctx.proxiedFetcher<string>(`https://www.google.com/recaptcha/api2/reload`, {
  const tokenData = await ctx.proxiedFetcher<string>('https://www.google.com/recaptcha/api2/reload', {
    query: {
      v: vToken,
      reason: 'q',
@@ -107,7 +107,7 @@ export const streamsbScraper = makeEmbed({
      hash: dl.parameters[2],
    };

    const getDownload = await ctx.proxiedFetcher<string>(`/dl`, {
    const getDownload = await ctx.proxiedFetcher<string>('/dl', {
      query,
      baseUrl: parsedUrl.origin,
    });
@@ -127,7 +127,7 @@ export const streamsbScraper = makeEmbed({
    dlForm.append('hash', dl.parameters[2]);
    dlForm.append('g-recaptcha-response', captchaToken);

    const download = await ctx.proxiedFetcher<string>(`/dl`, {
    const download = await ctx.proxiedFetcher<string>('/dl', {
      method: 'POST',
      baseUrl: parsedUrl.origin,
      body: dlForm,
@@ -78,7 +78,7 @@ export const upcloudScraper = makeEmbed({
    let sources: { file: string; type: string } | null = null;

    if (!isJSON(streamRes.sources)) {
      const scriptJs = await ctx.proxiedFetcher<string>(`https://rabbitstream.net/js/player/prod/e4-player.min.js`, {
      const scriptJs = await ctx.proxiedFetcher<string>('https://rabbitstream.net/js/player/prod/e4-player.min.js', {
        query: {
          // browser side caching on this endpoint is quite extreme. Add version query paramter to circumvent any caching
          v: Date.now().toString(),
@@ -23,7 +23,7 @@ function decrypt(input: string) {
}

export async function getDecryptedId(ctx: EmbedScrapeContext) {
  const page = await ctx.proxiedFetcher<string>(`/player.php`, {
  const page = await ctx.proxiedFetcher<string>('/player.php', {
    baseUrl: warezcdnPlayerBase,
    headers: {
      Referer: `${warezcdnPlayerBase}/getEmbed.php?${new URLSearchParams({
@@ -58,7 +58,7 @@ function embed(provider: { id: string; rank: number; disabled?: boolean }) {
        ctx.progress(100);

        return result as EmbedOutput;
      } catch (error) {
      } catch {
        clearInterval(interval);
        ctx.progress(100);
        throw new NotFoundError('Failed to search');
@@ -52,7 +52,7 @@ export const wootlyScraper = makeEmbed({

    if (!tk || !vd) throw new Error('wootly source not found');

    const url = await ctx.proxiedFetcher<string>(`/grabd`, {
    const url = await ctx.proxiedFetcher<string>('/grabd', {
      baseUrl,
      query: { t: tk, id: vd },
      method: 'GET',
@@ -6,7 +6,7 @@ import { NotFoundError } from '@/utils/errors';
const baseUrl = 'https://autoembed.cc/';

async function comboScraper(ctx: ShowScrapeContext | MovieScrapeContext): Promise<SourcererOutput> {
  const playerPage = await ctx.proxiedFetcher(`/embed/player.php`, {
  const playerPage = await ctx.proxiedFetcher('/embed/player.php', {
    baseUrl,
    query: {
      id: ctx.media.tmdbId,
@@ -59,12 +59,10 @@ export async function getFlixhqShowSources(ctx: ScrapeContext, media: ShowMedia,
  const seasonDoc = load(seasonData);
  const episode = seasonDoc('.nav-item > a')
    .toArray()
    .map((el) => {
      return {
        id: seasonDoc(el).attr('data-id'),
        title: seasonDoc(el).attr('title'),
      };
    })
    .map((el) => ({
      id: seasonDoc(el).attr('data-id'),
      title: seasonDoc(el).attr('title'),
    }))
    .find((e) => e.title?.startsWith(`Eps ${media.episode.number}`))?.id;

  if (!episode) throw new NotFoundError('episode not found');
@@ -12,7 +12,7 @@ import { NotFoundError } from '@/utils/errors';

import { getSource } from './source';

export const gomoviesBase = `https://gomovies.sx`;
export const gomoviesBase = 'https://gomovies.sx';

export const goMoviesScraper = makeSourcerer({
  id: 'gomovies',
@@ -17,8 +17,8 @@ export async function getEmbeds(ctx: ScrapeContext, id: string): Promise<EmbedsR
  });

  const cookies = parseSetCookie(data.headers.get('Set-Cookie') || '');
  const RandomCookieName = data.body.split(`_3chk('`)[1].split(`'`)[0];
  const RandomCookieValue = data.body.split(`_3chk('`)[1].split(`'`)[2];
  const RandomCookieName = data.body.split("_3chk('")[1].split("'")[0];
  const RandomCookieValue = data.body.split("_3chk('")[1].split("'")[2];

  let aGoozCookie = '';
  let cookie = '';
@@ -12,7 +12,8 @@ let data;

// The cookie for this headerData doesn't matter, Goojara just checks it's there.
const headersData = {
  cookie: `aGooz=t9pmkdtef1b3lg3pmo1u2re816; bd9aa48e=0d7b89e8c79844e9df07a2; _b414=2151C6B12E2A88379AFF2C0DD65AC8298DEC2BF4; 9d287aaa=8f32ad589e1c4288fe152f`,
  cookie:
    'aGooz=t9pmkdtef1b3lg3pmo1u2re816; bd9aa48e=0d7b89e8c79844e9df07a2; _b414=2151C6B12E2A88379AFF2C0DD65AC8298DEC2BF4; 9d287aaa=8f32ad589e1c4288fe152f',
  Referer: 'https://www.goojara.to/',
};

@@ -20,7 +21,7 @@ export async function searchAndFindMedia(
  ctx: ScrapeContext,
  media: MovieMedia | ShowMedia,
): Promise<Result | undefined> {
  data = await ctx.fetcher<string>(`/xhrr.php`, {
  data = await ctx.fetcher<string>('/xhrr.php', {
    baseUrl,
    headers: headersData,
    method: 'POST',
@@ -44,7 +45,12 @@ export async function searchAndFindMedia(
    if (!slug) throw new NotFoundError('Not found');

    if (media.type === type) {
      results.push({ title, year, slug, type });
      results.push({
        title,
        year,
        slug,
        type,
      });
    }
  });

@@ -16,7 +16,7 @@ async function searchAndFindMediaId(ctx: ShowScrapeContext | MovieScrapeContext)
  const itemRegexPattern = /<a href="([^"]+)"><span class="enty">([^<]+)<\/span> \(([^)]+)\)/g;
  const idRegexPattern = /\/(\d+)-[^/]+\.html$/;

  const searchData = await ctx.proxiedFetcher<string>(`/engine/ajax/search.php`, {
  const searchData = await ctx.proxiedFetcher<string>('/engine/ajax/search.php', {
    baseUrl: rezkaBase,
    headers: baseHeaders,
    query: { q: ctx.media.title },
@@ -32,7 +32,12 @@ async function searchAndFindMediaId(ctx: ShowScrapeContext | MovieScrapeContext)
    if (result !== null) {
      const id = url.match(idRegexPattern)?.[1] || null;

      movieData.push({ id: id ?? '', year: result.year ?? 0, type: ctx.media.type, url });
      movieData.push({
        id: id ?? '',
        year: result.year ?? 0,
        type: ctx.media.type,
        url,
      });
    }
  }

@@ -82,7 +87,7 @@ async function getTranslatorId(
  });

  // Translator ID 238 represents the Original + subtitles player.
  if (response.includes(`data-translator_id="238"`)) return '238';
  if (response.includes('data-translator_id="238"')) return '238';

  const functionName = ctx.media.type === 'movie' ? 'initCDNMoviesEvents' : 'initCDNSeriesEvents';
  const regexPattern = new RegExp(`sof\\.tv\\.${functionName}\\(${id}, ([^,]+)`, 'i');
@@ -18,10 +18,8 @@ export async function search(ctx: ScrapeContext, title: string, seasonNumber?: n

  const searchPage = load(searchResults);

  return Array.from(searchPage('a')).map((drama) => {
    return {
      name: searchPage(drama).text(),
      url: drama.attribs.href,
    };
  });
  return Array.from(searchPage('a')).map((drama) => ({
    name: searchPage(drama).text(),
    url: drama.attribs.href,
  }));
}

@@ -13,7 +13,7 @@ export async function searchAndFindMedia(
  media: MovieMedia | ShowMedia,
): Promise<ResultItem | undefined> {
  if (media.type === 'show') {
    const searchRes = await ctx.fetcher<Result>(`/v1/shows`, {
    const searchRes = await ctx.fetcher<Result>('/v1/shows', {
      baseUrl,
      query: { 'filters[q]': media.title },
    });
@@ -24,7 +24,7 @@ export async function searchAndFindMedia(
    return result;
  }
  if (media.type === 'movie') {
    const searchRes = await ctx.fetcher<Result>(`/v1/movies`, {
    const searchRes = await ctx.fetcher<Result>('/v1/movies', {
      baseUrl,
      query: { 'filters[q]': media.title },
    });
@@ -41,14 +41,15 @@ export async function scrape(ctx: ScrapeContext, media: MovieMedia | ShowMedia,
  if (media.type === 'movie') {
    id = result.id_movie;
  } else if (media.type === 'show') {
    const data = await ctx.fetcher<ShowDataResult>(`/v1/shows`, {
    const data = await ctx.fetcher<ShowDataResult>('/v1/shows', {
      baseUrl,
      query: { expand: 'episodes', id: result.id_show },
    });

    const episode = data.episodes?.find((v: episodeObj) => {
      return Number(v.season) === Number(media.season.number) && Number(v.episode) === Number(media.episode.number);
    });
    const episode = data.episodes?.find(
      (v: episodeObj) =>
        Number(v.season) === Number(media.season.number) && Number(v.episode) === Number(media.episode.number),
    );

    if (episode) id = episode.id;
  }
@@ -14,9 +14,9 @@ export async function getVideoSources(

  let path = '';
  if (media.type === 'show') {
    path = `/v1/episodes/view`;
    path = '/v1/episodes/view';
  } else if (media.type === 'movie') {
    path = `/v1/movies/view`;
    path = '/v1/movies/view';
  }
  const data = await ctx.fetcher<StreamsDataResult>(path, {
    baseUrl,
@@ -21,12 +21,10 @@ async function comboScraper(ctx: ShowScrapeContext | MovieScrapeContext): Promis
  if (res.providers?.length === 0) throw new NotFoundError('No providers available');
  if (!res.endpoint) throw new Error('No endpoint returned');

  const embeds = res.providers.map((provider: string) => {
    return {
      embedId: provider,
      url: `${JSON.stringify(query)}|${res.endpoint}`,
    };
  });
  const embeds = res.providers.map((provider: string) => ({
    embedId: provider,
    url: `${JSON.stringify(query)}|${res.endpoint}`,
  }));

  return {
    embeds,
@@ -109,9 +109,7 @@ export const primewireScraper = makeSourcerer({

      const episodeLink = seasonPage(`.show_season[data-id='${ctx.media.season.number}'] > div > a`)
        .toArray()
        .find((link) => {
          return link.attribs.href.includes(`-episode-${ctx.media.episode.number}`);
        })?.attribs.href;
        .find((link) => link.attribs.href.includes(`-episode-${ctx.media.episode.number}`))?.attribs.href;

      if (!episodeLink) throw new NotFoundError('No episode links found');

@@ -5,7 +5,7 @@ import { NotFoundError } from '@/utils/errors';

const universalScraper = async (ctx: ShowScrapeContext | MovieScrapeContext): Promise<SourcererOutput> => {
  try {
    const res = await ctx.fetcher.full(`https://red-star.ningai.workers.dev/scrape/showbox`, {
    const res = await ctx.fetcher.full('https://red-star.ningai.workers.dev/scrape/showbox', {
      query: {
        type: ctx.media.type,
        title: ctx.media.title,
@@ -8,7 +8,7 @@ import { NotFoundError } from '@/utils/errors';

import { IframeSourceResult, SearchResult } from './types';

const ridoMoviesBase = `https://ridomovies.tv`;
const ridoMoviesBase = 'https://ridomovies.tv';
const ridoMoviesApiBase = `${ridoMoviesBase}/core/api`;

const universalScraper = async (ctx: MovieScrapeContext | ShowScrapeContext) => {
67  src/providers/sources/vidsrcsu.ts  Normal file
@@ -0,0 +1,67 @@
import JSON5 from 'json5';

import { flags } from '@/entrypoint/utils/targets';
import { SourcererOutput, makeSourcerer } from '@/providers/base';
import { Caption, labelToLanguageCode } from '@/providers/captions';
import { MovieScrapeContext, ShowScrapeContext } from '@/utils/context';
import { NotFoundError } from '@/utils/errors';

async function comboScraper(ctx: ShowScrapeContext | MovieScrapeContext): Promise<SourcererOutput> {
  const embedPage = await ctx.proxiedFetcher(
    `https://vidsrc.su/embed/${ctx.media.type === 'movie' ? `movie/${ctx.media.tmdbId}` : `tv/${ctx.media.tmdbId}/${ctx.media.season.number}/${ctx.media.episode.number}`}`,
  );

  const serverDataMatch = embedPage.match(/const fixedServers = +(\[.*?\])/s);
  if (!serverDataMatch[1]) throw new NotFoundError('No data found');

  // const servers: { label: string; url: string }[] = JSON.parse(serverDataMatch[1].replace(/([a-zA-Z0-9_]+): /g, '"$1":').replace(/'/g, '"').replace(/,\s*\]$/, ']'))
  const servers: { label: string; url: string }[] = JSON5.parse(serverDataMatch[1]);

  let playlist;
  // we only want flixhq which is server 1 and server 2
  servers.forEach((server) => {
    if (['Server 1', 'Server 2'].includes(server.label) && server.url) playlist = server.url;
  });
  if (!playlist) throw new NotFoundError('No flixhq playlist found');

  const captionsDataMatch = embedPage.match(/const subtitles = +(\[.*?\])/s);
  const captions: Caption[] = [];

  if (captionsDataMatch[1]) {
    const captionsData: { label: string; file: string }[] = JSON5.parse(captionsDataMatch[1]);
    for (const caption of captionsData) {
      const language = labelToLanguageCode(caption.label);
      if (!language) continue;

      captions.push({
        id: caption.file,
        url: caption.file,
        type: 'vtt',
        hasCorsRestrictions: false,
        language,
      });
    }
  }

  return {
    embeds: [],
    stream: [
      {
        id: 'primary',
        playlist,
        type: 'hls',
        flags: [flags.CORS_ALLOWED],
        captions,
      },
    ],
  };
}

export const vidsrcsuScraper = makeSourcerer({
  id: 'vidsrcsu',
  name: 'vidsrc.su (FlixHQ)',
  rank: 229,
  flags: [flags.CORS_ALLOWED],
  scrapeMovie: comboScraper,
  scrapeShow: comboScraper,
});
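A quick illustration (not from the commit) of why the new vidsrc.su scraper reaches for `JSON5.parse` instead of the commented-out regex-plus-`JSON.parse` approach: the `fixedServers` array embedded in the page is a JavaScript object literal, and JSON5 accepts the unquoted keys, single-quoted strings, and trailing commas that strict JSON rejects. The literal below is a made-up stand-in for what such a page might contain.

```ts
import JSON5 from 'json5';

// Hypothetical excerpt of the embed page's inline script (shape assumed, not real data).
const extracted = `[
  { label: 'Server 1', url: 'https://example.com/playlist.m3u8' },
  { label: 'Server 2', url: '' },
]`;

// JSON.parse(extracted) would throw here: unquoted keys, single quotes, trailing comma.
const servers: { label: string; url: string }[] = JSON5.parse(extracted);
console.log(servers[0].label); // "Server 1"
```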
@@ -10,7 +10,7 @@ export async function getExternalPlayerUrl(ctx: ScrapeContext, embedId: string,
    id: embedUrl,
    sv: embedId,
  };
  const realUrl = await ctx.proxiedFetcher<string>(`/getPlay.php`, {
  const realUrl = await ctx.proxiedFetcher<string>('/getPlay.php', {
    baseUrl: warezcdnApiBase,
    headers: {
      Referer: `${warezcdnApiBase}/getEmbed.php?${new URLSearchParams(params)}`,
@@ -13,7 +13,7 @@ async function getEmbeds(id: string, servers: string, ctx: ScrapeContext): Promi
  const embeds: SourcererEmbed[] = [];

  for (const server of servers.split(',')) {
    await ctx.proxiedFetcher<string>(`/getEmbed.php`, {
    await ctx.proxiedFetcher<string>('/getEmbed.php', {
      baseUrl: warezcdnBase,
      headers: {
        Referer: `${warezcdnBase}/getEmbed.php?${new URLSearchParams({ id, sv: server })}`,
@@ -22,7 +22,7 @@ async function getEmbeds(id: string, servers: string, ctx: ScrapeContext): Promi
      query: { id, sv: server },
    });

    const embedPage = await ctx.proxiedFetcher<string>(`/getPlay.php`, {
    const embedPage = await ctx.proxiedFetcher<string>('/getPlay.php', {
      baseUrl: warezcdnBase,
      headers: {
        Referer: `${warezcdnBase}/getEmbed.php?${new URLSearchParams({ id, sv: server })}`,
@@ -22,12 +22,10 @@ async function comboScraper(ctx: ShowScrapeContext | MovieScrapeContext): Promis

  if (res.providers?.length === 0) throw new NotFoundError('No providers available');

  const embeds = res.providers.map((provider: string) => {
    return {
      embedId: provider,
      url: JSON.stringify(query),
    };
  });
  const embeds = res.providers.map((provider: string) => ({
    embedId: provider,
    url: JSON.stringify(query),
  }));

  return {
    embeds,
@@ -43,16 +43,17 @@ export async function scrapeInvidualSource(
  };

  let output: SourcererOutput | null = null;
  if (ops.media.type === 'movie' && sourceScraper.scrapeMovie)
  if (ops.media.type === 'movie' && sourceScraper.scrapeMovie) {
    output = await sourceScraper.scrapeMovie({
      ...contextBase,
      media: ops.media,
    });
  else if (ops.media.type === 'show' && sourceScraper.scrapeShow)
  } else if (ops.media.type === 'show' && sourceScraper.scrapeShow) {
    output = await sourceScraper.scrapeShow({
      ...contextBase,
      media: ops.media,
    });
  }

  // filter output with only valid streams
  if (output?.stream) {
@@ -75,13 +76,15 @@
    });

    // opensubtitles
    if (!ops.disableOpensubtitles)
      for (const embed of output.embeds)
    if (!ops.disableOpensubtitles) {
      for (const embed of output.embeds) {
        embed.url = `${embed.url}${btoa('MEDIA=')}${btoa(
          `${ops.media.imdbId}${
            ops.media.type === 'show' ? `.${ops.media.season.number}.${ops.media.episode.number}` : ''
          }`,
        )}`;
      }
    }

    if ((!output.stream || output.stream.length === 0) && output.embeds.length === 0)
      throw new NotFoundError('No streams found');
@@ -92,7 +95,7 @@
    if (playableStreams.length === 0) throw new NotFoundError('No playable streams found');

    // opensubtitles
    if (!ops.disableOpensubtitles)
    if (!ops.disableOpensubtitles) {
      for (const playableStream of playableStreams) {
        playableStream.captions = await addOpenSubtitlesCaptions(
          playableStream.captions,
@@ -104,6 +107,7 @@
          ),
        );
      }
    }
    output.stream = playableStreams;
  }
  return output;
@@ -74,16 +74,17 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
    // run source scrapers
    let output: SourcererOutput | null = null;
    try {
      if (ops.media.type === 'movie' && source.scrapeMovie)
      if (ops.media.type === 'movie' && source.scrapeMovie) {
        output = await source.scrapeMovie({
          ...contextBase,
          media: ops.media,
        });
      else if (ops.media.type === 'show' && source.scrapeShow)
      } else if (ops.media.type === 'show' && source.scrapeShow) {
        output = await source.scrapeShow({
          ...contextBase,
          media: ops.media,
        });
      }
      if (output) {
        output.stream = (output.stream ?? [])
          .filter(isValidStream)
@@ -116,7 +117,7 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
        if (!playableStream) throw new NotFoundError('No streams found');

        // opensubtitles
        if (!ops.disableOpensubtitles)
        if (!ops.disableOpensubtitles) {
          playableStream.captions = await addOpenSubtitlesCaptions(
            playableStream.captions,
            ops,
@@ -126,6 +127,7 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
            }`,
          ),
        );
        }

        return {
          sourceId: source.id,
@@ -179,7 +181,7 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
        if (!playableStream) throw new NotFoundError('No streams found');

        // opensubtitles
        if (!ops.disableOpensubtitles)
        if (!ops.disableOpensubtitles) {
          playableStream.captions = await addOpenSubtitlesCaptions(
            playableStream.captions,
            ops,
@@ -189,6 +191,7 @@ export async function runAllProviders(list: ProviderList, ops: ProviderRunnerOpt
            }`,
          ),
        );
        }
        embedOutput.stream = [playableStream];
      } catch (error) {
        const updateParams: UpdateEvent = {
@@ -1,9 +1,9 @@
export const isReactNative = () => {
  try {
    // eslint-disable-next-line global-require, @typescript-eslint/no-var-requires
    // eslint-disable-next-line global-require, @typescript-eslint/no-require-imports
    require('react-native');
    return true;
  } catch (e) {
  } catch {
    return false;
  }
};
@@ -32,7 +32,7 @@ export async function addOpenSubtitlesCaptions(
      const url = caption.SubDownloadLink.replace('.gz', '').replace('download/', 'download/subencoding-utf8/');
      const language = labelToLanguageCode(caption.LanguageName);
      if (!url || !language) continue;
      else
      else {
        openSubtilesCaptions.push({
          id: url,
          opensubtitles: true,
@@ -41,6 +41,7 @@ export async function addOpenSubtitlesCaptions(
          hasCorsRestrictions: false,
          language,
        });
      }
    }
    return [...captions, ...removeDuplicatedLanguages(openSubtilesCaptions)];
  } catch {