mirror of https://github.com/anidl/multi-downloader-nx.git (synced 2026-01-11 20:10:20 +00:00)
my nerves
This commit is contained in: parent c3eee3d6cb, commit cd29a54c0b
11 changed files with 4875 additions and 4233 deletions
hidive.ts (32 changed lines)

@@ -20,7 +20,7 @@ import vtt2ass from './modules/module.vtt2ass';
// load req
import { domain, api } from './modules/module.api-urls';
import * as reqModule from './modules/module.req';
import * as reqModule from './modules/module.fetch';
import { DownloadedMedia } from './@types/hidiveTypes';
import parseFileName, { Variable } from './modules/module.filename';
import { downloaded } from './modules/module.downloadArchive';

@@ -152,7 +152,7 @@ export default class Hidive implements ServiceClass {
};
let apiReq = await this.req.getData(options.url, apiReqOpts);
if(!apiReq.ok || !apiReq.res){
if (apiReq.error && apiReq.error.res?.statusCode == 401) {
if (apiReq.error && apiReq.error.res?.status == 401) {
console.warn('Token expired, refreshing token and retrying.');
if (await this.refreshToken()) {
if (authType == 'other') {

@@ -208,7 +208,7 @@ export default class Hidive implements ServiceClass {
console.error('Authentication failed!');
return { isOk: false, reason: new Error('Authentication failed') };
}
const tokens: Record<string, string> = JSON.parse(authReq.res.body);
const tokens: Record<string, string> = JSON.parse(await authReq.res.text());
for (const token in tokens) {
this.token[token] = tokens[token];
}

@@ -224,7 +224,7 @@ export default class Hidive implements ServiceClass {
console.error('Authentication failed!');
return false;
}
const tokens: Record<string, string> = JSON.parse(authReq.res.body);
const tokens: Record<string, string> = JSON.parse(await authReq.res.text());
for (const token in tokens) {
this.token[token] = tokens[token];
}

@@ -245,7 +245,7 @@ export default class Hidive implements ServiceClass {
console.error('Token refresh failed, reinitializing session...');
return this.initSession();
}
const tokens: Record<string, string> = JSON.parse(authReq.res.body);
const tokens: Record<string, string> = JSON.parse(await authReq.res.text());
for (const token in tokens) {
this.token[token] = tokens[token];
}

@@ -260,7 +260,7 @@ export default class Hidive implements ServiceClass {
console.error('Failed to initialize session.');
return false;
}
const tokens: Record<string, string> = JSON.parse(authReq.res.body).authentication;
const tokens: Record<string, string> = JSON.parse(await authReq.res.text()).authentication;
for (const token in tokens) {
this.token[token] = tokens[token];
}

@@ -284,7 +284,7 @@ export default class Hidive implements ServiceClass {
console.error('Search FAILED!');
return { isOk: false, reason: new Error('Search failed. No more information provided') };
}
const searchData = JSON.parse(searchReq.res.body) as NewHidiveSearch;
const searchData = JSON.parse(await searchReq.res.text()) as NewHidiveSearch;
const searchItems: Hit[] = [];
console.info('Search Results:');
for (const category of searchData.results) {

@@ -318,7 +318,7 @@ export default class Hidive implements ServiceClass {
console.error('Failed to get Series Data');
return { isOk: false };
}
const seriesData = JSON.parse(getSeriesData.res.body) as NewHidiveSeries;
const seriesData = JSON.parse(await getSeriesData.res.text()) as NewHidiveSeries;
return { isOk: true, value: seriesData };
}

@@ -334,7 +334,7 @@ export default class Hidive implements ServiceClass {
console.error('Failed to get Season Data');
return { isOk: false };
}
const seasonData = JSON.parse(getSeasonData.res.body) as NewHidiveSeason;
const seasonData = JSON.parse(await getSeasonData.res.text()) as NewHidiveSeason;
return { isOk: true, value: seasonData };
}

@@ -506,7 +506,7 @@ export default class Hidive implements ServiceClass {
console.error('Failed to get episode data');
return { isOk: false, reason: new Error('Failed to get Episode Data') };
}
const episodeData = JSON.parse(episodeDataReq.res.body) as NewHidiveEpisode;
const episodeData = JSON.parse(await episodeDataReq.res.text()) as NewHidiveEpisode;

if (!episodeData.playerUrlCallback) {
console.error('Failed to download episode: You do not have access to this');

@@ -519,7 +519,7 @@ export default class Hidive implements ServiceClass {
console.error('Playback Request Failed');
return { isOk: false, reason: new Error('Playback request failed') };
}
const playbackData = JSON.parse(playbackReq.res.body) as NewHidivePlayback;
const playbackData = JSON.parse(await playbackReq.res.text()) as NewHidivePlayback;

//Get actual MPD
const mpdRequest = await this.req.getData(playbackData.dash[0].url);

@@ -527,7 +527,7 @@ export default class Hidive implements ServiceClass {
console.error('MPD Request Failed');
return { isOk: false, reason: new Error('MPD request failed') };
}
const mpd = mpdRequest.res.body as string;
const mpd = await mpdRequest.res.text() as string;

selectedEpisode.jwtToken = playbackData.dash[0].drm.jwtToken;

@@ -566,7 +566,7 @@ export default class Hidive implements ServiceClass {
console.error('Failed to get episode data');
return { isOk: false, reason: new Error('Failed to get Episode Data') };
}
const episodeData = JSON.parse(episodeDataReq.res.body) as NewHidiveEpisode;
const episodeData = JSON.parse(await episodeDataReq.res.text()) as NewHidiveEpisode;

if (episodeData.title.includes(' - ') && episodeData.episodeInformation) {
episodeData.episodeInformation.episodeNumber = parseFloat(episodeData.title.split(' - ')[0].replace('E', ''));

@@ -599,7 +599,7 @@ export default class Hidive implements ServiceClass {
console.error('Playback Request Failed');
return { isOk: false, reason: new Error('Playback request failed') };
}
const playbackData = JSON.parse(playbackReq.res.body) as NewHidivePlayback;
const playbackData = JSON.parse(await playbackReq.res.text()) as NewHidivePlayback;

//Get actual MPD
const mpdRequest = await this.req.getData(playbackData.dash[0].url);

@@ -607,7 +607,7 @@ export default class Hidive implements ServiceClass {
console.error('MPD Request Failed');
return { isOk: false, reason: new Error('MPD request failed') };
}
const mpd = mpdRequest.res.body as string;
const mpd = await mpdRequest.res.text() as string;

const selectedEpisode: NewHidiveEpisodeExtra = {
...episodeData,

@@ -989,7 +989,7 @@ export default class Hidive implements ServiceClass {
if (getVttContent.ok && getVttContent.res) {
console.info(`Subtitle Downloaded: ${sub.url}`);
//vttConvert(getVttContent.res.body, false, subLang.name, fontSize);
const sBody = vtt2ass(undefined, chosenFontSize, getVttContent.res.body, '', subsMargin, options.fontName, options.combineLines);
const sBody = vtt2ass(undefined, chosenFontSize, await getVttContent.res.text(), '', subsMargin, options.fontName, options.combineLines);
sxData.title = `${subLang.language} / ${sxData.title}`;
sxData.fonts = fontsData.assFonts(sBody) as Font[];
fs.writeFileSync(sxData.path, sBody);
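For context, a minimal sketch (not from this commit) of the calling pattern the hidive.ts changes converge on: the request helper now hands back a native fetch Response, so the status lives on res.status instead of got's res.statusCode and the body must be awaited instead of read from res.body. The fetchTokens name is illustrative only.

async function fetchTokens(url: string, init: RequestInit): Promise<Record<string, string>> {
  const res = await fetch(url, init);
  // fetch exposes res.status (a number); got exposed res.statusCode and a pre-read res.body string
  if (res.status == 401) {
    throw new Error('Token expired, refresh and retry');
  }
  // the body is a stream, so it has to be awaited before JSON.parse
  return JSON.parse(await res.text());
}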
modules/cdm.ts (168 changed lines)

@@ -1,13 +1,12 @@
import fs from 'fs';
import { console } from './log';
import got from 'got';
import { workingDir } from './module.cfg-loader';
import path from 'path';
import { ReadError, Response } from 'got';
import { Device } from './playready/device';
import Cdm from './playready/cdm';
import { PSSH } from './playready/pssh';
import { KeyContainer, Session } from './widevine/license';
import { ofetch } from 'ofetch';

//read cdm files located in the same directory
let privateKey: Buffer = Buffer.from([]),

@@ -25,7 +24,7 @@ try {
const stats = fs.statSync(file_prd);
if (stats.size < 1024 * 8 && stats.isFile()) {
const fileContents = fs.readFileSync(file_prd, {
encoding: 'utf8',
encoding: 'utf8'
});
if (fileContents.includes('CERT')) {
prd = fs.readFileSync(file_prd);

@@ -46,10 +45,7 @@ try {
const stats = fs.statSync(file);
if (stats.size < 1024 * 8 && stats.isFile()) {
const fileContents = fs.readFileSync(file, { encoding: 'utf8' });
if (
fileContents.includes('-BEGIN PRIVATE KEY-') ||
fileContents.includes('-BEGIN RSA PRIVATE KEY-')
) {
if (fileContents.includes('-BEGIN PRIVATE KEY-') || fileContents.includes('-BEGIN RSA PRIVATE KEY-')) {
privateKey = fs.readFileSync(file);
}
if (fileContents.includes('widevine_cdm_version')) {

@@ -85,11 +81,7 @@ try {
canDecrypt = false;
}

export async function getKeysWVD(
pssh: string | undefined,
licenseServer: string,
authData: Record<string, string>
): Promise<KeyContainer[]> {
export async function getKeysWVD(pssh: string | undefined, licenseServer: string, authData: Record<string, string>): Promise<KeyContainer[]> {
if (!pssh || !canDecrypt) return [];
//pssh found in the mpd manifest
const psshBuffer = Buffer.from(pssh, 'base64');

@@ -98,147 +90,93 @@ export async function getKeysWVD(
const session = new Session({ privateKey, identifierBlob }, psshBuffer);

//Generate license
let response;
try {
response = await fetch(licenseServer, {
method: 'POST',
body: session.createLicenseRequest(),
headers: authData
});
} catch (_error) {
const error = _error as {
name: string;
} & ReadError & {
res: Response<unknown>;
};
if (
error.response &&
error.response.statusCode &&
error.response.statusMessage
) {
console.error(
`${error.name} ${error.response.statusCode}: ${error.response.statusMessage}`
);
const data = await ofetch(licenseServer, {
method: 'POST',
body: session.createLicenseRequest(),
headers: authData,
responseType: 'arrayBuffer'
}).catch((error) => {
if (error.status && error.statusText) {
console.error(`${error.name} ${error.status}: ${error.statusText}`);
} else {
console.error(`${error.name}: ${error.code || error.message}`);
console.error(`${error.name}: ${error.message}`);
}
if (error.response && !error.res) {
error.res = error.response;
const docTitle = (error.res.body as string).match(/<title>(.*)<\/title>/);
if (error.res.body && docTitle) {

if (!error.data) return;
const data = error.data instanceof ArrayBuffer ? new TextDecoder().decode(error.data) : error.data;
if (data) {
const docTitle = data.match(/<title>(.*)<\/title>/);
if (docTitle) {
console.error(docTitle[1]);
}
if (error.status && error.status != 404 && error.status != 403) {
console.error('Body:', data);
}
}
if (
error.res &&
error.res.body &&
error.response.statusCode &&
error.response.statusCode != 404 &&
error.response.statusCode != 403
) {
console.error('Body:', error.res.body);
}
return [];
}
});

if (response.status === 200) {
if (data) {
//Parse License and return keys
const buffer = await response.arrayBuffer();
const text = new TextDecoder().decode(buffer);
const text = new TextDecoder().decode(data);
try {
const json = JSON.parse(text);
return session.parseLicense(Buffer.from(json['license'], 'base64')) as KeyContainer[];
} catch {
return session.parseLicense(Buffer.from(new Uint8Array(buffer))) as KeyContainer[];
return session.parseLicense(Buffer.from(new Uint8Array(data))) as KeyContainer[];
}
} else {
console.info(
'License request failed:',
response.status,
await response.text()
);
console.error('License request failed');
return [];
}
}

export async function getKeysPRD(
pssh: string | undefined,
licenseServer: string,
authData: Record<string, string>
): Promise<KeyContainer[]> {
export async function getKeysPRD(pssh: string | undefined, licenseServer: string, authData: Record<string, string>): Promise<KeyContainer[]> {
if (!pssh || !canDecrypt || !prd_cdm) return [];
const pssh_parsed = new PSSH(pssh);

//Create a new playready session
const session = prd_cdm.getLicenseChallenge(
pssh_parsed.get_wrm_headers(true)[0]
);
const session = prd_cdm.getLicenseChallenge(pssh_parsed.get_wrm_headers(true)[0]);

//Generate license
let response;
try {
response = await got(licenseServer, {
method: 'POST',
body: session,
headers: authData,
responseType: 'text',
});
} catch (_error) {
const error = _error as {
name: string;
} & ReadError & {
res: Response<unknown>;
};
if (
error.response &&
error.response.statusCode &&
error.response.statusMessage
) {
console.error(
`${error.name} ${error.response.statusCode}: ${error.response.statusMessage}`
);
const data = await ofetch(licenseServer, {
method: 'POST',
body: session,
headers: authData,
responseType: 'text'
}).catch((error) => {
if (error && error.status && error.statusText) {
console.error(`${error.name} ${error.status}: ${error.statusText}`);
} else {
console.error(`${error.name}: ${error.code || error.message}`);
console.error(`${error.name}: ${error.message}`);
}
if (error.response && !error.res) {
error.res = error.response;
const docTitle = (error.res.body as string).match(/<title>(.*)<\/title>/);
if (error.res.body && docTitle) {
console.error(docTitle[1]);
}
}
if (
error.res &&
error.res.body &&
error.response.statusCode &&
error.response.statusCode != 404 &&
error.response.statusCode != 403
) {
console.error('Body:', error.res.body);
}
return [];
}

if (response.statusCode === 200) {
if (!error.data) return;
const docTitle = error.data.match(/<title>(.*)<\/title>/);
if (docTitle) {
console.error(docTitle[1]);
}
if (error.status && error.status != 404 && error.status != 403) {
console.error('Body:', error.data);
}
});

if (data) {
//Parse License and return keys
try {
const keys = prd_cdm.parseLicense(response.body);
const keys = prd_cdm.parseLicense(data);

return keys.map((k) => {
return {
kid: k.key_id,
key: k.key,
key: k.key
};
});
} catch {
console.error('License parsing failed');
return [];
}
} else {
console.info(
'License request failed:',
response.statusMessage,
response.body
);
console.error('License request failed');
return [];
}
}
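For context, a minimal sketch (not from this commit) of the ofetch call-and-catch shape cdm.ts now uses for license requests; the postLicense name and its parameters are illustrative only.

import { ofetch } from 'ofetch';

async function postLicense(url: string, body: Uint8Array, headers: Record<string, string>): Promise<ArrayBuffer | undefined> {
  return ofetch(url, {
    method: 'POST',
    body,
    headers,
    responseType: 'arrayBuffer'
  }).catch((error) => {
    // ofetch rejects with a FetchError that carries status/statusText and the response body on error.data
    console.error(`${error.name} ${error.status ?? ''}: ${error.statusText ?? error.message}`);
    return undefined;
  });
}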
modules/hls-download-new.ts (new file, 393 lines)

@@ -0,0 +1,393 @@
// build-in
import crypto from 'crypto';
import fs from 'fs/promises';
import fsp from 'fs';
import url from 'url';

// extra
import shlp from 'sei-helper';

import { console } from './log';
import { ProgressData } from '../@types/messageHandler';
import { ofetch } from 'ofetch';

export type HLSCallback = (data: ProgressData) => unknown;

export type M3U8Json = {
segments: Record<string, unknown>[];
mediaSequence?: number;
};

type Segment = {
uri: string;
key: Key;
byterange?: {
offset: number;
length: number;
};
};

type Key = {
uri: string;
iv: number[];
};

export type HLSOptions = {
m3u8json: M3U8Json;
output?: string;
threads?: number;
retries?: number;
offset?: number;
baseurl?: string;
skipInit?: boolean;
timeout?: number;
fsRetryTime?: number;
override?: 'Y' | 'y' | 'N' | 'n' | 'C' | 'c';
callback?: HLSCallback;
};

type Data = {
parts: {
first: number;
total: number;
completed: number;
};
m3u8json: M3U8Json;
outputFile: string;
threads: number;
retries: number;
offset: number;
baseurl?: string;
skipInit?: boolean;
keys: {
[uri: string]: Buffer | string;
};
timeout: number;
checkPartLength: boolean;
isResume: boolean;
bytesDownloaded: number;
waitTime: number;
callback?: HLSCallback;
override?: string;
dateStart: number;
};

// hls class
class hlsDownload {
private data: Data;
constructor(options: HLSOptions) {
// check playlist
if (!options || !options.m3u8json || !options.m3u8json.segments || options.m3u8json.segments.length === 0) {
throw new Error('Playlist is empty!');
}
// init options
this.data = {
parts: {
first: options.m3u8json.mediaSequence || 0,
total: options.m3u8json.segments.length,
completed: 0
},
m3u8json: options.m3u8json,
outputFile: options.output || 'stream.ts',
threads: options.threads || 5,
retries: options.retries || 4,
offset: options.offset || 0,
baseurl: options.baseurl,
skipInit: options.skipInit,
keys: {},
timeout: options.timeout ? options.timeout : 60 * 1000,
checkPartLength: false,
isResume: options.offset ? options.offset > 0 : false,
bytesDownloaded: 0,
waitTime: options.fsRetryTime ?? 1000 * 5,
callback: options.callback,
override: options.override,
dateStart: 0
};
}
async download() {
// set output
const fn = this.data.outputFile;
// try load resume file
if (fsp.existsSync(fn) && fsp.existsSync(`${fn}.resume`) && this.data.offset < 1) {
try {
console.info('Resume data found! Trying to resume...');
const resumeData = JSON.parse(await fs.readFile(`${fn}.resume`, 'utf-8'));
if (resumeData.total == this.data.m3u8json.segments.length && resumeData.completed != resumeData.total && !isNaN(resumeData.completed)) {
console.info('Resume data is ok!');
this.data.offset = resumeData.completed;
this.data.isResume = true;
} else {
console.warn(' Resume data is wrong!');
console.warn({
resume: { total: resumeData.total, dled: resumeData.completed },
current: { total: this.data.m3u8json.segments.length }
});
}
} catch (e) {
console.error('Resume failed, downloading will be not resumed!');
console.error(e);
}
}
// ask before rewrite file
if (fsp.existsSync(`${fn}`) && !this.data.isResume) {
let rwts = this.data.override ?? (await shlp.question(`[Q] File «${fn}» already exists! Rewrite? ([y]es/[N]o/[c]ontinue)`));
rwts = rwts || 'N';
if (['Y', 'y'].includes(rwts[0])) {
console.info(`Deleting «${fn}»...`);
await fs.unlink(fn);
} else if (['C', 'c'].includes(rwts[0])) {
return { ok: true, parts: this.data.parts };
} else {
return { ok: false, parts: this.data.parts };
}
}
// show output filename
if (fsp.existsSync(fn) && this.data.isResume) {
console.info(`Adding content to «${fn}»...`);
} else {
console.info(`Saving stream to «${fn}»...`);
}
// start time
this.data.dateStart = Date.now();
let segments = this.data.m3u8json.segments;
// download init part
if (segments[0].map && this.data.offset === 0 && !this.data.skipInit) {
console.info('Download and save init part...');
const initSeg = segments[0].map as Segment;
if (segments[0].key) {
initSeg.key = segments[0].key as Key;
}
try {
const initDl = await this.downloadPart(initSeg, 0, 0);
await fs.writeFile(fn, initDl.dec, { flag: 'a' });
await fs.writeFile(
`${fn}.resume`,
JSON.stringify({
completed: 0,
total: this.data.m3u8json.segments.length
})
);
console.info('Init part downloaded.');
} catch (e: any) {
console.error(`Part init download error:\n\t${e.message}`);
return { ok: false, parts: this.data.parts };
}
} else if (segments[0].map && this.data.offset === 0 && this.data.skipInit) {
console.warn('Skipping init part can lead to broken video!');
}
// resuming ...
if (this.data.offset > 0) {
segments = segments.slice(this.data.offset);
console.info(`Resuming download from part ${this.data.offset + 1}...`);
this.data.parts.completed = this.data.offset;
}
// dl process
for (let p = 0; p < segments.length / this.data.threads; p++) {
// set offsets
const offset = p * this.data.threads;
const dlOffset = offset + this.data.threads;
// map download threads
const krq = new Map(),
prq = new Map();
const res = [];
let errcnt = 0;
for (let px = offset; px < dlOffset && px < segments.length; px++) {
const curp = segments[px];
const key = curp.key as Key;
if (key && !krq.has(key.uri) && !this.data.keys[key.uri as string]) {
krq.set(key.uri, this.downloadKey(key, px, this.data.offset));
}
}
try {
await Promise.all(krq.values());
} catch (er: any) {
console.error(`Key ${er.p + 1} download error:\n\t${er.message}`);
return { ok: false, parts: this.data.parts };
}
for (let px = offset; px < dlOffset && px < segments.length; px++) {
const curp = segments[px] as Segment;
prq.set(px, this.downloadPart(curp, px, this.data.offset));
}
for (let i = prq.size; i--; ) {
try {
const r = await Promise.race(prq.values());
prq.delete(r.p);
res[r.p - offset] = r.dec;
} catch (error: any) {
console.error('Part %s download error:\n\t%s', error.p + 1 + this.data.offset, error.message);
prq.delete(error.p);
errcnt++;
}
}
// catch error
if (errcnt > 0) {
console.error(`${errcnt} parts not downloaded`);
return { ok: false, parts: this.data.parts };
}
// write downloaded
for (const r of res) {
let error = 0;
while (error < 3) {
try {
await fs.writeFile(fn, r, { flag: 'a' });
break;
} catch (err) {
console.error(err);
console.error(`Unable to write to file '${fn}' (Attempt ${error + 1}/3)`);
console.info(`Waiting ${Math.round(this.data.waitTime / 1000)}s before retrying`);
await new Promise<void>((resolve) => setTimeout(() => resolve(), this.data.waitTime));
}
error++;
}
if (error === 3) {
console.error(`Unable to write content to '${fn}'.`);
return { ok: false, parts: this.data.parts };
}
}
// log downloaded
const totalSeg = segments.length + this.data.offset; // Add the sliced lenght back so the resume data will be correct even if an resumed download fails
const downloadedSeg = dlOffset < totalSeg ? dlOffset : totalSeg;
this.data.parts.completed = downloadedSeg + this.data.offset;
const data = extFn.getDownloadInfo(this.data.dateStart, downloadedSeg, totalSeg, this.data.bytesDownloaded);
await fs.writeFile(
`${fn}.resume`,
JSON.stringify({
completed: this.data.parts.completed,
total: totalSeg
})
);
console.info(
`${downloadedSeg} of ${totalSeg} parts downloaded [${data.percent}%] (${shlp.formatTime(parseInt((data.time / 1000).toFixed(0)))} | ${(
data.downloadSpeed / 1000000
).toPrecision(2)}Mb/s)`
);
if (this.data.callback)
this.data.callback({
total: this.data.parts.total,
cur: this.data.parts.completed,
bytes: this.data.bytesDownloaded,
percent: data.percent,
time: data.time,
downloadSpeed: data.downloadSpeed
});
}
// return result
await fs.unlink(`${fn}.resume`);
return { ok: true, parts: this.data.parts };
}
async downloadPart(seg: Segment, segIndex: number, segOffset: number) {
const sURI = extFn.getURI(seg.uri, this.data.baseurl);
let decipher, part, dec;
const p = segIndex;
try {
if (seg.key != undefined) {
decipher = await this.getKey(seg.key, p, segOffset);
}
part = await extFn.getData(
p,
sURI,
{
...(seg.byterange
? {
Range: `bytes=${seg.byterange.offset}-${seg.byterange.offset + seg.byterange.length - 1}`
}
: {})
},
segOffset,
false
);
// if (this.data.checkPartLength) {
// this.data.checkPartLength = false;
// console.warn(`Part ${segIndex + segOffset + 1}: can't check parts size!`);
// }
if (decipher == undefined) {
this.data.bytesDownloaded += Buffer.from(part).byteLength;
return { dec: Buffer.from(part), p };
}
dec = decipher.update(Buffer.from(part));
dec = Buffer.concat([dec, decipher.final()]);
this.data.bytesDownloaded += dec.byteLength;
} catch (error: any) {
error.p = p;
throw error;
}
return { dec, p };
}
async downloadKey(key: Key, segIndex: number, segOffset: number) {
const kURI = extFn.getURI(key.uri, this.data.baseurl);
if (!this.data.keys[kURI]) {
try {
const rkey = await extFn.getData(segIndex, kURI, {}, segOffset, true);
return rkey;
} catch (error: any) {
error.p = segIndex;
throw error;
}
}
}
async getKey(key: Key, segIndex: number, segOffset: number) {
const kURI = extFn.getURI(key.uri, this.data.baseurl);
const p = segIndex;
if (!this.data.keys[kURI]) {
try {
const rkey = await this.downloadKey(key, segIndex, segOffset);
if (!rkey) throw new Error();
this.data.keys[kURI] = Buffer.from(rkey);
} catch (error: any) {
error.p = p;
throw error;
}
}
// get ivs
const iv = Buffer.alloc(16);
const ivs = key.iv ? key.iv : [0, 0, 0, p + 1];
for (let i = 0; i < ivs.length; i++) {
iv.writeUInt32BE(ivs[i], i * 4);
}
return crypto.createDecipheriv('aes-128-cbc', this.data.keys[kURI], iv);
}
}

const extFn = {
getURI: (uri: string, baseurl?: string) => {
const httpURI = /^https{0,1}:/.test(uri);
if (!baseurl && !httpURI) {
throw new Error('No base and not http(s) uri');
} else if (httpURI) {
return uri;
}
return baseurl + uri;
},
getDownloadInfo: (dateStart: number, partsDL: number, partsTotal: number, downloadedBytes: number) => {
const dateElapsed = Date.now() - dateStart;
const percentFxd = parseInt(((partsDL / partsTotal) * 100).toFixed());
const percent = percentFxd < 100 ? percentFxd : partsTotal == partsDL ? 100 : 99;
const revParts = dateElapsed * (partsTotal / partsDL - 1);
const downloadSpeed = downloadedBytes / (dateElapsed / 1000); //Bytes per second
return { percent, time: revParts, downloadSpeed };
},
getData: async (partIndex: number, uri: string, headers: Record<string, string>, segOffset: number, isKey: boolean) => {
// get file if uri is local
if (uri.startsWith('file://')) {
const buffer = await fs.readFile(url.fileURLToPath(uri));
return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
}
// do request
return await ofetch(uri, {
method: 'GET',
headers: headers,
responseType: 'arrayBuffer',
retry: 10,
retryDelay: 500,
async onRequestError({ error }) {
const partType = isKey ? 'Key' : 'Part';
const partIndx = partIndex + 1 + segOffset;
console.warn('%s %s: attempt to retrieve data', partType, partIndx);
console.error(`\t${error.message}`);
}
});
}
};

export default hlsDownload;
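For context, a minimal usage sketch (not from this commit) of the new downloader class; playlistSegments is a placeholder for the output of whatever m3u8 parser the caller uses.

import hlsDownload from './modules/hls-download-new';

declare const playlistSegments: Record<string, unknown>[]; // placeholder: parsed playlist segments

const dl = new hlsDownload({
  m3u8json: { segments: playlistSegments },
  output: 'stream.ts',
  threads: 5,
  callback: (p) => console.info(`${p.percent}% done`)
});
const result = await dl.download(); // resolves to { ok: boolean, parts: { first, total, completed } }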
@@ -59,7 +59,7 @@ const api: APIType = {
rss_gid: `${domain.www}/syndication/feed?type=episodes&group_id=`, // &lang=enUS
media_page: `${domain.www}/media-`,
series_page: `${domain.www}/series-`,
auth: `${domain.api_beta}/auth/v1/token`,
auth: `${domain.www}/auth/v1/token`,
// mobile api
search3: `${domain.api}/autocomplete.0.json`,
session: `${domain.api}/start_session.0.json`,
@@ -1,163 +0,0 @@
// build-in
import child_process from 'child_process';
import fs from 'fs-extra';
import { Headers } from 'got';
import path from 'path';
import { console } from './log';

export type CurlOptions = {
headers?: Headers,
curlProxy?: boolean,
curlProxyAuth?: string,
minVersion?: string,
http2?: boolean,
body?: unknown,
curlDebug?: boolean
} | undefined;

export type Res = {
httpVersion: string,
statusCode: string,
statusMessage: string,
rawHeaders: string,
headers: Record<string, string[]|string>,
rawBody: Buffer,
body: string,
}

// req
const curlReq = async (curlBin: string, url: string, options: CurlOptions, cache: string) => {

const curlOpt = [
`"${curlBin}"`,
`"${url}"`,
];

options = options || {};

if(options.headers && Object.keys(options.headers).length > 0){
for(const h of Object.keys(options.headers)){
const hC = options.headers[h];
curlOpt.push('-H', `"${h}: ${hC}"`);
}
}

if(options.curlProxy){
curlOpt.push('--proxy-insecure', '-x', `"${options.curlProxy}"`);
if(options.curlProxyAuth && typeof options.curlProxyAuth == 'string' && options.curlProxyAuth.match(':')){
curlOpt.push('-U', `"${options.curlProxyAuth}"`);
}
}

const reqId = uuidv4();
const headFile = path.join(cache, `/res-headers-${reqId}`);
const bodyFile = path.join(cache, `/res-body-${reqId}`);
const errFile = path.join(cache, `/res-err-${reqId}`);

curlOpt.push('-D', `"${headFile}"`);
curlOpt.push('-o', `"${bodyFile}"`);
curlOpt.push('--stderr', `"${errFile}"`);
curlOpt.push('-L', '-s', '-S');

if(options.minVersion == 'TLSv1.3'){
curlOpt.push('--tlsv1.3');
}
if(options.http2){
curlOpt.push('--http2');
}

if(options.body){
curlOpt.push('--data-urlencode', `"${options.body}"`);
}

const curlComm = curlOpt.join(' ');

try{
if(options.curlDebug){
console.info(curlComm, '\n');
}
child_process.execSync(curlComm, { stdio: 'inherit', windowsHide: true });
}
catch(next){
const errData = { name: 'RequestError', message: 'EACCES' };
try{
fs.unlinkSync(headFile);
}
catch(e){
// ignore it...
}
try{
errData.message =
fs.readFileSync(errFile, 'utf8')
.replace(/^curl: /, '');
fs.unlinkSync(errFile);
}
catch(e){
// ignore it...
}
throw errData;
}

const rawHeaders = fs.readFileSync(headFile, 'utf8');
const rawBody = fs.readFileSync(bodyFile);
fs.unlinkSync(headFile);
fs.unlinkSync(bodyFile);
fs.unlinkSync(errFile);

const res: Res = {
httpVersion: '',
statusCode: '',
statusMessage: '',
rawHeaders: rawHeaders,
headers: {},
rawBody: rawBody,
body: rawBody.toString(),
};

const headersCont = rawHeaders.replace(/\r/g, '').split('\n');

for(const h of headersCont){
if( h == '' ){ continue; }
if(!h.match(':')){
const statusRes = h.split(' ');
res.httpVersion = statusRes[0].split('/')[1];
res.statusCode = statusRes[1];
res.statusMessage = statusRes.slice(2).join(' ');
}
else{
const resHeader = h.split(': ');
const resHeadName = resHeader[0].toLowerCase();
const resHeadCont = resHeader.slice(1).join(': ');
if(resHeadName == 'set-cookie'){
if(!Object.prototype.hasOwnProperty.call(res.headers, resHeadName)){
res.headers[resHeadName] = [];
}
(res.headers[resHeadName] as string[]).push(resHeadCont);
}
else{
res.headers[resHeadName] = resHeadCont;
}
}
}

if(!res.statusCode.match(/^(2|3)\d\d$/)){
const httpStatusMessage = res.statusMessage ? ` (${res.statusMessage})` : '';
throw {
name: 'HTTPError',
message: `Response code ${res.statusCode}${httpStatusMessage}`,
response: res
};
}

return res;

};

function uuidv4() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
const r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
}

export default curlReq;
@@ -1,9 +1,8 @@
import * as yamlCfg from './module.cfg-loader';
import { console } from './log';
import { Method } from 'got';

export type Params = {
method?: Method,
method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE',
headers?: Record<string, string>,
body?: string | Buffer,
binary?: boolean,

@@ -35,29 +34,10 @@ export class Req {
// options
const options: RequestInit = {
method: params.method ? params.method : 'GET',
headers: {
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
'accept-language': 'en-US,en;q=0.9',
'cache-control': 'no-cache',
'pragma': 'no-cache',
'sec-ch-ua': '"Google Chrome";v="123", "Not:A-Brand";v="8", "Chromium";v="123"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Windows"',
'sec-fetch-dest': 'document',
'sec-fetch-mode': 'navigate',
'sec-fetch-site': 'none',
'sec-fetch-user': '?1',
'upgrade-insecure-requests': '1',
},
};
// additional params
if(params.headers){
options.headers = {...options.headers, ...params.headers};
}
if(options.method == 'POST'){
if (!(options.headers as Record<string, string>)['Content-Type']) {
(options.headers as Record<string, string>)['Content-Type'] = 'application/x-www-form-urlencoded';
}
options.headers = params.headers;
}
if(params.body){
options.body = params.body;
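For context, a minimal sketch (not from this commit) of the RequestInit pattern the fetch-based request module builds on: defaults first, caller headers spread over them, and the body handed straight to fetch. The URL and form fields below are placeholders.

const options: RequestInit = {
  method: 'POST',
  headers: { 'accept-language': 'en-US,en;q=0.9' }
};
// caller-supplied headers are merged over the defaults, so later keys win
options.headers = { ...(options.headers as Record<string, string>), 'Content-Type': 'application/x-www-form-urlencoded' };
options.body = new URLSearchParams({ grant_type: 'refresh_token' }).toString();
const res = await fetch('https://example.com/auth/v1/token', options);
console.info(res.status, await res.text());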
@@ -1,132 +0,0 @@
import got, { OptionsOfUnknownResponseBody, ReadError, Response, ResponseType } from 'got';
import { console } from './log';

// Used for future updates
// const argv = require('../funi').argv;
//
// const lang = {
//   'ptBR': {
//     langCode: 'pt-BR',
//     regionCode: 'BR'
//   },
//   'esLA': {
//     langCode: 'es-LA',
//     regionCode: 'MX'
//   }
// };

export type Options = {
url: string,
responseType?: ResponseType,
baseUrl?: string,
querystring?: Record<string, any>,
auth?: {
user: string,
pass: string
},
useToken?: boolean,
token?: string|boolean,
dinstid?: boolean|string,
debug?: boolean
}
// TODO convert to class
const getData = async <T = string>(options: Options) => {
const regionHeaders = {};

const gOptions = {
url: options.url,
http2: true,
headers: {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:70.0) Gecko/20100101 Firefox/70.0',
'Accept-Encoding': 'gzip',
...regionHeaders
}
} as OptionsOfUnknownResponseBody;
if(options.responseType) {
gOptions.responseType = options.responseType;
}
if(options.baseUrl){
gOptions.prefixUrl = options.baseUrl;
gOptions.url = gOptions.url?.toString().replace(/^\//,'');
}
if(options.querystring){
gOptions.url += `?${new URLSearchParams(options.querystring).toString()}`;
}
if(options.auth){
gOptions.method = 'POST';
const newHeaders = {
...gOptions.headers,
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Origin': 'https://ww.funimation.com',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Accept-Encoding': 'gzip, deflate, br',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0'
};
gOptions.headers = newHeaders;
gOptions.body = `username=${encodeURIComponent(options.auth.user)}&password=${encodeURIComponent(options.auth.pass)}`;
}
if(options.useToken && options.token){
gOptions.headers = {
...gOptions.headers,
Authorization: `Token ${options.token}`
};
}
if(options.dinstid){
gOptions.headers = {
...gOptions.headers,
devicetype: 'Android Phone'
};
}
// debug
gOptions.hooks = {
beforeRequest: [
(gotOpts) => {
if(options.debug){
console.debug('GOT OPTIONS:');
console.debug(gotOpts);
}
}
]
};
try {
const res = await got(gOptions);
if(res.body && (options.responseType !== 'buffer' && (res.body as string).match(/^</))){
throw { name: 'HTMLError', res };
}
return {
ok: true,
res: {
...res,
body: res.body as T
},
};
}
catch(_error){
const error = _error as {
name: string,
} & ReadError & {
res: Response<unknown>
};
if(options.debug){
console.debug(error);
}
if(error.response && error.response.statusCode && error.response.statusMessage){
console.error(`${error.name} ${error.response.statusCode}: ${error.response.statusMessage}`);
}
else if(error.name && error.name == 'HTMLError' && error.res && error.res.body){
console.error(`${error.name}:`);
console.error(error.res.body);
}
else{
console.error(`${error.name}: ${error.code||error.message}`);
}
return {
ok: false,
error,
};
}
};

export default getData;
@@ -1,245 +0,0 @@
import shlp from 'sei-helper';
import got, { Headers, Method, Options, ReadError, Response } from 'got';
import cookieFile from './module.cookieFile';
import * as yamlCfg from './module.cfg-loader';
import { console } from './log';
//import curlReq from './module.curl-req';

export type Params = {
method?: Method,
headers?: Headers,
body?: string | Buffer,
binary?: boolean,
followRedirect?: boolean
}

// set usable cookies
const usefulCookies = {
auth: [
'etp_rt',
'c_visitor',
],
sess: [
'session_id',
],
};

// req
class Req {
private sessCfg: string;
private service: 'cr'|'hd'|'ao';
private session: Record<string, {
value: string;
expires: Date;
path: string;
domain: string;
secure: boolean;
'Max-Age'?: string
}> = {};
private cfgDir = yamlCfg.cfgDir;
private curl: boolean|string = false;

constructor(private domain: Record<string, unknown>, private debug: boolean, private nosess = false, private type: 'cr'|'hd'|'ao') {
this.sessCfg = yamlCfg.sessCfgFile[type];
this.service = type;
}
async getData<T = string> (durl: string, params?: Params) {
params = params || {};
// options
const options: Options & {
minVersion?: string,
maxVersion?: string
curlDebug?: boolean
} = {
method: params.method ? params.method : 'GET',
headers: {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:90.0) Gecko/20100101 Firefox/90.0',
},
};
// additional params
if(params.headers){
options.headers = {...options.headers, ...params.headers};
}
if(options.method == 'POST'){
if (!(options.headers as Headers)['Content-Type']) {
(options.headers as Headers)['Content-Type'] = 'application/x-www-form-urlencoded';
}
}
if(params.body){
options.body = params.body;
}
if(params.binary == true){
options.responseType = 'buffer';
}
if(typeof params.followRedirect == 'boolean'){
options.followRedirect = params.followRedirect;
}
// if auth
//const loc = new URL(durl);
// avoid cloudflare protection
// debug
options.hooks = {
beforeRequest: [
(options) => {
if(this.debug){
console.debug('[DEBUG] GOT OPTIONS:');
console.debug(options);
}
}
]
};
if(this.debug){
options.curlDebug = true;
}
// try do request
try {
const res = await got(durl.toString(), options) as unknown as Response<T>;
return {
ok: true,
res
};
}
catch(_error){
const error = _error as {
name: string
} & ReadError & {
res: Response<unknown>
};
if(error.response && error.response.statusCode && error.response.statusMessage){
console.error(`${error.name} ${error.response.statusCode}: ${error.response.statusMessage}`);
}
else{
console.error(`${error.name}: ${error.code || error.message}`);
}
if(error.response && !error.res){
error.res = error.response;
const docTitle = (error.res.body as string).match(/<title>(.*)<\/title>/);
if(error.res.body && docTitle){
console.error(docTitle[1]);
}
}
if(error.res && error.res.body && error.response.statusCode
&& error.response.statusCode != 404 && error.response.statusCode != 403){
console.error('Body:', error.res.body);
}
return {
ok: false,
error,
};
}
}
setNewCookie(setCookie: Record<string, string>, isAuth: boolean, fileData?: string){
const cookieUpdated: string[] = []; let lastExp = 0;
console.trace('Type of setCookie:', typeof setCookie, setCookie);
const parsedCookie = fileData ? cookieFile(fileData) : shlp.cookie.parse(setCookie);
for(const cookieName of Object.keys(parsedCookie)){
if(parsedCookie[cookieName] && parsedCookie[cookieName].value && parsedCookie[cookieName].value == 'deleted'){
delete parsedCookie[cookieName];
}
}
for(const uCookie of usefulCookies.auth){
const cookieForceExp = 60*60*24*7;
const cookieExpCur = this.session[uCookie] ? this.session[uCookie] : { expires: 0 };
const cookieExp = new Date(cookieExpCur.expires).getTime() - cookieForceExp;
if(cookieExp > lastExp){
lastExp = cookieExp;
}
}
for(const uCookie of usefulCookies.auth){
if(!parsedCookie[uCookie]){
continue;
}
if(isAuth || parsedCookie[uCookie] && Date.now() > lastExp){
this.session[uCookie] = parsedCookie[uCookie];
cookieUpdated.push(uCookie);
}
}
for(const uCookie of usefulCookies.sess){
if(!parsedCookie[uCookie]){
continue;
}
if(
isAuth
|| this.nosess && parsedCookie[uCookie]
|| parsedCookie[uCookie] && !this.checkSessId(this.session[uCookie])
){
const sessionExp = 60*60;
this.session[uCookie] = parsedCookie[uCookie];
this.session[uCookie].expires = new Date(Date.now() + sessionExp*1000);
this.session[uCookie]['Max-Age'] = sessionExp.toString();
cookieUpdated.push(uCookie);
}
}
if(cookieUpdated.length > 0){
if(this.debug){
console.info('[SAVING FILE]',`${this.sessCfg}.yml`);
}
if (this.type === 'cr') {
yamlCfg.saveCRSession(this.session);
} else if (this.type === 'hd') {
yamlCfg.saveHDSession(this.session);
}
console.info(`Cookies were updated! (${cookieUpdated.join(', ')})\n`);
}
}
checkCookieVal(chcookie: Record<string, string>){
return chcookie
&& chcookie.toString() == '[object Object]'
&& typeof chcookie.value == 'string'
? true : false;
}
checkSessId(session_id: Record<string, unknown>){
if(session_id && typeof session_id.expires == 'string'){
session_id.expires = new Date(session_id.expires);
}
return session_id
&& session_id.toString() == '[object Object]'
&& typeof session_id.expires == 'object'
&& Date.now() < new Date(session_id.expires as any).getTime()
&& typeof session_id.value == 'string'
? true : false;
}
uuidv4(){
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
const r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
}
}

function buildProxy(proxyBaseUrl: string, proxyAuth: string){
if(!proxyBaseUrl.match(/^(https?|socks4|socks5):/)){
proxyBaseUrl = 'http://' + proxyBaseUrl;
}

const proxyCfg = new URL(proxyBaseUrl);
let proxyStr = `${proxyCfg.protocol}//`;

if(typeof proxyCfg.hostname != 'string' || proxyCfg.hostname == ''){
throw new Error('[ERROR] Hostname and port required for proxy!');
}

if(proxyAuth && typeof proxyAuth == 'string' && proxyAuth.match(':')){
proxyCfg.username = proxyAuth.split(':')[0];
proxyCfg.password = proxyAuth.split(':')[1];
proxyStr += `${proxyCfg.username}:${proxyCfg.password}@`;
}

proxyStr += proxyCfg.hostname;

if(!proxyCfg.port && proxyCfg.protocol == 'http:'){
proxyStr += ':80';
}
else if(!proxyCfg.port && proxyCfg.protocol == 'https:'){
proxyStr += ':443';
}

return proxyStr;
}

export {
buildProxy,
usefulCookies,
Req,
};
@@ -1,4 +1,3 @@
import got from 'got';
import fs from 'fs';
import { GithubTag, TagCompare } from '../@types/github';
import path from 'path';

@@ -26,28 +25,32 @@ const updateIgnore = [
'tsc.ts'
];

const askBeforeUpdate = [
'*.yml'
];
const askBeforeUpdate = ['*.yml'];

enum ApplyType {
DELETE, ADD, UPDATE
}

export type ApplyItem = {
type: ApplyType,
path: string,
content: string
DELETE,
ADD,
UPDATE
}

export default (async (force = false) => {
const isPackaged = (process as NodeJS.Process & {
pkg?: unknown
}).pkg ? true : !!process.env.contentDirectory;
export type ApplyItem = {
type: ApplyType;
path: string;
content: string;
};

export default async (force = false) => {
const isPackaged = (
process as NodeJS.Process & {
pkg?: unknown;
}
).pkg
? true
: !!process.env.contentDirectory;
if (isPackaged) {
return;
}
let updateFile: UpdateFile|undefined;
let updateFile: UpdateFile | undefined;
if (fs.existsSync(updateFilePlace)) {
updateFile = JSON.parse(fs.readFileSync(updateFilePlace).toString()) as UpdateFile;
if (new Date() < new Date(updateFile.nextCheck) && !force) {

@@ -55,73 +58,77 @@ export default (async (force = false) => {
}
}
console.info('Checking for updates...');
const tagRequest = await got('https://api.github.com/repos/anidl/multi-downloader-nx/tags');
const tags = JSON.parse(tagRequest.body) as GithubTag[];
const tagRequest = await fetch('https://api.github.com/repos/anidl/multi-downloader-nx/tags');
const tags = JSON.parse(await tagRequest.text()) as GithubTag[];

if (tags.length > 0) {
const newer = tags.filter(a => {
const newer = tags.filter((a) => {
return isNewer(packageJson.version, a.name);
});
console.info(`Found ${tags.length} release tags and ${newer.length} that are new.`);

if (newer.length < 1) {
console.info('No new tags found');
return done();
}
const newest = newer.sort((a, b) => a.name < b.name ? 1 : a.name > b.name ? -1 : 0)[0];
const compareRequest = await got(`https://api.github.com/repos/anidl/multi-downloader-nx/compare/${packageJson.version}...${newest.name}`);
const newest = newer.sort((a, b) => (a.name < b.name ? 1 : a.name > b.name ? -1 : 0))[0];
const compareRequest = await fetch(`https://api.github.com/repos/anidl/multi-downloader-nx/compare/${packageJson.version}...${newest.name}`);

const compareJSON = JSON.parse(compareRequest.body) as TagCompare;
const compareJSON = JSON.parse(await compareRequest.text()) as TagCompare;

console.info(`You are behind by ${compareJSON.ahead_by} releases!`);
const changedFiles = compareJSON.files.map(a => ({
...a,
filename: path.join(...a.filename.split('/'))
})).filter(a => {
return !updateIgnore.some(_filter => matchString(_filter, a.filename));
});
const changedFiles = compareJSON.files
.map((a) => ({
...a,
filename: path.join(...a.filename.split('/'))
}))
.filter((a) => {
return !updateIgnore.some((_filter) => matchString(_filter, a.filename));
});
if (changedFiles.length < 1) {
console.info('No file changes found... updating package.json. If you think this is an error please get the newst version yourself.');
return done(newest.name);
}
console.info(`Found file changes: \n${changedFiles.map(a => ` [${
a.status === 'modified' ? '*' : a.status === 'added' ? '+' : '-'
}] ${a.filename}`).join('\n')}`);
console.info(`Found file changes: \n${changedFiles.map((a) => ` [${a.status === 'modified' ? '*' : a.status === 'added' ? '+' : '-'}] ${a.filename}`).join('\n')}`);

const remove: string[] = [];

for (const a of changedFiles.filter(a => a.status !== 'added')) {
if (!askBeforeUpdate.some(pattern => matchString(pattern, a.filename)))
continue;
const answer = await seiHelper.question(`The developer decided that the file '${a.filename}' may contain information you changed yourself. Should they be overriden to be updated? [y/N]`);
if (answer.toLowerCase() === 'y')
remove.push(a.sha);
for (const a of changedFiles.filter((a) => a.status !== 'added')) {
if (!askBeforeUpdate.some((pattern) => matchString(pattern, a.filename))) continue;
const answer = await seiHelper.question(
`The developer decided that the file '${a.filename}' may contain information you changed yourself. Should they be overriden to be updated? [y/N]`
);
if (answer.toLowerCase() === 'y') remove.push(a.sha);
}

const changesToApply = await Promise.all(changedFiles.filter(a => !remove.includes(a.sha)).map(async (a): Promise<ApplyItem> => {
if (a.filename.endsWith('.ts') || a.filename.endsWith('tsx')) {
const isTSX = a.filename.endsWith('tsx');
const ret = {
path: a.filename.slice(0, isTSX ? -3 : -2) + `js${isTSX ? 'x' : ''}`,
content: transpileModule((await got(a.raw_url)).body, {
compilerOptions: tsConfig.compilerOptions as unknown as CompilerOptions
}).outputText,
type: a.status === 'modified' ? ApplyType.UPDATE : a.status === 'added' ? ApplyType.ADD : ApplyType.DELETE
};
console.info('✓ Transpiled %s', ret.path);
return ret;
} else {
const ret = {
path: a.filename,
content: (await got(a.raw_url)).body,
type: a.status === 'modified' ? ApplyType.UPDATE : a.status === 'added' ? ApplyType.ADD : ApplyType.DELETE
};
console.info('✓ Got %s', ret.path);
return ret;
}
}));
const changesToApply = await Promise.all(
changedFiles
.filter((a) => !remove.includes(a.sha))
.map(async (a): Promise<ApplyItem> => {
if (a.filename.endsWith('.ts') || a.filename.endsWith('tsx')) {
const isTSX = a.filename.endsWith('tsx');
const ret = {
path: a.filename.slice(0, isTSX ? -3 : -2) + `js${isTSX ? 'x' : ''}`,
content: transpileModule(await (await fetch(a.raw_url)).text(), {
compilerOptions: tsConfig.compilerOptions as unknown as CompilerOptions
}).outputText,
type: a.status === 'modified' ? ApplyType.UPDATE : a.status === 'added' ? ApplyType.ADD : ApplyType.DELETE
};
console.info('✓ Transpiled %s', ret.path);
return ret;
} else {
const ret = {
path: a.filename,
content: await (await fetch(a.raw_url)).text(),
type: a.status === 'modified' ? ApplyType.UPDATE : a.status === 'added' ? ApplyType.ADD : ApplyType.DELETE
};
console.info('✓ Got %s', ret.path);
return ret;
}
})
);

changesToApply.forEach(a => {
changesToApply.forEach((a) => {
try {
fsextra.ensureDirSync(path.dirname(a.path));
fs.writeFileSync(path.join(__dirname, '..', a.path), a.content);

@@ -133,41 +140,52 @@ export default (async (force = false) => {

console.info('Done');
return done();
}
});
}
};

function done(newVersion?: string) {
const next = new Date(Date.now() + 1000 * 60 * 60 * 24);
fs.writeFileSync(updateFilePlace, JSON.stringify({
lastCheck: Date.now(),
nextCheck: next.getTime()
} as UpdateFile, null, 2));
fs.writeFileSync(
updateFilePlace,
JSON.stringify(
{
lastCheck: Date.now(),
nextCheck: next.getTime()
} as UpdateFile,
null,
2
)
);
if (newVersion) {
fs.writeFileSync('../package.json', JSON.stringify({
...packageJson,
version: newVersion
}, null, 4));
fs.writeFileSync(
'../package.json',
JSON.stringify(
{
...packageJson,
version: newVersion
},
null,
4
)
);
}
console.info('[INFO] Searching for update finished. Next time running on the ' + next.toLocaleDateString() + ' at ' + next.toLocaleTimeString() + '.');
}

function isNewer(curr: string, compare: string) : boolean {
const currParts = curr.split('.').map(a => parseInt(a));
const compareParts = compare.split('.').map(a => parseInt(a));
function isNewer(curr: string, compare: string): boolean {
const currParts = curr.split('.').map((a) => parseInt(a));
const compareParts = compare.split('.').map((a) => parseInt(a));

for (let i = 0; i < Math.max(currParts.length, compareParts.length); i++) {
if (currParts.length <= i)
return true;
if (compareParts.length <= i)
return false;
if (currParts[i] !== compareParts[i])
return compareParts[i] > currParts[i];
if (currParts.length <= i) return true;
if (compareParts.length <= i) return false;
if (currParts[i] !== compareParts[i]) return compareParts[i] > currParts[i];
}

return false;
}

function matchString(pattern: string, toMatch: string) : boolean {
function matchString(pattern: string, toMatch: string): boolean {
const filter = path.join('..', pattern);
if (pattern.startsWith('*')) {
return toMatch.endsWith(pattern.slice(1));

@@ -176,4 +194,4 @@ function matchString(pattern: string, toMatch: string) : boolean {
} else {
return toMatch.split(path.sep).pop() === pattern;
}
}
}
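For context, a minimal sketch (not from this commit) of the fetch-based tag check the updater now performs; the GithubTag type here is reduced to the one field used, and a plain numeric-aware compare stands in for the isNewer() helper from the diff.

type GithubTag = { name: string };

async function listNewerTags(currentVersion: string): Promise<GithubTag[]> {
  const res = await fetch('https://api.github.com/repos/anidl/multi-downloader-nx/tags');
  const tags = JSON.parse(await res.text()) as GithubTag[];
  // keep only tags whose version string sorts above the current one
  return tags.filter((t) => t.name.localeCompare(currentVersion, undefined, { numeric: true }) > 0);
}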
@@ -54,7 +54,7 @@
"fast-xml-parser": "^5.2.2",
"ffprobe": "^1.1.2",
"fs-extra": "^11.3.0",
"got": "^11.8.6",
"got": "11.8.6",
"iso-639": "^0.2.2",
"leven": "^3.1.0",
"log4js": "^6.9.1",

@@ -63,6 +63,7 @@
"m3u8-parsed": "^1.3.0",
"mpd-parser": "^1.3.1",
"node-forge": "^1.3.1",
"ofetch": "^1.4.1",
"open": "^8.4.2",
"protobufjs": "^7.5.0",
"sei-helper": "^3.3.0",
pnpm-lock.yaml (7764 changed lines; diff not shown because it is too large)