Compare commits
2 commits: master...multi-down

| Author | SHA1 | Date |
|---|---|---|
|  | 6d1926cca3 |  |
|  | 501928b92d |  |
9 changed files with 219 additions and 121 deletions
@types/messageHandler.d.ts (vendored): 6 changes

@@ -107,7 +107,7 @@ export type FuniStreamData = { force?: 'Y'|'y'|'N'|'n'|'C'|'c', callbackMaker?:
ffmpegOptions: string[], mkvmergeOptions: string[], defaultAudio: LanguageItem, defaultSub: LanguageItem, ccTag: string }
export type FuniSubsData = { nosubs?: boolean, sub: boolean, dlsubs: string[], ccTag: string }
export type DownloadData = {
hslang?: string; id: string, e: string, dubLang: string[], dlsubs: string[], fileName: string, q: number, novids: boolean, noaudio: boolean, dlVideoOnce: boolean
}

export type AuthResponse = ResponseBase<undefined>;

@@ -136,7 +136,7 @@ export type ProgressData = {

export type PossibleMessages = keyof ServiceHandler;

export type DownloadInfo = {
image: string,
parent: {
title: string

@@ -158,4 +158,4 @@ export type GuiState = {

export type GuiStateService = {
queue: QueueItem[]
}
crunchy.ts: 83 changes

@@ -44,6 +44,7 @@ import { CrunchyAndroidObject } from './@types/crunchyAndroidObject';
import { CrunchyChapters, CrunchyChapter, CrunchyOldChapter } from './@types/crunchyChapters';
import vtt2ass from './modules/module.vtt2ass';
import { CrunchyPlayStream } from './@types/crunchyPlayStreams';
import buildCLIHandler from './modules/downloadProgress';

export type sxItem = {
language: langsData.LanguageItem,

@@ -1183,7 +1184,7 @@ export default class Crunchy implements ServiceClass {
return;
}

if (!this.cfg.bin.ffmpeg)
this.cfg.bin = await yamlCfg.loadBinCfg();

let mediaName = '...';

@@ -1267,7 +1268,7 @@ export default class Crunchy implements ServiceClass {
const startMS = startTimeMS ? startTimeMS : '00', endMS = endTimeMS ? endTimeMS : '00';
const startFormatted = startTime.toISOString().substring(11, 19)+'.'+startMS;
const endFormatted = endTime.toISOString().substring(11, 19)+'.'+endMS;

//Push Generated Chapters
if (chapterData.startTime > 1) {
compiledChapters.push(

@@ -1316,7 +1317,7 @@ export default class Crunchy implements ServiceClass {
endTime.setSeconds(chapter.end);
const startFormatted = startTime.toISOString().substring(11, 19)+'.00';
const endFormatted = endTime.toISOString().substring(11, 19)+'.00';

//Push generated chapters
if (chapter.type == 'intro') {
if (chapter.start > 0) {

@@ -1462,8 +1463,8 @@ export default class Crunchy implements ServiceClass {

for(const s of Object.keys(pbStreams)){
if (
(s.match(/hls/) || s.match(/dash/))
&& !(s.match(/hls/) && s.match(/drm/))
&& !((!canDecrypt || !this.cfg.bin.mp4decrypt) && s.match(/drm/))
&& !s.match(/trailer/)
) {

@@ -1559,6 +1560,7 @@ export default class Crunchy implements ServiceClass {
}

let tsFile = undefined;
const downloadStreams = [];

if(!dlFailed && curStream !== undefined && !(options.novids && options.noaudio)){
const streamPlaylistsReq = await this.req.getData(curStream.url, AuthHeaders);

@@ -1670,28 +1672,17 @@ export default class Crunchy implements ServiceClass {
const videoJson: M3U8Json = {
segments: chosenVideoSegments.segments
};
const videoDownload = await new streamdl({
output: chosenVideoSegments.pssh ? `${tempTsFile}.video.enc.m4s` : `${tsFile}.video.m4s`,
const output = chosenVideoSegments.pssh ? `${tempTsFile}.video.enc.m4s` : `${tsFile}.video.m4s`;
downloadStreams.push(new streamdl({
output,
timeout: options.timeout,
m3u8json: videoJson,
// baseurl: chunkPlaylist.baseUrl,
threads: options.partsize,
fsRetryTime: options.fsRetryTime * 1000,
override: options.force,
callback: options.callbackMaker ? options.callbackMaker({
fileName: `${path.isAbsolute(outFile) ? outFile.slice(this.cfg.dir.content.length) : outFile}`,
image: medias.image,
parent: {
title: medias.seasonTitle
},
title: medias.episodeTitle,
language: lang
}) : undefined
}).download();
if(!videoDownload.ok){
console.error(`DL Stats: ${JSON.stringify(videoDownload.parts)}\n`);
dlFailed = true;
}
identifier: output
}).download());
dlVideoOnce = true;
videoDownloaded = true;
}

@@ -1712,28 +1703,17 @@ export default class Crunchy implements ServiceClass {
const audioJson: M3U8Json = {
segments: chosenAudioSegments.segments
};
const audioDownload = await new streamdl({
output: chosenAudioSegments.pssh ? `${tempTsFile}.audio.enc.m4s` : `${tsFile}.audio.m4s`,
const output = chosenAudioSegments.pssh ? `${tempTsFile}.audio.enc.m4s` : `${tsFile}.audio.m4s`;
downloadStreams.push(new streamdl({
output,
timeout: options.timeout,
m3u8json: audioJson,
// baseurl: chunkPlaylist.baseUrl,
threads: options.partsize,
fsRetryTime: options.fsRetryTime * 1000,
override: options.force,
callback: options.callbackMaker ? options.callbackMaker({
fileName: `${path.isAbsolute(outFile) ? outFile.slice(this.cfg.dir.content.length) : outFile}`,
image: medias.image,
parent: {
title: medias.seasonTitle
},
title: medias.episodeTitle,
language: lang
}) : undefined
}).download();
if(!audioDownload.ok){
console.error(`DL Stats: ${JSON.stringify(audioDownload.parts)}\n`);
dlFailed = true;
}
identifier: output
}).download());
audioDownloaded = true;
} else if (options.noaudio) {
console.info('Skipping audio download...');

@@ -1965,28 +1945,17 @@ export default class Crunchy implements ServiceClass {
if (!fs.existsSync(path.join(isAbsolut ? '' : this.cfg.dir.content, ...arr.slice(0, ind), val)))
fs.mkdirSync(path.join(isAbsolut ? '' : this.cfg.dir.content, ...arr.slice(0, ind), val));
});
const dlStreamByPl = await new streamdl({
output: `${tsFile}.ts`,
const output = `${tsFile}.ts`;
downloadStreams.push(new streamdl({
output,
timeout: options.timeout,
m3u8json: chunkPlaylist,
// baseurl: chunkPlaylist.baseUrl,
threads: options.partsize,
fsRetryTime: options.fsRetryTime * 1000,
override: options.force,
callback: options.callbackMaker ? options.callbackMaker({
fileName: `${path.isAbsolute(outFile) ? outFile.slice(this.cfg.dir.content.length) : outFile}`,
image: medias.image,
parent: {
title: medias.seasonTitle
},
title: medias.episodeTitle,
language: lang
}) : undefined
}).download();
if (!dlStreamByPl.ok) {
console.error(`DL Stats: ${JSON.stringify(dlStreamByPl.parts)}\n`);
dlFailed = true;
}
identifier: output
}).download());
files.push({
type: 'Video',
path: `${tsFile}.ts`,

@@ -2008,6 +1977,14 @@ export default class Crunchy implements ServiceClass {
fileName = parseFileName(options.fileName, variables, options.numbers, options.override).join(path.sep);
}

const downloads = await Promise.all(downloadStreams);
for (const download of downloads) {
if (!download.ok) {
console.error('Download failed, download stats: ', download.parts);
dlFailed = true;
}
}

if (compiledChapters.length > 0) {
try {
fileName = parseFileName(options.fileName, variables, options.numbers, options.override).join(path.sep);
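In this branch crunchy.ts stops awaiting each streamdl call in sequence: every `.download()` promise is pushed into `downloadStreams` (tagged with an `identifier` so the progress UI can tell the streams apart) and the results are checked once after `Promise.all`. A minimal sketch of that concurrency pattern, with a stand-in job type instead of the real streamdl options:

```ts
// Sketch only: `DownloadResult` and the job functions stand in for streamdl and
// its options; the shape mirrors the `downloadStreams` / `Promise.all` loop above.
type DownloadResult = { ok: boolean, parts: unknown };

async function runDownloads(jobs: Array<() => Promise<DownloadResult>>): Promise<boolean> {
  // Start every download immediately and keep the pending promises.
  const downloadStreams = jobs.map(start => start());
  // Await them together, then flag failures exactly once at the end.
  const downloads = await Promise.all(downloadStreams);
  let dlFailed = false;
  for (const download of downloads) {
    if (!download.ok) {
      console.error('Download failed, download stats: ', download.parts);
      dlFailed = true;
    }
  }
  return !dlFailed;
}
```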
index.ts: 9 changes

@@ -3,7 +3,7 @@ import { ServiceClass } from './@types/serviceClassInterface';
import { appArgv, overrideArguments } from './modules/module.app-args';
import * as yamlCfg from './modules/module.cfg-loader';
import { makeCommand, addToArchive } from './modules/module.downloadArchive';

import buildCLIHandler from './modules/downloadProgress';
import update from './modules/module.updater';

(async () => {

@@ -70,13 +70,14 @@ import update from './modules/module.updater';
case 'adn':
service = new (await import('./adn')).default;
break;
default:
service = new (await import(`./${argv.service}`)).default;
break;
}
await service.cli();
}
} else {
buildCLIHandler();
let service: ServiceClass;
switch(argv.service) {
case 'crunchy':

@@ -91,10 +92,10 @@ import update from './modules/module.updater';
case 'adn':
service = new (await import('./adn')).default;
break;
default:
service = new (await import(`./${argv.service}`)).default;
break;
}
await service.cli();
}
})();
modules/downloadProgress.ts (new file): 63 changes

@@ -0,0 +1,63 @@
import path from 'path';
import type { HLSCallback } from './hls-download';
import { console } from './log';
import cliProgress, { SingleBar } from 'cli-progress';
import shlp from 'sei-helper';
import HLSEvents from './hlsEventEmitter';
import { levels } from 'log4js';

export default function buildCLIHandler() {
const mb = new cliProgress.MultiBar({
clearOnComplete: true,
stream: process.stdout,
format: '{filename} [{bar}] {percentage}% | {speed} | {value}/{total} | {time}',
hideCursor: true
});
const bars: Record<string, {
bar: SingleBar,
textPos: number,
filename: string
}> = {};

HLSEvents.on('end', ({ identifier }) => {
bars[identifier]?.bar.stop();
delete bars[identifier];
});
HLSEvents.on('message', ({ identifier, severity, msg }) => {
if (severity.isGreaterThanOrEqualTo(levels.WARN))
console.log(severity, `${identifier.split(path.sep).pop() || ''}: ${msg}`);
mb.remove(bars[identifier]?.bar);
});
HLSEvents.on('progress', ({ identifier, total, cur, downloadSpeed, time }) => {
const filename = identifier.split(path.sep).pop() || '';
if (!Object.prototype.hasOwnProperty.call(bars, identifier)) {
bars[identifier] = {
bar: mb.create(total, cur, {
filename: filename.slice(0, 30),
speed: `${(downloadSpeed / 1000000).toPrecision(2)}Mb/s`,
time: `${shlp.formatTime(parseInt((time / 1000).toFixed(0)))}`
}),
textPos: 0,
filename
};
}
bars[identifier].bar.update(cur, {
speed: `${(downloadSpeed / 1000000).toPrecision(2)}Mb/s`,
time: `${shlp.formatTime(parseInt((time / 1000).toFixed(0)))}`,
});
});

setInterval(() => {
for (const item of Object.values(bars)) {
if (item.filename.length < 30)
continue;
if (item.textPos === item.filename.length)
item.textPos = 0;
item.bar.update({
filename: `${item.filename} ${item.filename}`.slice(item.textPos, item.textPos + 30)
});
item.textPos += 1;
}
}, 100);
}
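The new progress handler keeps one cli-progress bar per active download, keyed by the `identifier` string that each downloader passes through the HLS event emitter. A rough wiring sketch, assuming the module paths used in this branch; the identifier value is made up and the event payload fields follow the emits shown in this diff:

```ts
import buildCLIHandler from './modules/downloadProgress';
import HLSEvents from './modules/hlsEventEmitter';
import { levels } from 'log4js';

// index.ts calls this once before CLI downloads start; afterwards the
// 'progress', 'message' and 'end' events drive the multi-bar display.
buildCLIHandler();

const identifier = '/tmp/example.video.m4s'; // illustrative output path
HLSEvents.emit('message', { identifier, msg: 'Saving stream...', severity: levels.INFO });
HLSEvents.emit('progress', { identifier, total: 100, cur: 1, bytes: 2048, percent: 1, time: 500, downloadSpeed: 40960 });
HLSEvents.emit('end', { identifier });
```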
modules/hls-download.ts

@@ -7,15 +7,17 @@ import url from 'url';
import shlp from 'sei-helper';
import got, { Response } from 'got';

import { console } from './log';
import { ProgressData } from '../@types/messageHandler';
import HLSEvents from './hlsEventEmitter';
import { levels } from 'log4js';
const console = undefined;

// The following function should fix an issue with downloading. For more information see https://github.com/sindresorhus/got/issues/1489
const fixMiddleWare = (res: Response) => {
const isResponseOk = (response: Response) => {
const {statusCode} = response;
const limitStatusCode = response.request.options.followRedirect ? 299 : 399;

return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304;
};
if (isResponseOk(res)) {

@@ -47,6 +49,7 @@ type Key = {

export type HLSOptions = {
m3u8json: M3U8Json,
identifier: string,
output?: string,
threads?: number,
retries?: number,

@@ -56,10 +59,10 @@ export type HLSOptions = {
timeout?: number,
fsRetryTime?: number,
override?: 'Y'|'y'|'N'|'n'|'C'|'c'
callback?: HLSCallback
}

type Data = {
identifier: string,
parts: {
first: number,
total: number,

@@ -80,7 +83,6 @@ type Data = {
isResume: boolean,
bytesDownloaded: number,
waitTime: number,
callback?: HLSCallback,
override?: string,
dateStart: number
}

@@ -92,8 +94,8 @@ class hlsDownload {
// check playlist
if(
!options
|| !options.m3u8json
|| !options.m3u8json.segments
|| options.m3u8json.segments.length === 0
){
throw new Error('Playlist is empty!');

@@ -118,7 +120,7 @@ class hlsDownload {
isResume: options.offset ? options.offset > 0 : false,
bytesDownloaded: 0,
waitTime: options.fsRetryTime ?? 1000 * 5,
callback: options.callback,
identifier: options.identifier,
override: options.override,
dateStart: 0
};

@@ -129,28 +131,23 @@ class hlsDownload {
// try load resume file
if(fs.existsSync(fn) && fs.existsSync(`${fn}.resume`) && this.data.offset < 1){
try{
console.info('Resume data found! Trying to resume...');
HLSEvents.emit('message', { identifier: this.data.identifier, msg: 'Resume data found! Trying to resume...', severity: levels.INFO });
const resumeData = JSON.parse(fs.readFileSync(`${fn}.resume`, 'utf-8'));
if(
resumeData.total == this.data.m3u8json.segments.length
&& resumeData.completed != resumeData.total
&& !isNaN(resumeData.completed)
){
console.info('Resume data is ok!');
HLSEvents.emit('message', { identifier: this.data.identifier, msg: 'Resume data is ok!', severity: levels.INFO });
this.data.offset = resumeData.completed;
this.data.isResume = true;
}
else{
console.warn(' Resume data is wrong!');
console.warn({
resume: { total: resumeData.total, dled: resumeData.completed },
current: { total: this.data.m3u8json.segments.length },
});
HLSEvents.emit('message', { identifier: this.data.identifier, msg: 'Resume data is wrong!', severity: levels.WARN });
}
}
catch(e){
console.error('Resume failed, downloading will be not resumed!');
console.error(e);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Resume failed, downloading will be not resumed!\n${e}`, severity: levels.ERROR });
}
}
// ask before rewrite file

@@ -158,54 +155,55 @@ class hlsDownload {
let rwts = this.data.override ?? await shlp.question(`[Q] File «${fn}» already exists! Rewrite? ([y]es/[N]o/[c]ontinue)`);
rwts = rwts || 'N';
if (['Y', 'y'].includes(rwts[0])) {
console.info(`Deleting «${fn}»...`);
fs.unlinkSync(fn);
}
else if (['C', 'c'].includes(rwts[0])) {
HLSEvents.emit('end', { identifier: this.data.identifier });
return { ok: true, parts: this.data.parts };
}
else {
HLSEvents.emit('end', { identifier: this.data.identifier });
return { ok: false, parts: this.data.parts };
}
}
// show output filename
if (fs.existsSync(fn) && this.data.isResume) {
console.info(`Adding content to «${fn}»...`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Adding content to «${fn}»...`, severity: levels.INFO });
}
else{
console.info(`Saving stream to «${fn}»...`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Saving stream to «${fn}»...`, severity: levels.INFO });
}
// start time
this.data.dateStart = Date.now();
let segments = this.data.m3u8json.segments;
// download init part
if (segments[0].map && this.data.offset === 0 && !this.data.skipInit) {
console.info('Download and save init part...');
HLSEvents.emit('message', { identifier: this.data.identifier, msg: 'Download and save init part...', severity: levels.INFO });
const initSeg = segments[0].map as Segment;
if(segments[0].key){
initSeg.key = segments[0].key as Key;
}
try{
const initDl = await this.downloadPart(initSeg, 0, 0);
const initDl = await this.downloadPart(initSeg, 0, 0, this.data.identifier);
fs.writeFileSync(fn, initDl.dec, { flag: 'a' });
fs.writeFileSync(`${fn}.resume`, JSON.stringify({
completed: 0,
total: this.data.m3u8json.segments.length
}));
console.info('Init part downloaded.');
}
catch(e: any){
console.error(`Part init download error:\n\t${e.message}`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Part init download error:\n\t${e.message}`, severity: levels.ERROR });
HLSEvents.emit('end', { identifier: this.data.identifier });
return { ok: false, parts: this.data.parts };
}
}
else if(segments[0].map && this.data.offset === 0 && this.data.skipInit){
console.warn('Skipping init part can lead to broken video!');
HLSEvents.emit('message', { identifier: this.data.identifier, msg: 'Skipping init part can lead to broken video!', severity: levels.WARN });
}
// resuming ...
if(this.data.offset > 0){
segments = segments.slice(this.data.offset);
console.info(`Resuming download from part ${this.data.offset+1}...`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Resuming download from part ${this.data.offset+1}...`, severity: levels.INFO });
this.data.parts.completed = this.data.offset;
}
// dl process

@@ -221,18 +219,19 @@ class hlsDownload {
const curp = segments[px];
const key = curp.key as Key;
if(key && !krq.has(key.uri) && !this.data.keys[key.uri as string]){
krq.set(key.uri, this.downloadKey(key, px, this.data.offset));
krq.set(key.uri, this.downloadKey(key, px, this.data.offset, this.data.identifier));
}
}
try {
await Promise.all(krq.values());
} catch (er: any) {
console.error(`Key ${er.p + 1} download error:\n\t${er.message}`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Key ${er.p + 1} download error:\n\t${er.message}`, severity: levels.ERROR });
HLSEvents.emit('end', { identifier: this.data.identifier });
return { ok: false, parts: this.data.parts };
}
for (let px = offset; px < dlOffset && px < segments.length; px++){
const curp = segments[px] as Segment;
prq.set(px, this.downloadPart(curp, px, this.data.offset));
prq.set(px, this.downloadPart(curp, px, this.data.offset, this.data.identifier));
}
for (let i = prq.size; i--;) {
try {

@@ -241,15 +240,15 @@ class hlsDownload {
res[r.p - offset] = r.dec;
}
catch (error: any) {
console.error('Part %s download error:\n\t%s',
error.p + 1 + this.data.offset, error.message);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Part ${error.p + 1 + this.data.offset} download error:\n\t${error.message}`, severity: levels.ERROR });
prq.delete(error.p);
errcnt++;
}
}
// catch error
if (errcnt > 0) {
console.error(`${errcnt} parts not downloaded`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg:`${errcnt} parts not downloaded`, severity: levels.ERROR });
HLSEvents.emit('end', { identifier: this.data.identifier });
return { ok: false, parts: this.data.parts };
}
// write downloaded

@@ -260,15 +259,15 @@ class hlsDownload {
fs.writeFileSync(fn, r, { flag: 'a' });
break;
} catch (err) {
console.error(err);
console.error(`Unable to write to file '${fn}' (Attempt ${error+1}/3)`);
console.info(`Waiting ${Math.round(this.data.waitTime / 1000)}s before retrying`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Unable to write to file '${fn}' (Attempt ${error+1}/3)\n\t${err}`, severity: levels.ERROR });
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Waiting ${Math.round(this.data.waitTime / 1000)}s before retrying`, severity: levels.INFO });
await new Promise<void>((resolve) => setTimeout(() => resolve(), this.data.waitTime));
}
error++;
}
if (error === 3) {
console.error(`Unable to write content to '${fn}'.`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Unable to write content to '${fn}'.`, severity: levels.ERROR });
HLSEvents.emit('end', { identifier: this.data.identifier });
return { ok: false, parts: this.data.parts };
}
}

@@ -284,21 +283,29 @@ class hlsDownload {
completed: this.data.parts.completed,
total: totalSeg
}));
console.info(`${downloadedSeg} of ${totalSeg} parts downloaded [${data.percent}%] (${shlp.formatTime(parseInt((data.time / 1000).toFixed(0)))} | ${(data.downloadSpeed / 1000000).toPrecision(2)}Mb/s)`);
if (this.data.callback)
this.data.callback({ total: this.data.parts.total, cur: this.data.parts.completed, bytes: this.data.bytesDownloaded, percent: data.percent, time: data.time, downloadSpeed: data.downloadSpeed });
//console.info(`${downloadedSeg} of ${totalSeg} parts downloaded [${data.percent}%] (${shlp.formatTime(parseInt((data.time / 1000).toFixed(0)))} | ${(data.downloadSpeed / 1000000).toPrecision(2)}Mb/s)`);
HLSEvents.emit('progress', {
identifier: this.data.identifier,
total: this.data.parts.total,
cur: this.data.parts.completed,
bytes: this.data.bytesDownloaded,
percent: data.percent,
time: data.time,
downloadSpeed: data.downloadSpeed
});
}
// return result
fs.unlinkSync(`${fn}.resume`);
HLSEvents.emit('end', { identifier: this.data.identifier });
return { ok: true, parts: this.data.parts };
}
async downloadPart(seg: Segment, segIndex: number, segOffset: number){
async downloadPart(seg: Segment, segIndex: number, segOffset: number, identifier: string){
const sURI = extFn.getURI(seg.uri, this.data.baseurl);
let decipher, part, dec;
const p = segIndex;
try {
if (seg.key != undefined) {
decipher = await this.getKey(seg.key, p, segOffset);
decipher = await this.getKey(seg.key, p, segOffset, identifier);
}
part = await extFn.getData(p, sURI, {
...(seg.byterange ? {

@@ -314,10 +321,10 @@ class hlsDownload {
}
return res;
}
]);
], identifier);
if(this.data.checkPartLength && !(part as any).headers['content-length']){
this.data.checkPartLength = false;
console.warn(`Part ${segIndex+segOffset+1}: can't check parts size!`);
HLSEvents.emit('message', { identifier: this.data.identifier, msg: `Part ${segIndex+segOffset+1}: can't check parts size!`, severity: levels.WARN });
}
if (decipher == undefined) {
this.data.bytesDownloaded += (part.body as Buffer).byteLength;

@@ -333,7 +340,7 @@ class hlsDownload {
}
return { dec, p };
}
async downloadKey(key: Key, segIndex: number, segOffset: number){
async downloadKey(key: Key, segIndex: number, segOffset: number, identifier: string){
const kURI = extFn.getURI(key.uri, this.data.baseurl);
if (!this.data.keys[kURI]) {
try {

@@ -349,7 +356,7 @@ class hlsDownload {
}
return res;
}
]);
], identifier);
return rkey;
}
catch (error: any) {

@@ -358,12 +365,12 @@ class hlsDownload {
}
}
}
async getKey(key: Key, segIndex: number, segOffset: number){
async getKey(key: Key, segIndex: number, segOffset: number, identifier: string){
const kURI = extFn.getURI(key.uri, this.data.baseurl);
const p = segIndex;
if (!this.data.keys[kURI]) {
try{
const rkey = await this.downloadKey(key, segIndex, segOffset);
const rkey = await this.downloadKey(key, segIndex, segOffset, identifier);
if (!rkey)
throw new Error();
this.data.keys[kURI] = rkey.body;

@@ -382,7 +389,7 @@ class hlsDownload {
return crypto.createDecipheriv('aes-128-cbc', this.data.keys[kURI], iv);
}
}

const extFn = {
getURI: (uri: string, baseurl?: string) => {
const httpURI = /^https{0,1}:/.test(uri);

@@ -402,7 +409,7 @@ const extFn = {
const downloadSpeed = downloadedBytes / (dateElapsed / 1000); //Bytes per second
return { percent, time: revParts, downloadSpeed };
},
getData: (partIndex: number, uri: string, headers: Record<string, string>, segOffset: number, isKey: boolean, timeout: number, retry: number, afterResponse: ((res: Response, retryWithMergedOptions: () => Response) => Response)[]) => {
getData: (partIndex: number, uri: string, headers: Record<string, string>, segOffset: number, isKey: boolean, timeout: number, retry: number, afterResponse: ((res: Response, retryWithMergedOptions: () => Response) => Response)[], identifier: string) => {
// get file if uri is local
if (uri.startsWith('file://')) {
return {

@@ -437,8 +444,7 @@ const extFn = {
if(error){
const partType = isKey ? 'Key': 'Part';
const partIndx = partIndex + 1 + segOffset;
console.warn('%s %s: %d attempt to retrieve data', partType, partIndx, retryCount + 1);
console.error(`\t${error.message}`);
HLSEvents.emit('message', { identifier: identifier, msg: `${partType} ${partIndx}: ${retryCount + 1} attempt to retrieve data\n\t${error.message}`, severity: levels.WARN });
}
}
]

@@ -449,5 +455,5 @@ const extFn = {
return got(uri, options);
}
};

export default hlsDownload;
modules/hlsEventEmitter.ts (new file): 31 changes

@@ -0,0 +1,31 @@
import EventEmitter from "events";
import { ProgressData } from "../@types/messageHandler";
import { Level } from "log4js";

type BaseEvent = {
identifier: string
}

type ProgressEvent = ProgressData & BaseEvent

type MessageEvent = {
msg: string,
severity: Level
} & BaseEvent

type HLSEventTypes = {
progress: (data: ProgressEvent) => unknown,
message: (data: MessageEvent) => unknown,
end: (data: BaseEvent) => unknown
}

declare interface HLSEventEmitter {
on<T extends keyof HLSEventTypes>(event: T, listener: HLSEventTypes[T]): this;
emit<T extends keyof HLSEventTypes>(event: T, data: Parameters<HLSEventTypes[T]>[0]): boolean;
}

class HLSEventEmitter extends EventEmitter {}

const eventHandler = new HLSEventEmitter();

export default eventHandler;
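The `declare interface` that merges with `class HLSEventEmitter extends EventEmitter {}` gives `on` and `emit` compile-time checking against `HLSEventTypes` while keeping Node's plain EventEmitter at runtime. A small self-contained sketch of the same declaration-merging pattern, with simplified stand-in event types:

```ts
import EventEmitter from 'events';

// Stand-in payloads; the real ones in this branch are ProgressData & BaseEvent, etc.
type DemoEvents = {
  progress: (data: { identifier: string, cur: number, total: number }) => unknown,
  end: (data: { identifier: string }) => unknown,
};

// Declaration merging: the interface narrows on/emit, the class provides the runtime.
declare interface DemoEmitter {
  on<T extends keyof DemoEvents>(event: T, listener: DemoEvents[T]): this;
  emit<T extends keyof DemoEvents>(event: T, data: Parameters<DemoEvents[T]>[0]): boolean;
}
class DemoEmitter extends EventEmitter {}

const demo = new DemoEmitter();
demo.on('progress', ({ identifier, cur, total }) => console.log(`${identifier}: ${cur}/${total}`));
demo.emit('progress', { identifier: 'demo', cur: 1, total: 10 });
// demo.emit('end', { cur: 1 });  // rejected at compile time: payload must include identifier
```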
modules/log.ts

@@ -2,7 +2,6 @@ import fs from 'fs';
import path from 'path';
import { workingDir } from './module.cfg-loader';
import log4js from 'log4js';

const logFolder = path.join(workingDir, 'logs');
const latest = path.join(logFolder, 'latest.log');

@@ -16,26 +15,26 @@ const makeLogFolder = () => {
};

const makeLogger = () => {
global.console.log =
global.console.info =
global.console.warn =
global.console.error =
global.console.warn =
global.console.error =
global.console.debug = (...data: any[]) => {
console.info((data.length >= 1 ? data.shift() : ''), ...data);
};
makeLogFolder();
log4js.configure({
appenders: {
console: {
type: 'console', layout: {
type: 'pattern',
pattern: process.env.isGUI === 'true' ? '%[%x{info}%m%]' : '%x{info}%m',
pattern: process.env.isGUI === 'true' ? '\r%[%x{info}%m%]' : '\r%x{info}%m',
tokens: {
info: (ev) => {
return ev.level.levelStr === 'INFO' ? '' : `[${ev.level.levelStr}] `;
return ev.level.levelStr === 'INFO' ? '\r' : `\r[${ev.level.levelStr}] `;
}
}
}
}
},
file: {
type: 'file',

@@ -66,4 +65,4 @@ const getLogger = () => {
return log4js.getLogger();
};

export const console = getLogger();
package.json

@@ -42,6 +42,7 @@
"dependencies": {
"@types/xmldom": "^0.1.34",
"@yao-pkg/pkg": "^5.11.1",
"cli-progress": "^3.12.0",
"cors": "^2.8.5",
"express": "^4.19.2",
"ffprobe": "^1.1.2",

@@ -62,6 +63,7 @@
"yargs": "^17.7.2"
},
"devDependencies": {
"@types/cli-progress": "^3.11.5",
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/ffprobe": "^1.1.8",
pnpm-lock.yaml

@@ -11,6 +11,9 @@ dependencies:
'@yao-pkg/pkg':
specifier: ^5.11.1
version: 5.11.1
cli-progress:
specifier: ^3.12.0
version: 3.12.0
cors:
specifier: ^2.8.5
version: 2.8.5

@@ -67,6 +70,9 @@ dependencies:
version: 17.7.2

devDependencies:
'@types/cli-progress':
specifier: ^3.11.5
version: 3.11.5
'@types/cors':
specifier: ^2.8.17
version: 2.8.17

@@ -1851,6 +1857,12 @@ packages:
'@types/responselike': 1.0.0
dev: false

/@types/cli-progress@3.11.5:
resolution: {integrity: sha512-D4PbNRbviKyppS5ivBGyFO29POlySLmA2HyUFE4p5QGazAMM3CwkKWcvTl8gvElSuxRh6FPKL8XmidX873ou4g==}
dependencies:
'@types/node': 18.15.11
dev: true

/@types/connect@3.4.38:
resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==}
dependencies:

@@ -2830,6 +2842,13 @@ packages:
resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==}
dev: false

/cli-progress@3.12.0:
resolution: {integrity: sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A==}
engines: {node: '>=4'}
dependencies:
string-width: 4.2.3
dev: false

/cliui@7.0.4:
resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==}
dependencies: